// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_WASM_CODE_MANAGER_H_
#define V8_WASM_WASM_CODE_MANAGER_H_

#include <atomic>
#include <list>
#include <map>
#include <memory>
#include <unordered_set>
#include <utility>
#include <vector>

#include "src/base/macros.h"
#include "src/base/optional.h"
#include "src/builtins/builtins-definitions.h"
#include "src/handles.h"
#include "src/trap-handler/trap-handler.h"
#include "src/vector.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/wasm-features.h"
#include "src/wasm/wasm-limits.h"
#include "src/wasm/wasm-tier.h"

namespace v8 {
namespace internal {

class Code;
class CodeDesc;
class Isolate;

namespace wasm {

class NativeModule;
class WasmCodeManager;
struct WasmCompilationResult;
class WasmEngine;
class WasmMemoryTracker;
class WasmImportWrapperCache;
struct WasmModule;

// Sorted, disjoint memory regions, each of the form [start, end). Adjacent
// regions are always coalesced: there is never a pair [start, end),
// [end, other_end), since it would have been merged into [start, other_end).
class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 public:
  DisjointAllocationPool() = default;

  explicit DisjointAllocationPool(base::AddressRegion region)
      : regions_({region}) {}

  DisjointAllocationPool(DisjointAllocationPool&& other) V8_NOEXCEPT = default;
  DisjointAllocationPool& operator=(DisjointAllocationPool&& other)
      V8_NOEXCEPT = default;

  // Merge the parameter region into this object, preserving the ordering of
  // the regions. The caller must ensure that the region does not intersect
  // any existing region - typically it was obtained from a previous
  // {Allocate}.
  void Merge(base::AddressRegion);

  // Allocate a contiguous region of size {size}. Return an empty region on
  // failure.
  base::AddressRegion Allocate(size_t size);

  bool IsEmpty() const { return regions_.empty(); }
  const std::list<base::AddressRegion>& regions() const { return regions_; }

 private:
  std::list<base::AddressRegion> regions_;

  DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool);
};
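
// Illustrative usage of {DisjointAllocationPool} (a sketch; the region and
// sizes are hypothetical, and which sub-region {Allocate} returns is up to
// the implementation):
//
//   DisjointAllocationPool pool({0x10000, 0x8000});  // manage [0x10000, 0x18000)
//   base::AddressRegion r = pool.Allocate(0x1000);   // e.g. [0x10000, 0x11000)
//   // ... use {r} for code ...
//   pool.Merge(r);  // return {r}; it coalesces with adjacent free regions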

class V8_EXPORT_PRIVATE WasmCode final {
 public:
  enum Kind {
    kFunction,
    kWasmToJsWrapper,
    kRuntimeStub,
    kInterpreterEntry,
    kJumpTable
  };

  // Each runtime stub is identified by an id. This id is used to reference the
  // stub via {RelocInfo::WASM_STUB_CALL} and gets resolved during relocation.
  enum RuntimeStubId {
#define DEF_ENUM(Name) k##Name,
#define DEF_ENUM_TRAP(Name) kThrowWasm##Name,
    WASM_RUNTIME_STUB_LIST(DEF_ENUM, DEF_ENUM_TRAP)
#undef DEF_ENUM_TRAP
#undef DEF_ENUM
    kRuntimeStubCount
  };
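
  // For illustration: each plain list entry {Name} expands to an enumerator
  // {kName}, and each trap entry to {kThrowWasmName}. A sketch with two
  // hypothetical list entries:
  //
  //   enum RuntimeStubId {
  //     kWasmCompileLazy,           // from DEF_ENUM(WasmCompileLazy)
  //     kThrowWasmTrapUnreachable,  // from DEF_ENUM_TRAP(TrapUnreachable)
  //     // ...
  //     kRuntimeStubCount
  //   };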

  Vector<byte> instructions() const { return instructions_; }
  Address instruction_start() const {
    return reinterpret_cast<Address>(instructions_.start());
  }
  Vector<const byte> reloc_info() const { return reloc_info_.as_vector(); }
  Vector<const byte> source_positions() const {
    return source_position_table_.as_vector();
  }

  uint32_t index() const {
    DCHECK(!IsAnonymous());
    return index_;
  }
  // Anonymous functions are functions that don't carry an index.
  bool IsAnonymous() const { return index_ == kAnonymousFuncIndex; }
  Kind kind() const { return kind_; }
  NativeModule* native_module() const { return native_module_; }
  ExecutionTier tier() const { return tier_; }
  Address constant_pool() const;
  Address code_comments() const;
  uint32_t code_comments_size() const;
  size_t constant_pool_offset() const { return constant_pool_offset_; }
  size_t safepoint_table_offset() const { return safepoint_table_offset_; }
  size_t handler_table_offset() const { return handler_table_offset_; }
  size_t code_comments_offset() const { return code_comments_offset_; }
  size_t unpadded_binary_size() const { return unpadded_binary_size_; }
  uint32_t stack_slots() const { return stack_slots_; }
  uint32_t tagged_parameter_slots() const { return tagged_parameter_slots_; }
  bool is_liftoff() const { return tier_ == ExecutionTier::kLiftoff; }
  bool contains(Address pc) const {
    return reinterpret_cast<Address>(instructions_.start()) <= pc &&
           pc < reinterpret_cast<Address>(instructions_.end());
  }

  Vector<trap_handler::ProtectedInstructionData> protected_instructions()
      const {
    return protected_instructions_.as_vector();
  }

  void Validate() const;
  void Print(const char* name = nullptr) const;
  void MaybePrint(const char* name = nullptr) const;
  void Disassemble(const char* name, std::ostream& os,
                   Address current_pc = kNullAddress) const;

  static bool ShouldBeLogged(Isolate* isolate);
  void LogCode(Isolate* isolate) const;

  ~WasmCode();

  void IncRef() {
    int old_val = ref_count_.fetch_add(1, std::memory_order_relaxed);
    DCHECK_LE(1, old_val);
    DCHECK_GT(kMaxInt, old_val);
    USE(old_val);
  }

  // Decrement the ref count. Returns whether this code becomes dead and needs
  // to be freed.
  V8_WARN_UNUSED_RESULT bool DecRef() {
    int old_count = ref_count_.load(std::memory_order_relaxed);
    while (true) {
      DCHECK_LE(1, old_count);
      if (V8_UNLIKELY(old_count == 1)) return DecRefOnPotentiallyDeadCode();
      if (ref_count_.compare_exchange_weak(old_count, old_count - 1,
                                           std::memory_order_relaxed)) {
        return false;
      }
    }
  }
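
  // Illustrative caller pattern (a sketch; {code} stands for any live
  // {WasmCode*} the caller already holds a reference to):
  //
  //   code->IncRef();  // e.g. when storing {code} in a function table
  //   // ... use the code ...
  //   if (code->DecRef()) {
  //     // Last reference dropped; free the code via its module.
  //     code->native_module()->FreeCode(VectorOf(&code, 1));
  //   }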

  // Decrement the ref count on a set of {WasmCode} objects, potentially
  // belonging to different {NativeModule}s. Dead code will be deleted.
  static void DecrementRefCount(Vector<WasmCode*>);

  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };

  static constexpr uint32_t kAnonymousFuncIndex = 0xffffffff;
  STATIC_ASSERT(kAnonymousFuncIndex > kV8MaxWasmFunctions);

 private:
  friend class NativeModule;

  WasmCode(NativeModule* native_module, uint32_t index,
           Vector<byte> instructions, uint32_t stack_slots,
           uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
           size_t handler_table_offset, size_t constant_pool_offset,
           size_t code_comments_offset, size_t unpadded_binary_size,
           OwnedVector<trap_handler::ProtectedInstructionData>
               protected_instructions,
           OwnedVector<const byte> reloc_info,
           OwnedVector<const byte> source_position_table, Kind kind,
           ExecutionTier tier)
      : instructions_(instructions),
        reloc_info_(std::move(reloc_info)),
        source_position_table_(std::move(source_position_table)),
        native_module_(native_module),
        index_(index),
        kind_(kind),
        constant_pool_offset_(constant_pool_offset),
        stack_slots_(stack_slots),
        tagged_parameter_slots_(tagged_parameter_slots),
        safepoint_table_offset_(safepoint_table_offset),
        handler_table_offset_(handler_table_offset),
        code_comments_offset_(code_comments_offset),
        unpadded_binary_size_(unpadded_binary_size),
        protected_instructions_(std::move(protected_instructions)),
        tier_(tier) {
    DCHECK_LE(safepoint_table_offset, unpadded_binary_size);
    DCHECK_LE(handler_table_offset, unpadded_binary_size);
    DCHECK_LE(code_comments_offset, unpadded_binary_size);
    DCHECK_LE(constant_pool_offset, unpadded_binary_size);
  }

  // Code objects that have been registered with the global trap handler within
  // this process will have a {trap_handler_index} associated with them.
  size_t trap_handler_index() const;
  void set_trap_handler_index(size_t);
  bool HasTrapHandlerIndex() const;

  // Register protected instruction information with the trap handler. Sets
  // {trap_handler_index}.
  void RegisterTrapHandlerData();

  // Slow path for {DecRef}: the code becomes potentially dead.
  // Returns whether this code becomes dead and needs to be freed.
  bool DecRefOnPotentiallyDeadCode();

  Vector<byte> instructions_;
  OwnedVector<const byte> reloc_info_;
  OwnedVector<const byte> source_position_table_;
  NativeModule* native_module_ = nullptr;
  uint32_t index_;
  Kind kind_;
  size_t constant_pool_offset_ = 0;
  uint32_t stack_slots_ = 0;
  // Number of tagged parameters passed to this function via the stack. This
  // value is used by the stack walker (e.g. GC) to find references.
  uint32_t tagged_parameter_slots_ = 0;
  // We care about safepoint data for wasm-to-js functions, since there may be
  // stack/register tagged values for large number conversions.
  size_t safepoint_table_offset_ = 0;
  size_t handler_table_offset_ = 0;
  size_t code_comments_offset_ = 0;
  size_t unpadded_binary_size_ = 0;
  intptr_t trap_handler_index_ = -1;
  OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  ExecutionTier tier_;

  // WasmCode is ref counted. Counters are held by:
  // 1) The jump table.
  // 2) Function tables.
  // 3) {WasmCodeRefScope}s.
  // 4) The set of potentially dead code in the {WasmEngine}.
  // If a decrement of (1) or (2) would drop the ref count to 0, that code
  // becomes a candidate for garbage collection. At that point, we add
  // ref counts for (4) *before* decrementing the counter to ensure the code
  // stays alive as long as it's being used. Once the ref count drops to zero,
  // the code object is deleted and the memory for the machine code is freed.
  std::atomic<int> ref_count_{1};

  DISALLOW_COPY_AND_ASSIGN(WasmCode);
};

// Return a textual description of the kind.
const char* GetWasmCodeKindAsString(WasmCode::Kind);

class V8_EXPORT_PRIVATE NativeModule final {
 public:
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
  static constexpr bool kCanAllocateMoreMemory = false;
#else
  static constexpr bool kCanAllocateMoreMemory = true;
#endif

  // {AddCode} is thread safe w.r.t. other calls to {AddCode} or methods adding
  // code below, i.e. it can be called concurrently from background threads.
  // The returned code still needs to be published via {PublishCode}.
  std::unique_ptr<WasmCode> AddCode(
      uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
      uint32_t tagged_parameter_slots,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      ExecutionTier tier);

  // {PublishCode} makes the code available to the system by entering it into
  // the code table and patching the jump table. It returns a raw pointer to
  // the given {WasmCode} object.
  WasmCode* PublishCode(std::unique_ptr<WasmCode>);
  // Hold the {allocation_mutex_} when calling {PublishCodeLocked}.
  WasmCode* PublishCodeLocked(std::unique_ptr<WasmCode>);
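
  // Illustrative two-phase flow (a sketch; {native_module} and the elided
  // arguments are hypothetical):
  //
  //   // On a background thread: generate the code.
  //   std::unique_ptr<WasmCode> code = native_module->AddCode(...);
  //   // Later: enter it into the code table and patch the jump table.
  //   WasmCode* published = native_module->PublishCode(std::move(code));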

  WasmCode* AddDeserializedCode(
      uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
      uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
      size_t handler_table_offset, size_t constant_pool_offset,
      size_t code_comments_offset, size_t unpadded_binary_size,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> reloc_info,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      ExecutionTier tier);

  // Adds anonymous code for testing purposes.
  WasmCode* AddCodeForTesting(Handle<Code> code);

  // Use this to set up lazy compilation for the entire module ({UseLazyStubs})
  // or for individual functions ({UseLazyStub}). It will use the existing
  // {WasmCode::kWasmCompileLazy} runtime stub and populate the jump table with
  // trampolines to that runtime stub.
  void UseLazyStubs();
  void UseLazyStub(uint32_t func_index);

  // Initializes all runtime stubs by setting up entry addresses in the runtime
  // stub table. It must be called exactly once per native module before adding
  // other WasmCode so that runtime stub ids can be resolved during relocation.
  void SetRuntimeStubs(Isolate* isolate);

  // Creates a snapshot of the current state of the code table. This is useful
  // to get a consistent view of the table (e.g. used by the serializer).
  std::vector<WasmCode*> SnapshotCodeTable() const;

  WasmCode* GetCode(uint32_t index) const;
  bool HasCode(uint32_t index) const;

  Address runtime_stub_entry(WasmCode::RuntimeStubId index) const {
    DCHECK_LT(index, WasmCode::kRuntimeStubCount);
    Address entry_address = runtime_stub_entries_[index];
    DCHECK_NE(kNullAddress, entry_address);
    return entry_address;
  }

  Address jump_table_start() const {
    return jump_table_ ? jump_table_->instruction_start() : kNullAddress;
  }

  ptrdiff_t jump_table_offset(uint32_t func_index) const {
    DCHECK_GE(func_index, num_imported_functions());
    return GetCallTargetForFunction(func_index) - jump_table_start();
  }

  bool is_jump_table_slot(Address address) const {
    return jump_table_->contains(address);
  }

  // Transition this module from code relying on trap handlers (i.e. without
  // explicit memory bounds checks) to code that does not require trap handlers
  // (i.e. code with explicit bounds checks).
  // This method must only be called if {use_trap_handler()} is true (it will
  // be false afterwards). All code in this {NativeModule} needs to be re-added
  // after calling this method.
  void DisableTrapHandler();

  // Returns the target to call for the given function (returns a jump table
  // slot within {jump_table_}).
  Address GetCallTargetForFunction(uint32_t func_index) const;

  // Reverse lookup from a given call target (i.e. a jump table slot as the
  // above {GetCallTargetForFunction} returns) to a function index.
  uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;
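
  // Illustrative round trip through the jump table (a sketch; {native_module}
  // and {func_index} are hypothetical):
  //
  //   Address target = native_module->GetCallTargetForFunction(func_index);
  //   DCHECK(native_module->is_jump_table_slot(target));
  //   DCHECK_EQ(func_index,
  //             native_module->GetFunctionIndexFromJumpTableSlot(target));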

  bool SetExecutable(bool executable);

  // For cctests, which build both the WasmModule and the runtime objects on
  // the fly and bypass the instance builder pipeline.
  void ReserveCodeTableForTesting(uint32_t max_functions);

  void LogWasmCodes(Isolate* isolate);

  CompilationState* compilation_state() { return compilation_state_.get(); }

  // Create a {CompilationEnv} object for compilation. The caller has to ensure
  // that the {WasmModule} pointer stays valid while the {CompilationEnv} is
  // being used.
  CompilationEnv CreateCompilationEnv() const;

  uint32_t num_functions() const {
    return module_->num_declared_functions + module_->num_imported_functions;
  }
  uint32_t num_imported_functions() const {
    return module_->num_imported_functions;
  }
  UseTrapHandler use_trap_handler() const { return use_trap_handler_; }
  void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
  bool lazy_compile_frozen() const { return lazy_compile_frozen_; }
  void set_lazy_compilation(bool lazy) { lazy_compilation_ = lazy; }
  bool lazy_compilation() const { return lazy_compilation_; }
  Vector<const uint8_t> wire_bytes() const { return wire_bytes_->as_vector(); }
  const WasmModule* module() const { return module_.get(); }
  std::shared_ptr<const WasmModule> shared_module() const { return module_; }
  size_t committed_code_space() const { return committed_code_space_.load(); }
  WasmEngine* engine() const { return engine_; }

  void SetWireBytes(OwnedVector<const uint8_t> wire_bytes);

  WasmCode* Lookup(Address) const;

  WasmImportWrapperCache* import_wrapper_cache() const {
    return import_wrapper_cache_.get();
  }

  ~NativeModule();

  const WasmFeatures& enabled_features() const { return enabled_features_; }

  const char* GetRuntimeStubName(Address runtime_stub_entry) const;

  // Sample the current code size of this module into the given counters.
  enum CodeSamplingTime : int8_t { kAfterBaseline, kAfterTopTier, kSampling };
  void SampleCodeSize(Counters*, CodeSamplingTime) const;

  WasmCode* AddCompiledCode(WasmCompilationResult);
  std::vector<WasmCode*> AddCompiledCode(Vector<WasmCompilationResult>);

  // Free a set of functions of this module. Uncommits whole pages if possible.
  // The given vector must be ordered by the instruction start address, and all
  // {WasmCode} objects must not be used any more.
  void FreeCode(Vector<WasmCode* const>);

 private:
  friend class WasmCode;
  friend class WasmCodeManager;
  friend class NativeModuleModificationScope;

  // Private constructor, called via {WasmCodeManager::NewNativeModule()}.
  NativeModule(WasmEngine* engine, const WasmFeatures& enabled_features,
               bool can_request_more, VirtualMemory code_space,
               std::shared_ptr<const WasmModule> module,
               std::shared_ptr<Counters> async_counters,
               std::shared_ptr<NativeModule>* shared_this);

  std::unique_ptr<WasmCode> AddCodeWithCodeSpace(
      uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
      uint32_t tagged_parameter_slots,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      ExecutionTier tier, Vector<uint8_t> code_space);

  // Add and publish anonymous code.
  WasmCode* AddAndPublishAnonymousCode(Handle<Code>, WasmCode::Kind kind,
                                       const char* name = nullptr);
  // Allocate code space. Returns a valid buffer or fails with OOM (crash).
  Vector<byte> AllocateForCode(size_t size);

  WasmCode* CreateEmptyJumpTable(uint32_t jump_table_size);

  // Hold the {allocation_mutex_} when calling this method.
  bool has_interpreter_redirection(uint32_t func_index) {
    DCHECK_LT(func_index, num_functions());
    DCHECK_LE(module_->num_imported_functions, func_index);
    if (!interpreter_redirections_) return false;
    uint32_t bitset_idx = func_index - module_->num_imported_functions;
    uint8_t byte = interpreter_redirections_[bitset_idx / kBitsPerByte];
    return byte & (1 << (bitset_idx % kBitsPerByte));
  }

  // Hold the {allocation_mutex_} when calling this method.
  void SetInterpreterRedirection(uint32_t func_index) {
    DCHECK_LT(func_index, num_functions());
    DCHECK_LE(module_->num_imported_functions, func_index);
    if (!interpreter_redirections_) {
      interpreter_redirections_.reset(
          new uint8_t[RoundUp<kBitsPerByte>(module_->num_declared_functions) /
                      kBitsPerByte]{});
    }
    uint32_t bitset_idx = func_index - module_->num_imported_functions;
    uint8_t& byte = interpreter_redirections_[bitset_idx / kBitsPerByte];
    byte |= 1 << (bitset_idx % kBitsPerByte);
  }
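
  // Worked example for the bitset indexing above (hypothetical numbers): with
  // 3 imported functions, func_index 13 yields bitset_idx 10, which lives in
  // byte 10 / 8 = 1 of {interpreter_redirections_} under mask
  // 1 << (10 % 8) == 0x04.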

  // Features enabled for this module. We keep a copy of the features that
  // were enabled at the time of the creation of this native module,
  // to be consistent across asynchronous compilations later.
  const WasmFeatures enabled_features_;

  // The decoded module, stored in a shared_ptr such that background compile
  // tasks can keep it alive.
  std::shared_ptr<const WasmModule> module_;

  // Wire bytes, held in a shared_ptr so they can be kept alive by the
  // {WireBytesStorage}, held by background compile tasks.
  std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;

  // Contains entry points for runtime stub calls via {WASM_STUB_CALL}.
  Address runtime_stub_entries_[WasmCode::kRuntimeStubCount] = {kNullAddress};

  // Jump table used for runtime stubs (i.e. trampolines to embedded builtins).
  WasmCode* runtime_stub_table_ = nullptr;

  // Jump table used to easily redirect wasm function calls.
  WasmCode* jump_table_ = nullptr;

  // The compilation state keeps track of compilation tasks for this module.
  // Note that its destructor blocks until all tasks are finished/aborted and
  // hence needs to be destructed first when this native module dies.
  std::unique_ptr<CompilationState> compilation_state_;

  // A cache of the import wrappers, keyed on the kind and signature.
  std::unique_ptr<WasmImportWrapperCache> import_wrapper_cache_;

  // This mutex protects concurrent calls to {AddCode} and friends.
  mutable base::Mutex allocation_mutex_;

  //////////////////////////////////////////////////////////////////////////////
  // Protected by {allocation_mutex_}:

  // Holds all allocated code objects. Mutable because it might get sorted in
  // {Lookup()}.
  mutable std::vector<std::unique_ptr<WasmCode>> owned_code_;

  // Keeps track of the portion of {owned_code_} that is sorted.
  // Entries [0, owned_code_sorted_portion_) are known to be sorted.
  // Mutable because it might get modified in {Lookup()}.
  mutable size_t owned_code_sorted_portion_ = 0;

  std::unique_ptr<WasmCode*[]> code_table_;

  // Null if no redirections exist, otherwise a bitset over all functions in
  // this module marking those functions that have been redirected.
  std::unique_ptr<uint8_t[]> interpreter_redirections_;

  DisjointAllocationPool free_code_space_;
  DisjointAllocationPool allocated_code_space_;
  std::list<VirtualMemory> owned_code_space_;

  // End of fields protected by {allocation_mutex_}.
  //////////////////////////////////////////////////////////////////////////////

  WasmEngine* const engine_;
  std::atomic<size_t> committed_code_space_{0};
  std::atomic<size_t> generated_code_size_{0};
  int modification_scope_depth_ = 0;
  bool can_request_more_memory_;
  UseTrapHandler use_trap_handler_ = kNoTrapHandler;
  bool is_executable_ = false;
  bool lazy_compile_frozen_ = false;
  bool lazy_compilation_ = false;

  DISALLOW_COPY_AND_ASSIGN(NativeModule);
};

class V8_EXPORT_PRIVATE WasmCodeManager final {
 public:
  explicit WasmCodeManager(WasmMemoryTracker* memory_tracker,
                           size_t max_committed);

#ifdef DEBUG
  ~WasmCodeManager() {
    // No more committed code space.
    DCHECK_EQ(0, total_committed_code_space_.load());
  }
#endif

  NativeModule* LookupNativeModule(Address pc) const;
  WasmCode* LookupCode(Address pc) const;
  size_t committed_code_space() const {
    return total_committed_code_space_.load();
  }

  void SetMaxCommittedMemoryForTesting(size_t limit);

  static size_t EstimateNativeModuleCodeSize(const WasmModule* module);
  static size_t EstimateNativeModuleNonCodeSize(const WasmModule* module);

 private:
  friend class NativeModule;
  friend class WasmEngine;

  std::shared_ptr<NativeModule> NewNativeModule(
      WasmEngine* engine, Isolate* isolate,
      const WasmFeatures& enabled_features, size_t code_size_estimate,
      bool can_request_more, std::shared_ptr<const WasmModule> module);

  V8_WARN_UNUSED_RESULT VirtualMemory TryAllocate(size_t size,
                                                  void* hint = nullptr);
  bool Commit(Address, size_t);
  // Currently we uncommit a whole module at once, so we only need to account
  // for the freed memory size. We do that in {FreeNativeModule}; there is no
  // separate Uncommit.

  void FreeNativeModule(NativeModule*);

  void AssignRanges(Address start, Address end, NativeModule*);

  WasmMemoryTracker* const memory_tracker_;

  size_t max_committed_code_space_;

  std::atomic<size_t> total_committed_code_space_;
  // If the committed code space exceeds {critical_committed_code_space_}, then
  // we trigger a GC before creating the next module. This value is set to the
  // currently committed space plus 50% of the available code space on creation
  // and updated after each GC.
  std::atomic<size_t> critical_committed_code_space_;
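
  // Worked example (hypothetical numbers): with a maximum of 1024 MB and
  // 200 MB currently committed, the threshold is set to
  // 200 MB + (1024 MB - 200 MB) / 2 = 612 MB.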

  mutable base::Mutex native_modules_mutex_;

  //////////////////////////////////////////////////////////////////////////////
  // Protected by {native_modules_mutex_}:

  std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;

  // End of fields protected by {native_modules_mutex_}.
  //////////////////////////////////////////////////////////////////////////////

  DISALLOW_COPY_AND_ASSIGN(WasmCodeManager);
};

// Within the scope, the native_module is writable and not executable.
// At the scope's destruction, the native_module is executable and not
// writable. These states are enforced regardless of the native_module's state
// when entering the scope.
// We currently mark the entire module's memory W^X:
//  - for AOT, that's as efficient as it can be.
//  - for lazy compilation, we do not have a heuristic for functions that may
//    need patching, and even if we did, the resulting set of pages may be
//    fragmented. Currently, we try to keep the number of syscalls low.
//  - a similar argument applies to debugging.
class NativeModuleModificationScope final {
 public:
  explicit NativeModuleModificationScope(NativeModule* native_module);
  ~NativeModuleModificationScope();

 private:
  NativeModule* native_module_;
};
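
// Illustrative usage (a sketch; {native_module} is hypothetical):
//
//   {
//     NativeModuleModificationScope scope(native_module);
//     // Code memory is writable and not executable here; patch away.
//   }
//   // Code memory is executable and not writable again.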

// {WasmCodeRefScope}s form a perfect stack. New {WasmCode} pointers generated
// by e.g. creating new code or looking up code by its address are added to the
// top-most {WasmCodeRefScope}.
class V8_EXPORT_PRIVATE WasmCodeRefScope {
 public:
  WasmCodeRefScope();
  ~WasmCodeRefScope();

  // Register a {WasmCode} reference in the current {WasmCodeRefScope}. Fails
  // if there is no current scope.
  static void AddRef(WasmCode*);

 private:
  WasmCodeRefScope* const previous_scope_;
  std::unordered_set<WasmCode*> code_ptrs_;

  DISALLOW_COPY_AND_ASSIGN(WasmCodeRefScope);
};
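
// Illustrative usage (a sketch; {native_module} and {pc} are hypothetical):
//
//   {
//     WasmCodeRefScope code_ref_scope;
//     WasmCode* code = native_module->Lookup(pc);
//     // {code} is kept alive at least until the end of this scope.
//   }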

// Similarly to a global handle, a {GlobalWasmCodeRef} stores a single
// ref-counted pointer to a {WasmCode} object.
class GlobalWasmCodeRef {
 public:
  explicit GlobalWasmCodeRef(WasmCode* code,
                             std::shared_ptr<NativeModule> native_module)
      : code_(code), native_module_(std::move(native_module)) {
    code_->IncRef();
  }

  ~GlobalWasmCodeRef() {
    if (code_->DecRef()) code_->native_module()->FreeCode(VectorOf(&code_, 1));
  }

  // Get a pointer to the contained {WasmCode} object. This is only guaranteed
  // to exist as long as this {GlobalWasmCodeRef} exists.
  WasmCode* code() const { return code_; }

 private:
  WasmCode* const code_;
  // Also keep the {NativeModule} alive.
  const std::shared_ptr<NativeModule> native_module_;
  DISALLOW_COPY_AND_ASSIGN(GlobalWasmCodeRef);
};
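
// Illustrative usage (a sketch; {code} is a published {WasmCode*} and
// {native_module} its owning {std::shared_ptr<NativeModule>}):
//
//   GlobalWasmCodeRef ref(code, native_module);
//   // ref.code() stays valid for the lifetime of {ref}, independent of any
//   // {WasmCodeRefScope}.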

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_WASM_CODE_MANAGER_H_