// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_WASM_CODE_MANAGER_H_
#define V8_WASM_WASM_CODE_MANAGER_H_

#include <atomic>
#include <list>
#include <map>
#include <memory>
#include <unordered_set>
#include <utility>
#include <vector>

#include "src/base/macros.h"
#include "src/base/optional.h"
#include "src/builtins/builtins-definitions.h"
#include "src/handles.h"
#include "src/trap-handler/trap-handler.h"
#include "src/vector.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/wasm-features.h"
#include "src/wasm/wasm-limits.h"
#include "src/wasm/wasm-tier.h"

namespace v8 {
namespace internal {

class Code;
class CodeDesc;
class Isolate;

namespace wasm {

class NativeModule;
class WasmCodeManager;
struct WasmCompilationResult;
class WasmEngine;
class WasmMemoryTracker;
class WasmImportWrapperCache;
struct WasmModule;

// Sorted, disjoint, and non-adjacent memory regions. Each region has the form
// [start, end). Adjacent regions like [start, end) and [end, other_end) never
// appear separately; they are always reduced to [start, other_end).
class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 public:
  DisjointAllocationPool() = default;

  explicit DisjointAllocationPool(base::AddressRegion region)
      : regions_({region}) {}

  DisjointAllocationPool(DisjointAllocationPool&& other) V8_NOEXCEPT = default;
  DisjointAllocationPool& operator=(DisjointAllocationPool&& other)
      V8_NOEXCEPT = default;

  // Merge the given region into this object while preserving the ordering of
  // the regions. The caller must ensure that the region does not intersect any
  // region already in this pool - for example, because it was obtained from a
  // previous Allocate.
  void Merge(base::AddressRegion);

  // Allocate a contiguous region of size {size}. Return an empty region on
  // failure.
  base::AddressRegion Allocate(size_t size);

  bool IsEmpty() const { return regions_.empty(); }
  const std::list<base::AddressRegion>& regions() const { return regions_; }

 private:
  std::list<base::AddressRegion> regions_;

  DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool);
};
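
// A minimal usage sketch (not part of the API; the addresses are
// hypothetical):
//
//   DisjointAllocationPool pool({0x10000, 0x1000});  // manages [0x10000, 0x11000)
//   base::AddressRegion r = pool.Allocate(0x400);    // r == [0x10000, 0x10400)
//   if (r.size() == 0) { /* allocation failed */ }
//   pool.Merge(r);  // return the region; the pool is one region again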

class V8_EXPORT_PRIVATE WasmCode final {
 public:
  enum Kind {
    kFunction,
    kWasmToJsWrapper,
    kRuntimeStub,
    kInterpreterEntry,
    kJumpTable
  };

  // Each runtime stub is identified by an id. This id is used to reference the
  // stub via {RelocInfo::WASM_STUB_CALL} and gets resolved during relocation.
  enum RuntimeStubId {
#define DEF_ENUM(Name) k##Name,
#define DEF_ENUM_TRAP(Name) kThrowWasm##Name,
    WASM_RUNTIME_STUB_LIST(DEF_ENUM, DEF_ENUM_TRAP)
#undef DEF_ENUM_TRAP
#undef DEF_ENUM
        kRuntimeStubCount
  };

  Vector<byte> instructions() const { return instructions_; }
  Address instruction_start() const {
    return reinterpret_cast<Address>(instructions_.start());
  }
  Vector<const byte> reloc_info() const { return reloc_info_.as_vector(); }
  Vector<const byte> source_positions() const {
    return source_position_table_.as_vector();
  }

  uint32_t index() const {
    DCHECK(!IsAnonymous());
    return index_;
  }
  // Anonymous functions are functions that don't carry an index.
  bool IsAnonymous() const { return index_ == kAnonymousFuncIndex; }
  Kind kind() const { return kind_; }
  NativeModule* native_module() const { return native_module_; }
  ExecutionTier tier() const { return tier_; }
  Address constant_pool() const;
  Address handler_table() const;
  uint32_t handler_table_size() const;
  Address code_comments() const;
  uint32_t code_comments_size() const;
  size_t constant_pool_offset() const { return constant_pool_offset_; }
  size_t safepoint_table_offset() const { return safepoint_table_offset_; }
  size_t handler_table_offset() const { return handler_table_offset_; }
  size_t code_comments_offset() const { return code_comments_offset_; }
  size_t unpadded_binary_size() const { return unpadded_binary_size_; }
  uint32_t stack_slots() const { return stack_slots_; }
  uint32_t tagged_parameter_slots() const { return tagged_parameter_slots_; }
  bool is_liftoff() const { return tier_ == ExecutionTier::kLiftoff; }
  bool contains(Address pc) const {
    return reinterpret_cast<Address>(instructions_.start()) <= pc &&
           pc < reinterpret_cast<Address>(instructions_.end());
  }

  Vector<trap_handler::ProtectedInstructionData> protected_instructions()
      const {
    return protected_instructions_.as_vector();
  }

  void Validate() const;
  void Print(const char* name = nullptr) const;
  void MaybePrint(const char* name = nullptr) const;
  void Disassemble(const char* name, std::ostream& os,
                   Address current_pc = kNullAddress) const;

  static bool ShouldBeLogged(Isolate* isolate);
  void LogCode(Isolate* isolate) const;

  ~WasmCode();

  void IncRef() {
    int old_val = ref_count_.fetch_add(1, std::memory_order_relaxed);
    DCHECK_LE(1, old_val);
    DCHECK_GT(kMaxInt, old_val);
    USE(old_val);
  }

  // Decrement the ref count. Returns whether this code becomes dead and needs
  // to be freed.
  V8_WARN_UNUSED_RESULT bool DecRef() {
    int old_count = ref_count_.load(std::memory_order_relaxed);
    while (true) {
      DCHECK_LE(1, old_count);
      if (V8_UNLIKELY(old_count == 1)) return DecRefOnPotentiallyDeadCode();
      if (ref_count_.compare_exchange_weak(old_count, old_count - 1,
                                           std::memory_order_relaxed)) {
        return false;
      }
    }
  }

  // Decrement the ref count on a set of {WasmCode} objects, potentially
  // belonging to different {NativeModule}s. Dead code will be deleted.
  static void DecrementRefCount(Vector<WasmCode*>);

  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };

  static constexpr uint32_t kAnonymousFuncIndex = 0xffffffff;
  STATIC_ASSERT(kAnonymousFuncIndex > kV8MaxWasmFunctions);

 private:
  friend class NativeModule;

  WasmCode(NativeModule* native_module, uint32_t index,
           Vector<byte> instructions, uint32_t stack_slots,
           uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
           size_t handler_table_offset, size_t constant_pool_offset,
           size_t code_comments_offset, size_t unpadded_binary_size,
           OwnedVector<trap_handler::ProtectedInstructionData>
               protected_instructions,
           OwnedVector<const byte> reloc_info,
           OwnedVector<const byte> source_position_table, Kind kind,
           ExecutionTier tier)
      : instructions_(instructions),
        reloc_info_(std::move(reloc_info)),
        source_position_table_(std::move(source_position_table)),
        native_module_(native_module),
        index_(index),
        kind_(kind),
        constant_pool_offset_(constant_pool_offset),
        stack_slots_(stack_slots),
        tagged_parameter_slots_(tagged_parameter_slots),
        safepoint_table_offset_(safepoint_table_offset),
        handler_table_offset_(handler_table_offset),
        code_comments_offset_(code_comments_offset),
        unpadded_binary_size_(unpadded_binary_size),
        protected_instructions_(std::move(protected_instructions)),
        tier_(tier) {
    DCHECK_LE(safepoint_table_offset, unpadded_binary_size);
    DCHECK_LE(handler_table_offset, unpadded_binary_size);
    DCHECK_LE(code_comments_offset, unpadded_binary_size);
    DCHECK_LE(constant_pool_offset, unpadded_binary_size);
  }

  // Code objects that have been registered with the global trap handler within
  // this process will have a {trap_handler_index} associated with them.
  size_t trap_handler_index() const;
  void set_trap_handler_index(size_t);
  bool HasTrapHandlerIndex() const;

  // Register protected instruction information with the trap handler. Sets
  // trap_handler_index.
  void RegisterTrapHandlerData();

  // Slow path for {DecRef}: The code becomes potentially dead.
  // Returns whether this code becomes dead and needs to be freed.
  bool DecRefOnPotentiallyDeadCode();

  Vector<byte> instructions_;
  OwnedVector<const byte> reloc_info_;
  OwnedVector<const byte> source_position_table_;
  NativeModule* native_module_ = nullptr;
  uint32_t index_;
  Kind kind_;
  size_t constant_pool_offset_ = 0;
  uint32_t stack_slots_ = 0;
  // Number of tagged parameters passed to this function via the stack. This
  // value is used by the stack walker (e.g. GC) to find references.
  uint32_t tagged_parameter_slots_ = 0;
  // We care about safepoint data for wasm-to-JS functions, since there may be
  // stack/register tagged values for large number conversions.
  size_t safepoint_table_offset_ = 0;
  size_t handler_table_offset_ = 0;
  size_t code_comments_offset_ = 0;
  size_t unpadded_binary_size_ = 0;
  intptr_t trap_handler_index_ = -1;
  OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  ExecutionTier tier_;

  // WasmCode is ref counted. Counters are held by:
  // 1) The jump table.
  // 2) Function tables.
  // 3) {WasmCodeRefScope}s.
  // 4) The set of potentially dead code in the {WasmEngine}.
  // If a decrement of (1) or (2) would drop the ref count to 0, that code
  // becomes a candidate for garbage collection. At that point, we add
  // ref counts for (4) *before* decrementing the counter to ensure the code
  // stays alive as long as it's being used. Once the ref count drops to zero,
  // the code object is deleted and the memory for the machine code is freed.
  std::atomic<int> ref_count_{1};

  DISALLOW_COPY_AND_ASSIGN(WasmCode);
};
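
// A minimal sketch of the ref-count discipline for a raw {WasmCode*} held
// outside any {WasmCodeRefScope} (this mirrors what {GlobalWasmCodeRef} below
// does; {code} is a hypothetical pointer obtained with a live reference):
//
//   code->IncRef();         // take ownership of one reference
//   // ... use {code} ...
//   if (code->DecRef()) {   // true: this was the last reference
//     code->native_module()->FreeCode(VectorOf(&code, 1));
//   }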

// Return a textual description of the kind.
const char* GetWasmCodeKindAsString(WasmCode::Kind);

class V8_EXPORT_PRIVATE NativeModule final {
 public:
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
  static constexpr bool kCanAllocateMoreMemory = false;
#else
  static constexpr bool kCanAllocateMoreMemory = true;
#endif

  // {AddCode} is thread-safe w.r.t. other calls to {AddCode} or methods adding
  // code below, i.e. it can be called concurrently from background threads.
  // The returned code still needs to be published via {PublishCode}.
  std::unique_ptr<WasmCode> AddCode(
      uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
      uint32_t tagged_parameter_slots,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      ExecutionTier tier);

  // {PublishCode} makes the code available to the system by entering it into
  // the code table and patching the jump table. It returns a raw pointer to
  // the given {WasmCode} object.
  WasmCode* PublishCode(std::unique_ptr<WasmCode>);
  // Hold the {allocation_mutex_} when calling {PublishCodeLocked}.
  WasmCode* PublishCodeLocked(std::unique_ptr<WasmCode>);

  WasmCode* AddDeserializedCode(
      uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
      uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
      size_t handler_table_offset, size_t constant_pool_offset,
      size_t code_comments_offset, size_t unpadded_binary_size,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> reloc_info,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      ExecutionTier tier);

  // Adds anonymous code for testing purposes.
  WasmCode* AddCodeForTesting(Handle<Code> code);

  // Use this to set up lazy compilation for the entire module ({UseLazyStubs})
  // or for individual functions ({UseLazyStub}). It will use the existing
  // {WasmCode::kWasmCompileLazy} runtime stub and populate the jump table with
  // trampolines to that runtime stub.
  void UseLazyStubs();
  void UseLazyStub(uint32_t func_index);

  // Initializes all runtime stubs by setting up entry addresses in the runtime
  // stub table. It must be called exactly once per native module before adding
  // other WasmCode so that runtime stub ids can be resolved during relocation.
  void SetRuntimeStubs(Isolate* isolate);

  // Creates a snapshot of the current state of the code table. This is useful
  // to get a consistent view of the table (e.g. used by the serializer).
  std::vector<WasmCode*> SnapshotCodeTable() const;

  WasmCode* GetCode(uint32_t index) const;
  bool HasCode(uint32_t index) const;

  Address runtime_stub_entry(WasmCode::RuntimeStubId index) const {
    DCHECK_LT(index, WasmCode::kRuntimeStubCount);
    Address entry_address = runtime_stub_entries_[index];
    DCHECK_NE(kNullAddress, entry_address);
    return entry_address;
  }

  Address jump_table_start() const {
    return jump_table_ ? jump_table_->instruction_start() : kNullAddress;
  }

  ptrdiff_t jump_table_offset(uint32_t func_index) const {
    DCHECK_GE(func_index, num_imported_functions());
    return GetCallTargetForFunction(func_index) - jump_table_start();
  }

  bool is_jump_table_slot(Address address) const {
    return jump_table_->contains(address);
  }

  // Returns the target to call for the given function (returns a jump table
  // slot within {jump_table_}).
  Address GetCallTargetForFunction(uint32_t func_index) const;

  // Reverse lookup from a given call target (i.e. a jump table slot as the
  // above {GetCallTargetForFunction} returns) to a function index.
  uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;

  bool SetExecutable(bool executable);

  // For cctests, where we build both the WasmModule and the runtime objects on
  // the fly and bypass the instance builder pipeline.
  void ReserveCodeTableForTesting(uint32_t max_functions);

  void LogWasmCodes(Isolate* isolate);

  CompilationState* compilation_state() { return compilation_state_.get(); }

  // Create a {CompilationEnv} object for compilation. The caller has to ensure
  // that the {WasmModule} pointer stays valid while the {CompilationEnv} is
  // being used.
  CompilationEnv CreateCompilationEnv() const;

  uint32_t num_functions() const {
    return module_->num_declared_functions + module_->num_imported_functions;
  }
  uint32_t num_imported_functions() const {
    return module_->num_imported_functions;
  }
  UseTrapHandler use_trap_handler() const { return use_trap_handler_; }
  void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
  bool lazy_compile_frozen() const { return lazy_compile_frozen_; }
  void set_lazy_compilation(bool lazy) { lazy_compilation_ = lazy; }
  bool lazy_compilation() const { return lazy_compilation_; }
  Vector<const uint8_t> wire_bytes() const { return wire_bytes_->as_vector(); }
  const WasmModule* module() const { return module_.get(); }
  std::shared_ptr<const WasmModule> shared_module() const { return module_; }
  size_t committed_code_space() const { return committed_code_space_.load(); }
  WasmEngine* engine() const { return engine_; }

  void SetWireBytes(OwnedVector<const uint8_t> wire_bytes);

  WasmCode* Lookup(Address) const;

  WasmImportWrapperCache* import_wrapper_cache() const {
    return import_wrapper_cache_.get();
  }

  ~NativeModule();

  const WasmFeatures& enabled_features() const { return enabled_features_; }

  const char* GetRuntimeStubName(Address runtime_stub_entry) const;

  // Sample the current code size of this module to the given counters.
  enum CodeSamplingTime : int8_t { kAfterBaseline, kAfterTopTier, kSampling };
  void SampleCodeSize(Counters*, CodeSamplingTime) const;

  WasmCode* AddCompiledCode(WasmCompilationResult);
  std::vector<WasmCode*> AddCompiledCode(Vector<WasmCompilationResult>);

  // Free a set of functions of this module. Uncommits whole pages if possible.
  // The given vector must be ordered by the instruction start address, and all
  // {WasmCode} objects must not be used any more.
  void FreeCode(Vector<WasmCode* const>);

 private:
  friend class WasmCode;
  friend class WasmCodeManager;
  friend class NativeModuleModificationScope;

  // Private constructor, called via {WasmCodeManager::NewNativeModule()}.
  NativeModule(WasmEngine* engine, const WasmFeatures& enabled_features,
               bool can_request_more, VirtualMemory code_space,
               std::shared_ptr<const WasmModule> module,
               std::shared_ptr<Counters> async_counters,
               std::shared_ptr<NativeModule>* shared_this);

  std::unique_ptr<WasmCode> AddCodeWithCodeSpace(
      uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
      uint32_t tagged_parameter_slots,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      ExecutionTier tier, Vector<uint8_t> code_space);

  // Add and publish anonymous code.
  WasmCode* AddAndPublishAnonymousCode(Handle<Code>, WasmCode::Kind kind,
                                       const char* name = nullptr);
  // Allocate code space. Returns a valid buffer or fails with OOM (crash).
  Vector<byte> AllocateForCode(size_t size);

  WasmCode* CreateEmptyJumpTable(uint32_t jump_table_size);

  // Hold the {allocation_mutex_} when calling this method.
  bool has_interpreter_redirection(uint32_t func_index) {
    DCHECK_LT(func_index, num_functions());
    DCHECK_LE(module_->num_imported_functions, func_index);
    if (!interpreter_redirections_) return false;
    uint32_t bitset_idx = func_index - module_->num_imported_functions;
    uint8_t byte = interpreter_redirections_[bitset_idx / kBitsPerByte];
    return byte & (1 << (bitset_idx % kBitsPerByte));
  }

  // Hold the {allocation_mutex_} when calling this method.
  void SetInterpreterRedirection(uint32_t func_index) {
    DCHECK_LT(func_index, num_functions());
    DCHECK_LE(module_->num_imported_functions, func_index);
    if (!interpreter_redirections_) {
      interpreter_redirections_.reset(
          new uint8_t[RoundUp<kBitsPerByte>(module_->num_declared_functions) /
                      kBitsPerByte]{});
    }
    uint32_t bitset_idx = func_index - module_->num_imported_functions;
    uint8_t& byte = interpreter_redirections_[bitset_idx / kBitsPerByte];
    byte |= 1 << (bitset_idx % kBitsPerByte);
  }
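
  // Worked example (hypothetical numbers): with 3 imported functions,
  // func_index 10 maps to bitset_idx 7, i.e. byte 7 / 8 == 0 of
  // {interpreter_redirections_} and bit mask 1 << (7 % 8) == 0x80.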

  // Features enabled for this module. We keep a copy of the features that
  // were enabled when this native module was created, to stay consistent
  // across later asynchronous compilations.
  const WasmFeatures enabled_features_;

  // The decoded module, stored in a shared_ptr such that background compile
  // tasks can keep this alive.
  std::shared_ptr<const WasmModule> module_;

  // Wire bytes, held in a shared_ptr so they can be kept alive by the
  // {WireBytesStorage}, held by background compile tasks.
  std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;

  // Contains entry points for runtime stub calls via {WASM_STUB_CALL}.
  Address runtime_stub_entries_[WasmCode::kRuntimeStubCount] = {kNullAddress};

  // Jump table used for runtime stubs (i.e. trampolines to embedded builtins).
  WasmCode* runtime_stub_table_ = nullptr;

  // Jump table used to easily redirect wasm function calls.
  WasmCode* jump_table_ = nullptr;

  // The compilation state keeps track of compilation tasks for this module.
  // Note that its destructor blocks until all tasks are finished/aborted and
  // hence needs to be destructed first when this native module dies.
  std::unique_ptr<CompilationState> compilation_state_;

  // A cache of the import wrappers, keyed on the kind and signature.
  std::unique_ptr<WasmImportWrapperCache> import_wrapper_cache_;

  // This mutex protects concurrent calls to {AddCode} and friends.
  mutable base::Mutex allocation_mutex_;

  //////////////////////////////////////////////////////////////////////////
  // Protected by {allocation_mutex_}:

  // Holds all allocated code objects. Mutable because it might get sorted in
  // {Lookup()}.
  mutable std::vector<std::unique_ptr<WasmCode>> owned_code_;

  // Keep track of the portion of {owned_code_} that is sorted.
  // Entries [0, owned_code_sorted_portion_) are known to be sorted.
  // Mutable because it might get modified in {Lookup()}.
  mutable size_t owned_code_sorted_portion_ = 0;

  std::unique_ptr<WasmCode*[]> code_table_;

  // Null if no redirections exist, otherwise a bitset over all functions in
  // this module marking those functions that have been redirected.
  std::unique_ptr<uint8_t[]> interpreter_redirections_;

  DisjointAllocationPool free_code_space_;
  DisjointAllocationPool allocated_code_space_;
  std::list<VirtualMemory> owned_code_space_;

  // End of fields protected by {allocation_mutex_}.
  //////////////////////////////////////////////////////////////////////////

  WasmEngine* const engine_;
  std::atomic<size_t> committed_code_space_{0};
  std::atomic<size_t> generated_code_size_{0};
  int modification_scope_depth_ = 0;
  bool can_request_more_memory_;
  UseTrapHandler use_trap_handler_ = kNoTrapHandler;
  bool is_executable_ = false;
  bool lazy_compile_frozen_ = false;
  bool lazy_compilation_ = false;

  DISALLOW_COPY_AND_ASSIGN(NativeModule);
};

class V8_EXPORT_PRIVATE WasmCodeManager final {
 public:
  explicit WasmCodeManager(WasmMemoryTracker* memory_tracker,
                           size_t max_committed);

#ifdef DEBUG
  ~WasmCodeManager() {
    // No more committed code space.
    DCHECK_EQ(0, total_committed_code_space_.load());
  }
#endif

  NativeModule* LookupNativeModule(Address pc) const;
  WasmCode* LookupCode(Address pc) const;
  size_t committed_code_space() const {
    return total_committed_code_space_.load();
  }

  void SetMaxCommittedMemoryForTesting(size_t limit);

  static size_t EstimateNativeModuleCodeSize(const WasmModule* module);
  static size_t EstimateNativeModuleNonCodeSize(const WasmModule* module);

 private:
  friend class NativeModule;
  friend class WasmEngine;

  std::shared_ptr<NativeModule> NewNativeModule(
      WasmEngine* engine, Isolate* isolate,
      const WasmFeatures& enabled_features, size_t code_size_estimate,
      bool can_request_more, std::shared_ptr<const WasmModule> module);

  V8_WARN_UNUSED_RESULT VirtualMemory TryAllocate(size_t size,
                                                  void* hint = nullptr);
  bool Commit(Address, size_t);
  // Currently, we uncommit a whole module at once, so all we need to do is
  // account for the freed memory size; we do that in {FreeNativeModule}.
  // There is no separate Uncommit.

  void FreeNativeModule(NativeModule*);

  void AssignRanges(Address start, Address end, NativeModule*);

  WasmMemoryTracker* const memory_tracker_;

  size_t max_committed_code_space_;

  std::atomic<size_t> total_committed_code_space_;
  // If the committed code space exceeds {critical_committed_code_space_}, then
  // we trigger a GC before creating the next module. This value is set to the
  // currently committed space plus 50% of the available code space on creation
  // and updated after each GC.
  std::atomic<size_t> critical_committed_code_space_;

  mutable base::Mutex native_modules_mutex_;

  //////////////////////////////////////////////////////////////////////////
  // Protected by {native_modules_mutex_}:

  std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;

  // End of fields protected by {native_modules_mutex_}.
  //////////////////////////////////////////////////////////////////////////

  DISALLOW_COPY_AND_ASSIGN(WasmCodeManager);
};

// Within the scope, the native_module is writable and not executable.
// At the scope's destruction, the native_module is executable and not
// writable. These states are established regardless of the native_module's
// state when entering the scope.
// We currently mark the entire module's memory W^X:
//  - for AOT, that's as efficient as it can be.
//  - for Lazy, we don't have a heuristic for functions that may need patching,
//    and even if we did, the resulting set of pages may be fragmented.
//    Currently, we try to keep the number of syscalls low.
//  - similar argument for debug time.
class NativeModuleModificationScope final {
 public:
  explicit NativeModuleModificationScope(NativeModule* native_module);
  ~NativeModuleModificationScope();

 private:
  NativeModule* native_module_;
};
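
// A minimal usage sketch ({native_module} is a hypothetical {NativeModule*};
// the scope makes the module's pages writable on entry and executable again
// on exit):
//
//   {
//     NativeModuleModificationScope scope(native_module);
//     // ... patch code or jump table slots here ...
//   }  // memory is executable (and not writable) again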

// {WasmCodeRefScope}s form a perfect stack. New {WasmCode} pointers generated
// by e.g. creating new code or looking up code by its address are added to the
// top-most {WasmCodeRefScope}.
class V8_EXPORT_PRIVATE WasmCodeRefScope {
 public:
  WasmCodeRefScope();
  ~WasmCodeRefScope();

  // Register a {WasmCode} reference in the current {WasmCodeRefScope}. Fails
  // if there is no current scope.
  static void AddRef(WasmCode*);

 private:
  WasmCodeRefScope* const previous_scope_;
  std::unordered_set<WasmCode*> code_ptrs_;

  DISALLOW_COPY_AND_ASSIGN(WasmCodeRefScope);
};
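
// A minimal usage sketch ({native_module} and {func_index} are hypothetical),
// assuming the lookup registers its result in the top-most scope as described
// above:
//
//   {
//     WasmCodeRefScope code_ref_scope;
//     WasmCode* code = native_module->GetCode(func_index);
//     // ... {code} is kept alive by the scope here ...
//   }  // ... and may be freed once the scope is gone.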

// Similar to a global handle, a {GlobalWasmCodeRef} stores a single
// ref-counted pointer to a {WasmCode} object.
class GlobalWasmCodeRef {
 public:
  explicit GlobalWasmCodeRef(WasmCode* code,
                             std::shared_ptr<NativeModule> native_module)
      : code_(code), native_module_(std::move(native_module)) {
    code_->IncRef();
  }

  ~GlobalWasmCodeRef() {
    if (code_->DecRef()) code_->native_module()->FreeCode(VectorOf(&code_, 1));
  }

  // Get a pointer to the contained {WasmCode} object. This is only guaranteed
  // to exist as long as this {GlobalWasmCodeRef} exists.
  WasmCode* code() const { return code_; }

 private:
  WasmCode* const code_;
  // Also keep the {NativeModule} alive.
  const std::shared_ptr<NativeModule> native_module_;
  DISALLOW_COPY_AND_ASSIGN(GlobalWasmCodeRef);
};
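
// A minimal usage sketch ({code} and {native_module} are hypothetical: a
// pointer with a live reference and a shared_ptr to its owning module):
//
//   GlobalWasmCodeRef ref(code, native_module);
//   // {ref.code()} stays valid (and the module alive) until {ref} dies.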

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_WASM_CODE_MANAGER_H_