Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_WASM_WASM_CODE_MANAGER_H_
6 : #define V8_WASM_WASM_CODE_MANAGER_H_
7 :
8 : #include <atomic>
9 : #include <list>
10 : #include <map>
11 : #include <memory>
12 : #include <utility>
13 : #include <vector>
14 :
15 : #include "src/base/macros.h"
16 : #include "src/builtins/builtins-definitions.h"
17 : #include "src/handles.h"
18 : #include "src/trap-handler/trap-handler.h"
19 : #include "src/vector.h"
20 : #include "src/wasm/compilation-environment.h"
21 : #include "src/wasm/wasm-features.h"
22 : #include "src/wasm/wasm-limits.h"
23 :
24 : namespace v8 {
25 : namespace internal {
26 :
27 : class Code;
28 : class CodeDesc;
29 : class Isolate;
30 :
31 : namespace wasm {
32 :
33 : class NativeModule;
34 : class WasmCodeManager;
35 : struct WasmCompilationResult;
36 : class WasmEngine;
37 : class WasmMemoryTracker;
38 : class WasmImportWrapperCache;
39 : struct WasmModule;
40 :
// Sorted, disjoint and non-overlapping memory regions. A region is of the
// form [start, end). So there's no [start, end), [end, other_end),
// because that should have been reduced to [start, other_end).
class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 public:
  DisjointAllocationPool() = default;

  // Construct a pool that initially owns exactly {region}.
  explicit DisjointAllocationPool(base::AddressRegion region)
      : regions_({region}) {}

  // Movable but not copyable (see DISALLOW_COPY_AND_ASSIGN below).
  DisjointAllocationPool(DisjointAllocationPool&& other) V8_NOEXCEPT = default;
  DisjointAllocationPool& operator=(DisjointAllocationPool&& other)
      V8_NOEXCEPT = default;

  // Merge the parameter region into this object while preserving ordering of
  // the regions. The assumption is that the passed parameter is not
  // intersecting this object - for example, it was obtained from a previous
  // Allocate.
  void Merge(base::AddressRegion);

  // Allocate a contiguous region of size {size}. Return an empty region on
  // failure.
  base::AddressRegion Allocate(size_t size);

  bool IsEmpty() const { return regions_.empty(); }
  const std::list<base::AddressRegion>& regions() const { return regions_; }

 private:
  // Kept sorted by start address; adjacent regions are coalesced by {Merge}.
  std::list<base::AddressRegion> regions_;

  DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool);
};
73 :
// Metadata plus a view of the machine code for a single wasm code object
// (a compiled function, wrapper, runtime stub, interpreter entry, or jump
// table). Instances are created and owned by a {NativeModule}.
class V8_EXPORT_PRIVATE WasmCode final {
 public:
  enum Kind {
    kFunction,
    kWasmToJsWrapper,
    kRuntimeStub,
    kInterpreterEntry,
    kJumpTable
  };

  // Each runtime stub is identified by an id. This id is used to reference the
  // stub via {RelocInfo::WASM_STUB_CALL} and gets resolved during relocation.
  enum RuntimeStubId {
#define DEF_ENUM(Name) k##Name,
#define DEF_ENUM_TRAP(Name) kThrowWasm##Name,
    WASM_RUNTIME_STUB_LIST(DEF_ENUM, DEF_ENUM_TRAP)
#undef DEF_ENUM_TRAP
#undef DEF_ENUM
    kRuntimeStubCount
  };

  // kOther is used if we have WasmCode that is neither
  // liftoff- nor turbofan-compiled, i.e. if Kind is
  // not a kFunction.
  enum Tier : int8_t { kLiftoff, kTurbofan, kOther };

  Vector<byte> instructions() const { return instructions_; }
  Address instruction_start() const {
    return reinterpret_cast<Address>(instructions_.start());
  }
  Vector<const byte> reloc_info() const { return reloc_info_.as_vector(); }
  Vector<const byte> source_positions() const {
    return source_position_table_.as_vector();
  }

  uint32_t index() const {
    DCHECK(!IsAnonymous());
    return index_;
  }
  // Anonymous functions are functions that don't carry an index.
  bool IsAnonymous() const { return index_ == kAnonymousFuncIndex; }
  Kind kind() const { return kind_; }
  NativeModule* native_module() const { return native_module_; }
  Tier tier() const { return tier_; }
  Address constant_pool() const;
  Address code_comments() const;
  size_t constant_pool_offset() const { return constant_pool_offset_; }
  size_t safepoint_table_offset() const { return safepoint_table_offset_; }
  size_t handler_table_offset() const { return handler_table_offset_; }
  size_t code_comments_offset() const { return code_comments_offset_; }
  size_t unpadded_binary_size() const { return unpadded_binary_size_; }
  uint32_t stack_slots() const { return stack_slots_; }
  uint32_t tagged_parameter_slots() const { return tagged_parameter_slots_; }
  bool is_liftoff() const { return tier_ == kLiftoff; }
  // Returns true iff {pc} lies within this code object's instruction area.
  bool contains(Address pc) const {
    return reinterpret_cast<Address>(instructions_.start()) <= pc &&
           pc < reinterpret_cast<Address>(instructions_.end());
  }

  Vector<trap_handler::ProtectedInstructionData> protected_instructions()
      const {
    return protected_instructions_.as_vector();
  }

  void Validate() const;
  void Print(const char* name = nullptr) const;
  void MaybePrint(const char* name = nullptr) const;
  void Disassemble(const char* name, std::ostream& os,
                   Address current_pc = kNullAddress) const;

  static bool ShouldBeLogged(Isolate* isolate);
  void LogCode(Isolate* isolate) const;

  ~WasmCode();

  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };

  // Sentinel index for code objects without a function index (e.g. stubs,
  // jump tables); must not collide with any valid function index.
  static constexpr uint32_t kAnonymousFuncIndex = 0xffffffff;
  STATIC_ASSERT(kAnonymousFuncIndex > kV8MaxWasmFunctions);

 private:
  friend class NativeModule;

  WasmCode(NativeModule* native_module, uint32_t index,
           Vector<byte> instructions, uint32_t stack_slots,
           uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
           size_t handler_table_offset, size_t constant_pool_offset,
           size_t code_comments_offset, size_t unpadded_binary_size,
           OwnedVector<trap_handler::ProtectedInstructionData>
               protected_instructions,
           OwnedVector<const byte> reloc_info,
           OwnedVector<const byte> source_position_table, Kind kind, Tier tier)
      : instructions_(instructions),
        reloc_info_(std::move(reloc_info)),
        source_position_table_(std::move(source_position_table)),
        native_module_(native_module),
        index_(index),
        kind_(kind),
        constant_pool_offset_(constant_pool_offset),
        stack_slots_(stack_slots),
        tagged_parameter_slots_(tagged_parameter_slots),
        safepoint_table_offset_(safepoint_table_offset),
        handler_table_offset_(handler_table_offset),
        code_comments_offset_(code_comments_offset),
        unpadded_binary_size_(unpadded_binary_size),
        protected_instructions_(std::move(protected_instructions)),
        tier_(tier) {
    // All metadata tables must lie within the unpadded instruction area.
    DCHECK_LE(safepoint_table_offset, unpadded_binary_size);
    DCHECK_LE(handler_table_offset, unpadded_binary_size);
    DCHECK_LE(code_comments_offset, unpadded_binary_size);
    DCHECK_LE(constant_pool_offset, unpadded_binary_size);
  }

  // Code objects that have been registered with the global trap handler within
  // this process, will have a {trap_handler_index} associated with them.
  size_t trap_handler_index() const;
  void set_trap_handler_index(size_t);
  bool HasTrapHandlerIndex() const;

  // Register protected instruction information with the trap handler. Sets
  // trap_handler_index.
  void RegisterTrapHandlerData();

  Vector<byte> instructions_;
  OwnedVector<const byte> reloc_info_;
  OwnedVector<const byte> source_position_table_;
  NativeModule* native_module_ = nullptr;
  uint32_t index_;
  Kind kind_;
  size_t constant_pool_offset_ = 0;
  uint32_t stack_slots_ = 0;
  // Number of tagged parameters passed to this function via the stack. This
  // value is used by the stack walker (e.g. GC) to find references.
  uint32_t tagged_parameter_slots_ = 0;
  // We care about safepoint data for wasm-to-js functions,
  // since there may be stack/register tagged values for large number
  // conversions.
  size_t safepoint_table_offset_ = 0;
  size_t handler_table_offset_ = 0;
  size_t code_comments_offset_ = 0;
  size_t unpadded_binary_size_ = 0;
  // -1 means "not registered with the trap handler"; see
  // {HasTrapHandlerIndex}.
  intptr_t trap_handler_index_ = -1;
  OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  Tier tier_;

  DISALLOW_COPY_AND_ASSIGN(WasmCode);
};
221 :
222 : // Return a textual description of the kind.
223 : const char* GetWasmCodeKindAsString(WasmCode::Kind);
224 :
// Owns all generated code ({WasmCode} objects) for one wasm module, the jump
// tables used to redirect calls, and the underlying virtual-memory code space.
class V8_EXPORT_PRIVATE NativeModule final {
 public:
  // On 64-bit targets the whole code space is reserved up front, so no
  // further memory can be requested later.
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
  static constexpr bool kCanAllocateMoreMemory = false;
#else
  static constexpr bool kCanAllocateMoreMemory = true;
#endif

  // {AddCode} is thread safe w.r.t. other calls to {AddCode} or methods adding
  // code below, i.e. it can be called concurrently from background threads.
  // The returned code still needs to be published via {PublishCode}.
  std::unique_ptr<WasmCode> AddCode(
      uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
      uint32_t tagged_parameter_slots,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      WasmCode::Tier tier);

  // {PublishCode} makes the code available to the system by entering it into
  // the code table and patching the jump table. It returns a raw pointer to the
  // given {WasmCode} object.
  WasmCode* PublishCode(std::unique_ptr<WasmCode>);
  // Hold the {allocation_mutex_} when calling {PublishCodeLocked}.
  WasmCode* PublishCodeLocked(std::unique_ptr<WasmCode>);

  // Re-create a {WasmCode} object from serialized data (see wasm-serialization).
  WasmCode* AddDeserializedCode(
      uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
      uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
      size_t handler_table_offset, size_t constant_pool_offset,
      size_t code_comments_offset, size_t unpadded_binary_size,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> reloc_info,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      WasmCode::Tier tier);

  // Adds anonymous code for testing purposes.
  WasmCode* AddCodeForTesting(Handle<Code> code);

  // Use this to start lazy compilation for the entire module. It will use the
  // existing {WasmCode::kWasmCompileLazy} runtime stub and populate the jump
  // table with trampolines to that runtime stub.
  void SetLazyBuiltin();

  // Initializes all runtime stubs by setting up entry addresses in the runtime
  // stub table. It must be called exactly once per native module before adding
  // other WasmCode so that runtime stub ids can be resolved during relocation.
  void SetRuntimeStubs(Isolate* isolate);

  // Creates a snapshot of the current state of the code table. This is useful
  // to get a consistent view of the table (e.g. used by the serializer).
  std::vector<WasmCode*> SnapshotCodeTable() const;

  // Returns the code object for declared function {index}; {index} must not
  // refer to an imported function.
  WasmCode* code(uint32_t index) const {
    DCHECK_LT(index, num_functions());
    DCHECK_LE(module_->num_imported_functions, index);
    return code_table_[index - module_->num_imported_functions];
  }

  bool has_code(uint32_t index) const { return code(index) != nullptr; }

  // Entry address for the given runtime stub; {SetRuntimeStubs} must have run
  // first (DCHECKed via the null-address check).
  Address runtime_stub_entry(WasmCode::RuntimeStubId index) const {
    DCHECK_LT(index, WasmCode::kRuntimeStubCount);
    Address entry_address = runtime_stub_entries_[index];
    DCHECK_NE(kNullAddress, entry_address);
    return entry_address;
  }

  Address jump_table_start() const {
    return jump_table_ ? jump_table_->instruction_start() : kNullAddress;
  }

  ptrdiff_t jump_table_offset(uint32_t func_index) const {
    DCHECK_GE(func_index, num_imported_functions());
    return GetCallTargetForFunction(func_index) - jump_table_start();
  }

  bool is_jump_table_slot(Address address) const {
    return jump_table_->contains(address);
  }

  // Transition this module from code relying on trap handlers (i.e. without
  // explicit memory bounds checks) to code that does not require trap handlers
  // (i.e. code with explicit bounds checks).
  // This method must only be called if {use_trap_handler()} is true (it will be
  // false afterwards). All code in this {NativeModule} needs to be re-added
  // after calling this method.
  void DisableTrapHandler();

  // Returns the target to call for the given function (returns a jump table
  // slot within {jump_table_}).
  Address GetCallTargetForFunction(uint32_t func_index) const;

  // Reverse lookup from a given call target (i.e. a jump table slot as the
  // above {GetCallTargetForFunction} returns) to a function index.
  uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;

  bool SetExecutable(bool executable);

  // For cctests, where we build both WasmModule and the runtime objects
  // on the fly, and bypass the instance builder pipeline.
  void ReserveCodeTableForTesting(uint32_t max_functions);

  void LogWasmCodes(Isolate* isolate);

  CompilationState* compilation_state() { return compilation_state_.get(); }

  // Create a {CompilationEnv} object for compilation. The caller has to ensure
  // that the {WasmModule} pointer stays valid while the {CompilationEnv} is
  // being used.
  CompilationEnv CreateCompilationEnv() const;

  uint32_t num_functions() const {
    return module_->num_declared_functions + module_->num_imported_functions;
  }
  uint32_t num_imported_functions() const {
    return module_->num_imported_functions;
  }
  UseTrapHandler use_trap_handler() const { return use_trap_handler_; }
  void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
  bool lazy_compile_frozen() const { return lazy_compile_frozen_; }
  Vector<const uint8_t> wire_bytes() const { return wire_bytes_->as_vector(); }
  const WasmModule* module() const { return module_.get(); }
  std::shared_ptr<const WasmModule> shared_module() const { return module_; }
  size_t committed_code_space() const { return committed_code_space_.load(); }
  WasmEngine* engine() const { return engine_; }

  void SetWireBytes(OwnedVector<const uint8_t> wire_bytes);

  WasmCode* Lookup(Address) const;

  WasmImportWrapperCache* import_wrapper_cache() const {
    return import_wrapper_cache_.get();
  }

  ~NativeModule();

  const WasmFeatures& enabled_features() const { return enabled_features_; }

  const char* GetRuntimeStubName(Address runtime_stub_entry) const;

  // Sample the current code size of this module to the given counters.
  enum CodeSamplingTime : int8_t { kAfterBaseline, kAfterTopTier, kSampling };
  void SampleCodeSize(Counters*, CodeSamplingTime) const;

  WasmCode* AddCompiledCode(WasmCompilationResult);
  std::vector<WasmCode*> AddCompiledCode(Vector<WasmCompilationResult>);

 private:
  friend class WasmCode;
  friend class WasmCodeManager;
  friend class NativeModuleModificationScope;

  // Private constructor, called via {WasmCodeManager::NewNativeModule()}.
  NativeModule(WasmEngine* engine, const WasmFeatures& enabled_features,
               bool can_request_more, VirtualMemory code_space,
               std::shared_ptr<const WasmModule> module,
               std::shared_ptr<Counters> async_counters,
               std::shared_ptr<NativeModule>* shared_this);

  std::unique_ptr<WasmCode> AddCodeWithCodeSpace(
      uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
      uint32_t tagged_parameter_slots,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
      WasmCode::Tier tier, Vector<uint8_t> code_space);

  // Add and publish anonymous code.
  WasmCode* AddAndPublishAnonymousCode(Handle<Code>, WasmCode::Kind kind,
                                       const char* name = nullptr);
  // Allocate code space. Returns a valid buffer or fails with OOM (crash).
  Vector<byte> AllocateForCode(size_t size);

  WasmCode* CreateEmptyJumpTable(uint32_t jump_table_size);

  Vector<WasmCode*> code_table() const {
    return {code_table_.get(), module_->num_declared_functions};
  }

  // Hold the {mutex_} when calling this method.
  bool has_interpreter_redirection(uint32_t func_index) {
    DCHECK_LT(func_index, num_functions());
    DCHECK_LE(module_->num_imported_functions, func_index);
    if (!interpreter_redirections_) return false;
    // Bit index within the lazily-allocated bitset (declared functions only).
    uint32_t bitset_idx = func_index - module_->num_imported_functions;
    uint8_t byte = interpreter_redirections_[bitset_idx / kBitsPerByte];
    return byte & (1 << (bitset_idx % kBitsPerByte));
  }

  // Hold the {mutex_} when calling this method.
  void SetInterpreterRedirection(uint32_t func_index) {
    DCHECK_LT(func_index, num_functions());
    DCHECK_LE(module_->num_imported_functions, func_index);
    if (!interpreter_redirections_) {
      // Lazily allocate the bitset, one bit per declared function,
      // zero-initialized ({} after new[]).
      interpreter_redirections_.reset(
          new uint8_t[RoundUp<kBitsPerByte>(module_->num_declared_functions) /
                      kBitsPerByte]{});
    }
    uint32_t bitset_idx = func_index - module_->num_imported_functions;
    uint8_t& byte = interpreter_redirections_[bitset_idx / kBitsPerByte];
    byte |= 1 << (bitset_idx % kBitsPerByte);
  }

  // Features enabled for this module. We keep a copy of the features that
  // were enabled at the time of the creation of this native module,
  // to be consistent across asynchronous compilations later.
  const WasmFeatures enabled_features_;

  // The decoded module, stored in a shared_ptr such that background compile
  // tasks can keep this alive.
  std::shared_ptr<const WasmModule> module_;

  // Wire bytes, held in a shared_ptr so they can be kept alive by the
  // {WireBytesStorage}, held by background compile tasks.
  std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;

  // Contains entry points for runtime stub calls via {WASM_STUB_CALL}.
  Address runtime_stub_entries_[WasmCode::kRuntimeStubCount] = {kNullAddress};

  // Jump table used for runtime stubs (i.e. trampolines to embedded builtins).
  WasmCode* runtime_stub_table_ = nullptr;

  // Jump table used to easily redirect wasm function calls.
  WasmCode* jump_table_ = nullptr;

  // The compilation state keeps track of compilation tasks for this module.
  // Note that its destructor blocks until all tasks are finished/aborted and
  // hence needs to be destructed first when this native module dies.
  std::unique_ptr<CompilationState> compilation_state_;

  // A cache of the import wrappers, keyed on the kind and signature.
  std::unique_ptr<WasmImportWrapperCache> import_wrapper_cache_;

  // This mutex protects concurrent calls to {AddCode} and friends.
  mutable base::Mutex allocation_mutex_;

  //////////////////////////////////////////////////////////////////////////////
  // Protected by {allocation_mutex_}:

  // Holds all allocated code objects. Mutable because it might get sorted in
  // {Lookup()}.
  mutable std::vector<std::unique_ptr<WasmCode>> owned_code_;

  // Keep track of the portion of {owned_code_} that is sorted.
  // Entries [0, owned_code_sorted_portion_) are known to be sorted.
  // Mutable because it might get modified in {Lookup()}.
  mutable size_t owned_code_sorted_portion_ = 0;

  // One slot per declared (non-imported) function; see {code()}.
  std::unique_ptr<WasmCode* []> code_table_;

  // Null if no redirections exist, otherwise a bitset over all functions in
  // this module marking those functions that have been redirected.
  std::unique_ptr<uint8_t[]> interpreter_redirections_;

  DisjointAllocationPool free_code_space_;
  DisjointAllocationPool allocated_code_space_;
  std::list<VirtualMemory> owned_code_space_;

  // End of fields protected by {allocation_mutex_}.
  //////////////////////////////////////////////////////////////////////////////

  WasmEngine* const engine_;
  std::atomic<size_t> committed_code_space_{0};
  std::atomic<size_t> generated_code_size_{0};
  // Nesting depth of {NativeModuleModificationScope}s currently open.
  int modification_scope_depth_ = 0;
  bool can_request_more_memory_;
  UseTrapHandler use_trap_handler_ = kNoTrapHandler;
  bool is_executable_ = false;
  bool lazy_compile_frozen_ = false;

  DISALLOW_COPY_AND_ASSIGN(NativeModule);
};
499 :
500 58983 : class V8_EXPORT_PRIVATE WasmCodeManager final {
501 : public:
502 : explicit WasmCodeManager(WasmMemoryTracker* memory_tracker,
503 : size_t max_committed);
504 :
505 : NativeModule* LookupNativeModule(Address pc) const;
506 : WasmCode* LookupCode(Address pc) const;
507 : size_t remaining_uncommitted_code_space() const;
508 :
509 : void SetMaxCommittedMemoryForTesting(size_t limit);
510 :
511 : static size_t EstimateNativeModuleCodeSize(const WasmModule* module);
512 : static size_t EstimateNativeModuleNonCodeSize(const WasmModule* module);
513 :
514 : private:
515 : friend class NativeModule;
516 : friend class WasmEngine;
517 :
518 : std::shared_ptr<NativeModule> NewNativeModule(
519 : WasmEngine* engine, Isolate* isolate,
520 : const WasmFeatures& enabled_features, size_t code_size_estimate,
521 : bool can_request_more, std::shared_ptr<const WasmModule> module);
522 :
523 : V8_WARN_UNUSED_RESULT VirtualMemory TryAllocate(size_t size,
524 : void* hint = nullptr);
525 : bool Commit(Address, size_t);
526 : // Currently, we uncommit a whole module, so all we need is account
527 : // for the freed memory size. We do that in FreeNativeModule.
528 : // There's no separate Uncommit.
529 :
530 : void FreeNativeModule(NativeModule*);
531 :
532 : void AssignRanges(Address start, Address end, NativeModule*);
533 :
534 : WasmMemoryTracker* const memory_tracker_;
535 : std::atomic<size_t> remaining_uncommitted_code_space_;
536 : // If the remaining uncommitted code space falls below
537 : // {critical_uncommitted_code_space_}, then we trigger a GC before creating
538 : // the next module. This value is initialized to 50% of the available code
539 : // space on creation and after each GC.
540 : std::atomic<size_t> critical_uncommitted_code_space_;
541 : mutable base::Mutex native_modules_mutex_;
542 :
543 : //////////////////////////////////////////////////////////////////////////////
544 : // Protected by {native_modules_mutex_}:
545 :
546 : std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;
547 :
548 : // End of fields protected by {native_modules_mutex_}.
549 : //////////////////////////////////////////////////////////////////////////////
550 :
551 : DISALLOW_COPY_AND_ASSIGN(WasmCodeManager);
552 : };
553 :
554 : // Within the scope, the native_module is writable and not executable.
555 : // At the scope's destruction, the native_module is executable and not writable.
556 : // The states inside the scope and at the scope termination are irrespective of
557 : // native_module's state when entering the scope.
558 : // We currently mark the entire module's memory W^X:
559 : // - for AOT, that's as efficient as it can be.
560 : // - for Lazy, we don't have a heuristic for functions that may need patching,
561 : // and even if we did, the resulting set of pages may be fragmented.
562 : // Currently, we try and keep the number of syscalls low.
563 : // - similar argument for debug time.
564 : class NativeModuleModificationScope final {
565 : public:
566 : explicit NativeModuleModificationScope(NativeModule* native_module);
567 : ~NativeModuleModificationScope();
568 :
569 : private:
570 : NativeModule* native_module_;
571 : };
572 :
573 : } // namespace wasm
574 : } // namespace internal
575 : } // namespace v8
576 :
577 : #endif // V8_WASM_WASM_CODE_MANAGER_H_
|