Line  Hits : Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/wasm/wasm-code-manager.h"
6 :
7 : #include <iomanip>
8 :
9 : #include "src/assembler-inl.h"
10 : #include "src/base/adapters.h"
11 : #include "src/base/macros.h"
12 : #include "src/base/platform/platform.h"
13 : #include "src/disassembler.h"
14 : #include "src/globals.h"
15 : #include "src/macro-assembler-inl.h"
16 : #include "src/macro-assembler.h"
17 : #include "src/objects-inl.h"
18 : #include "src/wasm/compilation-environment.h"
19 : #include "src/wasm/function-compiler.h"
20 : #include "src/wasm/jump-table-assembler.h"
21 : #include "src/wasm/wasm-import-wrapper-cache-inl.h"
22 : #include "src/wasm/wasm-module.h"
23 : #include "src/wasm/wasm-objects-inl.h"
24 : #include "src/wasm/wasm-objects.h"
25 :
26 : #define TRACE_HEAP(...) \
27 : do { \
28 : if (FLAG_trace_wasm_native_heap) PrintF(__VA_ARGS__); \
29 : } while (false)
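
// Note: the {do { ... } while (false)} wrapper makes the macro expand to a
// single statement, so it composes safely with unbraced if/else at call sites.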
30 :
31 : namespace v8 {
32 : namespace internal {
33 : namespace wasm {
34 :
35 : namespace {
36 :
37 : // Binary predicate for looking up, in {NativeModule::owned_code_}, the code
38 : // object containing a given address. Suitable for use with {std::upper_bound}.
39 : struct WasmCodeUniquePtrComparator {
40 : bool operator()(Address pc, const std::unique_ptr<WasmCode>& code) const {
41 : DCHECK_NE(kNullAddress, pc);
42 : DCHECK_NOT_NULL(code);
43 : return pc < code->instruction_start();
44 : }
45 : };
46 :
47 : } // namespace
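
// A minimal, self-contained sketch (hypothetical {FakeCode} type, not part of
// this file) of the lookup pattern this comparator enables, as used by
// {NativeModule::Lookup} below: {std::upper_bound} returns the first entry
// whose start lies strictly above {pc}, so the entry just before it is the
// only candidate that can contain {pc}.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

struct FakeCode {
  uintptr_t start;
  size_t size;
  bool contains(uintptr_t pc) const { return start <= pc && pc < start + size; }
};

// {codes} must be sorted by {start}, as {owned_code_} is.
FakeCode* LookupSketch(const std::vector<std::unique_ptr<FakeCode>>& codes,
                       uintptr_t pc) {
  auto it = std::upper_bound(
      codes.begin(), codes.end(), pc,
      [](uintptr_t addr, const std::unique_ptr<FakeCode>& code) {
        return addr < code->start;
      });
  if (it == codes.begin()) return nullptr;  // pc below the first code object
  --it;
  return (*it)->contains(pc) ? it->get() : nullptr;
}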
48 :
49 41525156 : void DisjointAllocationPool::Merge(base::AddressRegion region) {
50 41525156 : auto dest_it = regions_.begin();
51 : auto dest_end = regions_.end();
52 :
53 : // Skip over dest regions strictly before {region}.
54 123044897 : while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;
55 :
56 : // After last dest region: insert and done.
57 41525156 : if (dest_it == dest_end) {
58 1530613 : regions_.push_back(region);
59 1530613 : return;
60 : }
61 :
62 : // Adjacent (from below) to dest: merge and done.
63 79989086 : if (dest_it->begin() == region.end()) {
64 : base::AddressRegion merged_region{region.begin(),
65 1 : region.size() + dest_it->size()};
66 : DCHECK_EQ(merged_region.end(), dest_it->end());
67 1 : *dest_it = merged_region;
68 : return;
69 : }
70 :
71 : // Before dest: insert and done.
72 39994542 : if (dest_it->begin() > region.end()) {
73 0 : regions_.insert(dest_it, region);
74 : return;
75 : }
76 :
77 : // {region} is adjacent from above. Merge, then check whether the merged
78 : // region is now adjacent to the next region.
79 : DCHECK_EQ(dest_it->end(), region.begin());
80 39994542 : dest_it->set_size(dest_it->size() + region.size());
81 : DCHECK_EQ(dest_it->end(), region.end());
82 : auto next_dest = dest_it;
83 : ++next_dest;
84 39994548 : if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
85 6 : dest_it->set_size(dest_it->size() + next_dest->size());
86 : DCHECK_EQ(dest_it->end(), next_dest->end());
87 : regions_.erase(next_dest);
88 : }
89 : }
90 :
91 41525132 : base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
92 124575400 : for (auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
93 41525134 : if (size > it->size()) continue;
94 41525130 : base::AddressRegion ret{it->begin(), size};
95 41525130 : if (size == it->size()) {
96 : regions_.erase(it);
97 : } else {
98 41525128 : *it = base::AddressRegion{it->begin() + size, it->size() - size};
99 : }
100 41525130 : return ret;
101 : }
102 2 : return {};
103 : }
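
// Illustrative, self-contained re-implementation of the pool's contract above
// (simplified {Region} type, hypothetical {MiniPool} name; the real class
// keeps the same invariants): {Merge} returns a region to the free list and
// coalesces it with adjacent regions; {Allocate} is first-fit and shrinks or
// erases the free region it carves from.
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <list>

struct Region {
  uintptr_t begin;
  size_t size;
  uintptr_t end() const { return begin + size; }
};

class MiniPool {
 public:
  explicit MiniPool(Region initial) { free_.push_back(initial); }

  void Merge(Region region) {
    // Skip free regions that end strictly before {region}.
    auto it = free_.begin();
    while (it != free_.end() && it->end() < region.begin) ++it;
    if (it != free_.end() && it->end() == region.begin) {
      // Adjacent from below: grow {*it}, then maybe bridge to the next region.
      it->size += region.size;
      auto next = std::next(it);
      if (next != free_.end() && it->end() == next->begin) {
        it->size += next->size;
        free_.erase(next);
      }
    } else if (it != free_.end() && region.end() == it->begin) {
      // Adjacent from above: extend {*it} downwards.
      it->begin = region.begin;
      it->size += region.size;
    } else {
      free_.insert(it, region);  // disjoint: insert (or append at the end)
    }
  }

  Region Allocate(size_t size) {
    for (auto it = free_.begin(); it != free_.end(); ++it) {
      if (it->size < size) continue;  // first-fit
      Region ret{it->begin, size};
      if (it->size == size) {
        free_.erase(it);
      } else {
        it->begin += size;
        it->size -= size;
      }
      return ret;
    }
    return {0, 0};  // empty region: allocation failed
  }

 private:
  std::list<Region> free_;  // disjoint regions, sorted by address
};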
104 :
105 1188 : Address WasmCode::constant_pool() const {
106 : if (FLAG_enable_embedded_constant_pool) {
107 : if (constant_pool_offset_ < code_comments_offset_) {
108 : return instruction_start() + constant_pool_offset_;
109 : }
110 : }
111 : return kNullAddress;
112 : }
113 :
114 0 : Address WasmCode::code_comments() const {
115 0 : if (code_comments_offset_ < unpadded_binary_size_) {
116 0 : return instruction_start() + code_comments_offset_;
117 : }
118 : return kNullAddress;
119 : }
120 :
121 93886 : size_t WasmCode::trap_handler_index() const {
122 93886 : CHECK(HasTrapHandlerIndex());
123 93886 : return static_cast<size_t>(trap_handler_index_);
124 : }
125 :
126 0 : void WasmCode::set_trap_handler_index(size_t value) {
127 93886 : trap_handler_index_ = value;
128 0 : }
129 :
130 93886 : void WasmCode::RegisterTrapHandlerData() {
131 : DCHECK(!HasTrapHandlerIndex());
132 187772 : if (kind() != WasmCode::kFunction) return;
133 :
134 : Address base = instruction_start();
135 :
136 : size_t size = instructions().size();
137 : const int index =
138 : RegisterHandlerData(base, size, protected_instructions().size(),
139 93886 : protected_instructions().start());
140 :
141 : // TODO(eholk): if index is negative, fail.
142 93886 : CHECK_LE(0, index);
143 : set_trap_handler_index(static_cast<size_t>(index));
144 : }
145 :
146 41619019 : bool WasmCode::HasTrapHandlerIndex() const { return trap_handler_index_ >= 0; }
147 :
148 6129158 : bool WasmCode::ShouldBeLogged(Isolate* isolate) {
149 6129158 : return isolate->logger()->is_listening_to_code_events() ||
150 2910901 : isolate->is_profiling();
151 : }
152 :
153 0 : void WasmCode::LogCode(Isolate* isolate) const {
154 : DCHECK(ShouldBeLogged(isolate));
155 0 : if (IsAnonymous()) return;
156 :
157 : ModuleWireBytes wire_bytes(native_module()->wire_bytes());
158 : // TODO(herhut): Allow logging code without an on-heap round-trip of the name.
159 : WireBytesRef name_ref =
160 0 : native_module()->module()->LookupFunctionName(wire_bytes, index());
161 0 : WasmName name_vec = wire_bytes.GetNameOrNull(name_ref);
162 0 : if (!name_vec.is_empty()) {
163 : HandleScope scope(isolate);
164 : MaybeHandle<String> maybe_name = isolate->factory()->NewStringFromUtf8(
165 0 : Vector<const char>::cast(name_vec));
166 : Handle<String> name;
167 0 : if (!maybe_name.ToHandle(&name)) {
168 0 : name = isolate->factory()->NewStringFromAsciiChecked("<name too long>");
169 : }
170 : int name_length;
171 : auto cname =
172 : name->ToCString(AllowNullsFlag::DISALLOW_NULLS,
173 0 : RobustnessFlag::ROBUST_STRING_TRAVERSAL, &name_length);
174 0 : PROFILE(isolate,
175 : CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
176 : {cname.get(), static_cast<size_t>(name_length)}));
177 : } else {
178 : EmbeddedVector<char, 32> generated_name;
179 0 : int length = SNPrintF(generated_name, "wasm-function[%d]", index());
180 0 : generated_name.Truncate(length);
181 0 : PROFILE(isolate, CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
182 : generated_name));
183 : }
184 :
185 0 : if (!source_positions().is_empty()) {
186 0 : LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(instruction_start(),
187 : source_positions()));
188 : }
189 : }
190 :
191 0 : const char* WasmCode::GetRuntimeStubName() const {
192 : DCHECK_EQ(WasmCode::kRuntimeStub, kind());
193 : #define RETURN_NAME(Name) \
194 : if (native_module_->runtime_stub_table_[WasmCode::k##Name] == this) { \
195 : return #Name; \
196 : }
197 : #define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
198 0 : WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
199 : #undef RETURN_NAME_TRAP
200 : #undef RETURN_NAME
201 0 : return "<unknown>";
202 : }
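
// {GetRuntimeStubName} relies on the "X-macro" pattern:
// {WASM_RUNTIME_STUB_LIST(V, VTRAP)} expands {V(Name)} once per runtime stub
// and {VTRAP(Name)} once per trap reason, so a single list drives every
// enum/table/name definition. A minimal standalone sketch of the technique
// (hypothetical {COLOR_LIST}, unrelated to the real stub list):
#define COLOR_LIST(V) V(Red) V(Green) V(Blue)

enum Color {
#define DECLARE_COLOR(Name) k##Name,
  COLOR_LIST(DECLARE_COLOR)
#undef DECLARE_COLOR
};

const char* ColorName(Color color) {
#define COLOR_NAME(Name) \
  if (color == k##Name) return #Name;
  COLOR_LIST(COLOR_NAME)
#undef COLOR_NAME
  return "<unknown>";
}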
203 :
204 254 : void WasmCode::Validate() const {
205 : #ifdef DEBUG
206 : // We expect certain relocation info modes to never appear in {WasmCode}
207 : // objects or to be restricted to a small set of valid values. Hence the
208 : // iteration below does not use a mask, but visits all relocation data.
209 : for (RelocIterator it(instructions(), reloc_info(), constant_pool());
210 : !it.done(); it.next()) {
211 : RelocInfo::Mode mode = it.rinfo()->rmode();
212 : switch (mode) {
213 : case RelocInfo::WASM_CALL: {
214 : Address target = it.rinfo()->wasm_call_address();
215 : WasmCode* code = native_module_->Lookup(target);
216 : CHECK_NOT_NULL(code);
217 : CHECK_EQ(WasmCode::kJumpTable, code->kind());
218 : CHECK(code->contains(target));
219 : break;
220 : }
221 : case RelocInfo::WASM_STUB_CALL: {
222 : Address target = it.rinfo()->wasm_stub_call_address();
223 : WasmCode* code = native_module_->Lookup(target);
224 : CHECK_NOT_NULL(code);
225 : CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
226 : CHECK_EQ(target, code->instruction_start());
227 : break;
228 : }
229 : case RelocInfo::INTERNAL_REFERENCE:
230 : case RelocInfo::INTERNAL_REFERENCE_ENCODED: {
231 : Address target = it.rinfo()->target_internal_reference();
232 : CHECK(contains(target));
233 : break;
234 : }
235 : case RelocInfo::EXTERNAL_REFERENCE:
236 : case RelocInfo::CONST_POOL:
237 : case RelocInfo::VENEER_POOL:
238 : // These are OK to appear.
239 : break;
240 : default:
241 : FATAL("Unexpected mode: %d", mode);
242 : }
243 : }
244 : #endif
245 254 : }
246 :
247 40002389 : void WasmCode::MaybePrint(const char* name) const {
248 : // Determines whether flags want this code to be printed.
249 40002389 : if ((FLAG_print_wasm_code && kind() == kFunction) ||
250 80002786 : (FLAG_print_wasm_stub_code && kind() != kFunction) || FLAG_print_code) {
251 70 : Print(name);
252 : }
253 40001419 : }
254 :
255 70 : void WasmCode::Print(const char* name) const {
256 70 : StdoutStream os;
257 70 : os << "--- WebAssembly code ---\n";
258 70 : Disassemble(name, os);
259 70 : os << "--- End code ---\n";
260 70 : }
261 :
262 70 : void WasmCode::Disassemble(const char* name, std::ostream& os,
263 140 : Address current_pc) const {
264 70 : if (name) os << "name: " << name << "\n";
265 140 : if (!IsAnonymous()) os << "index: " << index() << "\n";
266 140 : os << "kind: " << GetWasmCodeKindAsString(kind_) << "\n";
267 70 : os << "compiler: " << (is_liftoff() ? "Liftoff" : "TurboFan") << "\n";
268 70 : size_t padding = instructions().size() - unpadded_binary_size_;
269 140 : os << "Body (size = " << instructions().size() << " = "
270 140 : << unpadded_binary_size_ << " + " << padding << " padding)\n";
271 :
272 : #ifdef ENABLE_DISASSEMBLER
273 : size_t instruction_size = unpadded_binary_size_;
274 : if (constant_pool_offset_ < instruction_size) {
275 : instruction_size = constant_pool_offset_;
276 : }
277 : if (safepoint_table_offset_ && safepoint_table_offset_ < instruction_size) {
278 : instruction_size = safepoint_table_offset_;
279 : }
280 : if (handler_table_offset_ && handler_table_offset_ < instruction_size) {
281 : instruction_size = handler_table_offset_;
282 : }
283 : DCHECK_LT(0, instruction_size);
284 : os << "Instructions (size = " << instruction_size << ")\n";
285 : Disassembler::Decode(nullptr, &os, instructions().start(),
286 : instructions().start() + instruction_size,
287 : CodeReference(this), current_pc);
288 : os << "\n";
289 :
290 : if (handler_table_offset_ > 0) {
291 : HandlerTable table(instruction_start(), handler_table_offset_);
292 : os << "Exception Handler Table (size = " << table.NumberOfReturnEntries()
293 : << "):\n";
294 : table.HandlerTableReturnPrint(os);
295 : os << "\n";
296 : }
297 :
298 : if (!protected_instructions_.is_empty()) {
299 : os << "Protected instructions:\n pc offset land pad\n";
300 : for (auto& data : protected_instructions()) {
301 : os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
302 : << std::hex << data.landing_offset << "\n";
303 : }
304 : os << "\n";
305 : }
306 :
307 : if (!source_positions().is_empty()) {
308 : os << "Source positions:\n pc offset position\n";
309 : for (SourcePositionTableIterator it(source_positions()); !it.done();
310 : it.Advance()) {
311 : os << std::setw(10) << std::hex << it.code_offset() << std::dec
312 : << std::setw(10) << it.source_position().ScriptOffset()
313 : << (it.is_statement() ? " statement" : "") << "\n";
314 : }
315 : os << "\n";
316 : }
317 :
318 : if (safepoint_table_offset_ > 0) {
319 : SafepointTable table(instruction_start(), safepoint_table_offset_,
320 : stack_slots_);
321 : os << "Safepoints (size = " << table.size() << ")\n";
322 : for (uint32_t i = 0; i < table.length(); i++) {
323 : uintptr_t pc_offset = table.GetPcOffset(i);
324 : os << reinterpret_cast<const void*>(instruction_start() + pc_offset);
325 : os << std::setw(6) << std::hex << pc_offset << " " << std::dec;
326 : table.PrintEntry(i, os);
327 : os << " (sp -> fp)";
328 : SafepointEntry entry = table.GetEntry(i);
329 : if (entry.trampoline_pc() != -1) {
330 : os << " trampoline: " << std::hex << entry.trampoline_pc() << std::dec;
331 : }
332 : if (entry.has_deoptimization_index()) {
333 : os << " deopt: " << std::setw(6) << entry.deoptimization_index();
334 : }
335 : if (entry.has_argument_count()) {
336 : os << " argc: " << entry.argument_count();
337 : }
338 : os << "\n";
339 : }
340 : os << "\n";
341 : }
342 :
343 : os << "RelocInfo (size = " << reloc_info_.size() << ")\n";
344 : for (RelocIterator it(instructions(), reloc_info(), constant_pool());
345 : !it.done(); it.next()) {
346 : it.rinfo()->Print(nullptr, os);
347 : }
348 : os << "\n";
349 :
350 : if (code_comments_offset() < unpadded_binary_size_) {
351 : Address code_comments = reinterpret_cast<Address>(instructions().start() +
352 : code_comments_offset());
353 : PrintCodeCommentsSection(os, code_comments);
354 : }
355 : #endif // ENABLE_DISASSEMBLER
356 70 : }
357 :
358 0 : const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
359 70 : switch (kind) {
360 : case WasmCode::kFunction:
361 : return "wasm function";
362 : case WasmCode::kWasmToJsWrapper:
363 0 : return "wasm-to-js";
364 : case WasmCode::kLazyStub:
365 0 : return "lazy-compile";
366 : case WasmCode::kRuntimeStub:
367 0 : return "runtime-stub";
368 : case WasmCode::kInterpreterEntry:
369 0 : return "interpreter entry";
370 : case WasmCode::kJumpTable:
371 0 : return "jump table";
372 : }
373 0 : return "unknown kind";
374 : }
375 :
376 41525133 : WasmCode::~WasmCode() {
377 41525133 : if (HasTrapHandlerIndex()) {
378 93886 : CHECK_LT(trap_handler_index(),
379 : static_cast<size_t>(std::numeric_limits<int>::max()));
380 93886 : trap_handler::ReleaseHandlerData(static_cast<int>(trap_handler_index()));
381 : }
382 41525133 : }
383 :
384 1530590 : NativeModule::NativeModule(Isolate* isolate, const WasmFeatures& enabled,
385 : bool can_request_more, VirtualMemory code_space,
386 : WasmCodeManager* code_manager,
387 : std::shared_ptr<const WasmModule> module)
388 : : enabled_features_(enabled),
389 : module_(std::move(module)),
390 : compilation_state_(CompilationState::New(isolate, this)),
391 : import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
392 1530590 : new WasmImportWrapperCache(this))),
393 1530590 : free_code_space_(code_space.region()),
394 : code_manager_(code_manager),
395 : can_request_more_memory_(can_request_more),
396 : use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
397 12244717 : : kNoTrapHandler) {
398 : DCHECK_NOT_NULL(module_);
399 1530589 : owned_code_space_.emplace_back(std::move(code_space));
400 1530590 : owned_code_.reserve(num_functions());
401 :
402 1530590 : uint32_t num_wasm_functions = module_->num_declared_functions;
403 1530590 : if (num_wasm_functions > 0) {
404 156895 : code_table_.reset(new WasmCode*[num_wasm_functions]);
405 156895 : memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
406 :
407 156895 : jump_table_ = CreateEmptyJumpTable(num_wasm_functions);
408 : }
409 1530590 : }
410 :
411 1366730 : void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
412 : DCHECK_LE(num_functions(), max_functions);
413 1366730 : WasmCode** new_table = new WasmCode*[max_functions];
414 1366730 : memset(new_table, 0, max_functions * sizeof(*new_table));
415 : memcpy(new_table, code_table_.get(),
416 1366730 : module_->num_declared_functions * sizeof(*new_table));
417 : code_table_.reset(new_table);
418 :
419 : // Re-allocate jump table.
420 1366730 : jump_table_ = CreateEmptyJumpTable(max_functions);
421 1366730 : }
422 :
423 153678 : void NativeModule::LogWasmCodes(Isolate* isolate) {
424 307356 : if (!WasmCode::ShouldBeLogged(isolate)) return;
425 :
426 : // TODO(titzer): we skip the logging of the import wrappers
427 : // here, but they should be included somehow.
428 0 : for (WasmCode* code : code_table()) {
429 0 : if (code != nullptr) code->LogCode(isolate);
430 : }
431 : }
432 :
433 195145 : CompilationEnv NativeModule::CreateCompilationEnv() const {
434 : return {module(), use_trap_handler_, kRuntimeExceptionSupport,
435 390290 : enabled_features_};
436 : }
437 :
438 41524545 : WasmCode* NativeModule::AddOwnedCode(
439 : uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
440 : size_t safepoint_table_offset, size_t handler_table_offset,
441 : size_t constant_pool_offset, size_t code_comments_offset,
442 : size_t unpadded_binary_size,
443 : OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
444 : OwnedVector<const byte> reloc_info,
445 : OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
446 : WasmCode::Tier tier) {
447 : WasmCode* code;
448 : {
449 : // Both allocation and insertion in owned_code_ happen in the same critical
450 : // section, thus ensuring owned_code_'s elements are rarely if ever moved.
451 41524545 : base::MutexGuard lock(&allocation_mutex_);
452 41525131 : Vector<byte> executable_buffer = AllocateForCode(instructions.size());
453 : // Ownership will be transferred to {owned_code_} below.
454 : code = new WasmCode(
455 : this, index, executable_buffer, stack_slots, safepoint_table_offset,
456 : handler_table_offset, constant_pool_offset, code_comments_offset,
457 : unpadded_binary_size, std::move(protected_instructions),
458 124575357 : std::move(reloc_info), std::move(source_position_table), kind, tier);
459 :
460 81519628 : if (owned_code_.empty() ||
461 39994519 : code->instruction_start() > owned_code_.back()->instruction_start()) {
462 : // Common case.
463 41525109 : owned_code_.emplace_back(code);
464 : } else {
465 : // Slow but unlikely case.
466 : // TODO(mtrofin): We allocate in increasing address order, and
467 : // even if we end up with segmented memory, we may end up only with a few
468 : // large moves - if, for example, a new segment is below the current ones.
469 : auto insert_before = std::upper_bound(
470 0 : owned_code_.begin(), owned_code_.end(), code->instruction_start(),
471 0 : WasmCodeUniquePtrComparator{});
472 0 : owned_code_.emplace(insert_before, code);
473 : }
474 : }
475 41525123 : memcpy(reinterpret_cast<void*>(code->instruction_start()),
476 41525123 : instructions.start(), instructions.size());
477 :
478 41525123 : return code;
479 : }
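
// Sketch of the insertion pattern above, reusing the hypothetical {FakeCode}
// from the earlier lookup sketch: keep the vector sorted by start address
// with an O(1) append for the common monotonically-increasing case, falling
// back to a binary-searched insert (with O(n) element moves) otherwise.
#include <algorithm>
#include <cstdint>
#include <memory>
#include <vector>

void InsertSorted(std::vector<std::unique_ptr<FakeCode>>* codes,
                  std::unique_ptr<FakeCode> code) {
  if (codes->empty() || code->start > codes->back()->start) {
    codes->push_back(std::move(code));  // common case
    return;
  }
  // Slow but unlikely case: find the first entry starting above {code}.
  auto insert_before = std::upper_bound(
      codes->begin(), codes->end(), code->start,
      [](uintptr_t start, const std::unique_ptr<FakeCode>& c) {
        return start < c->start;
      });
  codes->insert(insert_before, std::move(code));
}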
480 :
481 941 : WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
482 941 : WasmCode* ret = AddAnonymousCode(code, WasmCode::kFunction);
483 941 : return ret;
484 : }
485 :
486 3187 : void NativeModule::SetLazyBuiltin(Handle<Code> code) {
487 20786 : uint32_t num_wasm_functions = module_->num_declared_functions;
488 6374 : if (num_wasm_functions == 0) return;
489 3133 : WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub);
490 : // Fill the jump table with jumps to the lazy compile stub.
491 : Address lazy_compile_target = lazy_builtin->instruction_start();
492 20732 : for (uint32_t i = 0; i < num_wasm_functions; ++i) {
493 : JumpTableAssembler::EmitLazyCompileJumpSlot(
494 : jump_table_->instruction_start(), i,
495 : i + module_->num_imported_functions, lazy_compile_target,
496 35198 : WasmCode::kNoFlushICache);
497 : }
498 : Assembler::FlushICache(jump_table_->instructions().start(),
499 3133 : jump_table_->instructions().size());
500 : }
501 :
502 1529633 : void NativeModule::SetRuntimeStubs(Isolate* isolate) {
503 : HandleScope scope(isolate);
504 : DCHECK_NULL(runtime_stub_table_[0]); // Only called once.
505 : #define COPY_BUILTIN(Name) \
506 : runtime_stub_table_[WasmCode::k##Name] = \
507 : AddAnonymousCode(isolate->builtins()->builtin_handle(Builtins::k##Name), \
508 : WasmCode::kRuntimeStub, #Name);
509 : #define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name)
510 1529633 : WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
511 : #undef COPY_BUILTIN_TRAP
512 : #undef COPY_BUILTIN
513 1529633 : }
514 :
515 38244892 : WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code, WasmCode::Kind kind,
516 : const char* name) {
517 : // For off-heap builtins, we create a copy of the off-heap instruction stream
518 : // instead of the on-heap code object containing the trampoline. Ensure that
519 : // we do not apply the on-heap reloc info to the off-heap instructions.
520 : const size_t relocation_size =
521 38245834 : code->is_off_heap_trampoline() ? 0 : code->relocation_size();
522 : OwnedVector<byte> reloc_info = OwnedVector<byte>::New(relocation_size);
523 : memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
524 : Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
525 76489776 : code->GetIsolate());
526 : OwnedVector<byte> source_pos =
527 38244886 : OwnedVector<byte>::New(source_pos_table->length());
528 : source_pos_table->copy_out(0, source_pos.start(), source_pos_table->length());
529 : Vector<const byte> instructions(
530 76489780 : reinterpret_cast<byte*>(code->InstructionStart()),
531 114734668 : static_cast<size_t>(code->InstructionSize()));
532 38244892 : int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0;
533 : int safepoint_table_offset =
534 38244892 : code->has_safepoint_info() ? code->safepoint_table_offset() : 0;
535 : WasmCode* ret =
536 : AddOwnedCode(WasmCode::kAnonymousFuncIndex, // index
537 : instructions, // instructions
538 : stack_slots, // stack_slots
539 : safepoint_table_offset, // safepoint_table_offset
540 : code->handler_table_offset(), // handler_table_offset
541 : code->constant_pool_offset(), // constant_pool_offset
542 : code->code_comments_offset(), // code_comments_offset
543 : instructions.size(), // unpadded_binary_size
544 : {}, // protected_instructions
545 : std::move(reloc_info), // reloc_info
546 : std::move(source_pos), // source positions
547 : kind, // kind
548 267714225 : WasmCode::kOther); // tier
549 :
550 : // Apply the relocation delta by iterating over the RelocInfo.
551 38244889 : intptr_t delta = ret->instruction_start() - code->InstructionStart();
552 38244890 : int mode_mask = RelocInfo::kApplyMask |
553 38244890 : RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
554 38244891 : RelocIterator orig_it(*code, mode_mask);
555 76489786 : for (RelocIterator it(ret->instructions(), ret->reloc_info(),
556 38244894 : ret->constant_pool(), mode_mask);
557 0 : !it.done(); it.next(), orig_it.next()) {
558 0 : RelocInfo::Mode mode = it.rinfo()->rmode();
559 0 : if (RelocInfo::IsWasmStubCall(mode)) {
560 0 : uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
561 : DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
562 : WasmCode* code =
563 : runtime_stub(static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
564 : it.rinfo()->set_wasm_stub_call_address(code->instruction_start(),
565 0 : SKIP_ICACHE_FLUSH);
566 : } else {
567 0 : it.rinfo()->apply(delta);
568 : }
569 : }
570 :
571 : // Flush the i-cache here instead of in AddOwnedCode, to include the changes
572 : // made while iterating over the RelocInfo above.
573 : Assembler::FlushICache(ret->instructions().start(),
574 38244893 : ret->instructions().size());
575 38244892 : ret->MaybePrint(name);
576 : ret->Validate();
577 38244891 : return ret;
578 : }
579 :
580 1755757 : WasmCode* NativeModule::AddCode(
581 1756197 : uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
582 : size_t safepoint_table_offset, size_t handler_table_offset,
583 : OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
584 : OwnedVector<const byte> source_pos_table, WasmCode::Kind kind,
585 : WasmCode::Tier tier) {
586 1755757 : OwnedVector<byte> reloc_info = OwnedVector<byte>::New(desc.reloc_size);
587 1756197 : memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
588 3512394 : desc.reloc_size);
589 :
590 : WasmCode* ret = AddOwnedCode(
591 : index, {desc.buffer, static_cast<size_t>(desc.instr_size)}, stack_slots,
592 : safepoint_table_offset, handler_table_offset, desc.constant_pool_offset(),
593 : desc.code_comments_offset(), desc.instr_size,
594 : std::move(protected_instructions), std::move(reloc_info),
595 12293775 : std::move(source_pos_table), kind, tier);
596 :
597 : // Apply the relocation delta by iterating over the RelocInfo.
598 1756329 : intptr_t delta = ret->instructions().start() - desc.buffer;
599 1756329 : int mode_mask = RelocInfo::kApplyMask |
600 : RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
601 1756329 : RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
602 4363265 : for (RelocIterator it(ret->instructions(), ret->reloc_info(),
603 1756329 : ret->constant_pool(), mode_mask);
604 850651 : !it.done(); it.next()) {
605 850650 : RelocInfo::Mode mode = it.rinfo()->rmode();
606 850650 : if (RelocInfo::IsWasmCall(mode)) {
607 36623 : uint32_t call_tag = it.rinfo()->wasm_call_tag();
608 : Address target = GetCallTargetForFunction(call_tag);
609 36630 : it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
610 814027 : } else if (RelocInfo::IsWasmStubCall(mode)) {
611 592195 : uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
612 : DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
613 : WasmCode* code =
614 : runtime_stub(static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
615 : it.rinfo()->set_wasm_stub_call_address(code->instruction_start(),
616 592194 : SKIP_ICACHE_FLUSH);
617 : } else {
618 221832 : it.rinfo()->apply(delta);
619 : }
620 : }
621 :
622 : // Flush the i-cache here instead of in AddOwnedCode, to include the changes
623 : // made while iterating over the RelocInfo above.
624 : Assembler::FlushICache(ret->instructions().start(),
625 1756310 : ret->instructions().size());
626 1756303 : ret->MaybePrint();
627 : ret->Validate();
628 1756295 : return ret;
629 : }
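
// Minimal sketch (hypothetical flat relocation format) of the {delta}
// patching done in {AddCode} and {AddAnonymousCode}: code is assembled at a
// scratch buffer address and then copied into the module's code space, so
// every absolute target recorded in the relocation info must be shifted by
// the distance between the two base addresses.
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

// Each entry is the offset of an absolute 64-bit target within {code}.
void ApplyRelocationDelta(uint8_t* code,
                          const std::vector<size_t>& reloc_offsets,
                          intptr_t delta) {
  for (size_t offset : reloc_offsets) {
    uint64_t target;
    std::memcpy(&target, code + offset, sizeof(target));  // unaligned-safe
    target += static_cast<uint64_t>(delta);
    std::memcpy(code + offset, &target, sizeof(target));
  }
  // A real implementation must flush the instruction cache afterwards, as
  // the code above does with {Assembler::FlushICache}.
}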
630 :
631 254 : WasmCode* NativeModule::AddDeserializedCode(
632 : uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
633 : size_t safepoint_table_offset, size_t handler_table_offset,
634 : size_t constant_pool_offset, size_t code_comments_offset,
635 : size_t unpadded_binary_size,
636 : OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
637 : OwnedVector<const byte> reloc_info,
638 : OwnedVector<const byte> source_position_table, WasmCode::Tier tier) {
639 : WasmCode* code =
640 : AddOwnedCode(index, instructions, stack_slots, safepoint_table_offset,
641 : handler_table_offset, constant_pool_offset,
642 : code_comments_offset, unpadded_binary_size,
643 : std::move(protected_instructions), std::move(reloc_info),
644 1270 : std::move(source_position_table), WasmCode::kFunction, tier);
645 :
646 254 : if (!code->protected_instructions_.is_empty()) {
647 18 : code->RegisterTrapHandlerData();
648 : }
649 254 : base::MutexGuard lock(&allocation_mutex_);
650 254 : InstallCode(code);
651 : // Note: we do not flush the i-cache here, since the code needs to be
652 : // relocated anyway. The caller is responsible for flushing the i-cache later.
653 254 : return code;
654 : }
655 :
656 3495315 : void NativeModule::PublishCode(WasmCode* code) {
657 1747542 : base::MutexGuard lock(&allocation_mutex_);
658 : // Skip publishing code if there is an active redirection to the interpreter
659 : // for the given function index, in order to preserve the redirection.
660 3495538 : if (has_interpreter_redirection(code->index())) return;
661 :
662 1747763 : if (!code->protected_instructions_.is_empty()) {
663 93868 : code->RegisterTrapHandlerData();
664 : }
665 1747763 : InstallCode(code);
666 : }
667 :
668 1214 : void NativeModule::PublishInterpreterEntry(WasmCode* code,
669 : uint32_t func_index) {
670 1214 : code->index_ = func_index;
671 1214 : base::MutexGuard lock(&allocation_mutex_);
672 1214 : InstallCode(code);
673 1214 : SetInterpreterRedirection(func_index);
674 1214 : }
675 :
676 230 : std::vector<WasmCode*> NativeModule::SnapshotCodeTable() const {
677 230 : base::MutexGuard lock(&allocation_mutex_);
678 : std::vector<WasmCode*> result;
679 230 : result.reserve(code_table().size());
680 230 : for (WasmCode* code : code_table()) result.push_back(code);
681 230 : return result;
682 : }
683 :
684 1523625 : WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t num_wasm_functions) {
685 : // Only call this if we really need a jump table.
686 : DCHECK_LT(0, num_wasm_functions);
687 : OwnedVector<byte> instructions = OwnedVector<byte>::New(
688 1523625 : JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
689 : memset(instructions.start(), 0, instructions.size());
690 : return AddOwnedCode(WasmCode::kAnonymousFuncIndex, // index
691 : instructions.as_vector(), // instructions
692 : 0, // stack_slots
693 : instructions.size(), // safepoint_table_offset
694 : instructions.size(), // handler_table_offset
695 : instructions.size(), // constant_pool_offset
696 : instructions.size(), // code_comments_offset
697 : instructions.size(), // unpadded_binary_size
698 : {}, // protected_instructions
699 : {}, // reloc_info
700 : {}, // source_pos
701 : WasmCode::kJumpTable, // kind
702 9141752 : WasmCode::kOther); // tier
703 : }
704 :
705 5246463 : void NativeModule::InstallCode(WasmCode* code) {
706 : DCHECK_LT(code->index(), num_functions());
707 : DCHECK_LE(module_->num_imported_functions, code->index());
708 :
709 : // Update code table, except for interpreter entries.
710 1749224 : if (code->kind() != WasmCode::kInterpreterEntry) {
711 5245254 : code_table_[code->index() - module_->num_imported_functions] = code;
712 : }
713 :
714 : // Patch jump table.
715 1749224 : uint32_t slot_idx = code->index() - module_->num_imported_functions;
716 : JumpTableAssembler::PatchJumpTableSlot(jump_table_->instruction_start(),
717 : slot_idx, code->instruction_start(),
718 3498448 : WasmCode::kFlushICache);
719 1749231 : }
720 :
721 41525127 : Vector<byte> NativeModule::AllocateForCode(size_t size) {
722 : DCHECK_LT(0, size);
723 41525127 : v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
724 : // This happens under a lock assumed by the caller.
725 : size = RoundUp(size, kCodeAlignment);
726 41525126 : base::AddressRegion code_space = free_code_space_.Allocate(size);
727 41525126 : if (code_space.is_empty()) {
728 0 : if (!can_request_more_memory_) {
729 : V8::FatalProcessOutOfMemory(nullptr,
730 0 : "NativeModule::AllocateForCode reservation");
731 : UNREACHABLE();
732 : }
733 :
734 : Address hint = owned_code_space_.empty() ? kNullAddress
735 0 : : owned_code_space_.back().end();
736 :
737 : VirtualMemory new_mem =
738 0 : code_manager_->TryAllocate(size, reinterpret_cast<void*>(hint));
739 0 : if (!new_mem.IsReserved()) {
740 : V8::FatalProcessOutOfMemory(nullptr,
741 0 : "NativeModule::AllocateForCode reservation");
742 : UNREACHABLE();
743 : }
744 0 : code_manager_->AssignRanges(new_mem.address(), new_mem.end(), this);
745 :
746 0 : free_code_space_.Merge(new_mem.region());
747 0 : owned_code_space_.emplace_back(std::move(new_mem));
748 0 : code_space = free_code_space_.Allocate(size);
749 0 : DCHECK(!code_space.is_empty());
750 : }
751 41525126 : const Address page_size = page_allocator->AllocatePageSize();
752 41525121 : Address commit_start = RoundUp(code_space.begin(), page_size);
753 : Address commit_end = RoundUp(code_space.end(), page_size);
754 : // {commit_start} is either {code_space.begin()} or the start of the next
755 : // page; {commit_end} is the start of the first page after the one in which
756 : // the allocation ends.
757 : // We reserved virtual memory in page multiples starting at a page-aligned
758 : // address, so only the not-yet-committed part needs committing: the page
759 : // containing {commit_start} is already committed (or the allocation starts
760 : // exactly on a page boundary), and the end must be committed through the
761 : // end of its page.
762 41525121 : if (commit_start < commit_end) {
763 1547820 : committed_code_space_.fetch_add(commit_end - commit_start);
764 : // Committed code cannot grow bigger than maximum code space size.
765 : DCHECK_LE(committed_code_space_.load(), kMaxWasmCodeMemory);
766 : #if V8_OS_WIN
767 : // On Windows, we cannot commit a region that straddles different
768 : // reservations of virtual memory. Since we bump-allocate, and append any
769 : // additional memory at the end of the owned_code_space_ list, we traverse
770 : // that list in reverse order to find the reservation(s) that determine how
771 : // to chunk the region to commit.
772 : for (auto& vmem : base::Reversed(owned_code_space_)) {
773 : if (commit_end <= vmem.address() || vmem.end() <= commit_start) continue;
774 : Address start = std::max(commit_start, vmem.address());
775 : Address end = std::min(commit_end, vmem.end());
776 : size_t commit_size = static_cast<size_t>(end - start);
777 : if (!code_manager_->Commit(start, commit_size)) {
778 : V8::FatalProcessOutOfMemory(nullptr,
779 : "NativeModule::AllocateForCode commit");
780 : UNREACHABLE();
781 : }
782 : // Opportunistically reduce the commit range. This might terminate the
783 : // loop early.
784 : if (commit_start == start) commit_start = end;
785 : if (commit_end == end) commit_end = start;
786 : if (commit_start >= commit_end) break;
787 : }
788 : #else
789 1547820 : if (!code_manager_->Commit(commit_start, commit_end - commit_start)) {
790 : V8::FatalProcessOutOfMemory(nullptr,
791 0 : "NativeModule::AllocateForCode commit");
792 : UNREACHABLE();
793 : }
794 : #endif
795 : }
796 : DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
797 41525123 : allocated_code_space_.Merge(code_space);
798 : TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, code_space.begin(),
799 : size);
800 41525124 : return {reinterpret_cast<byte*>(code_space.begin()), code_space.size()};
801 : }
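
// Worked example of the commit-range computation above, assuming 4 KiB
// pages: an allocation spanning [0x5F00, 0x7100) yields
// commit_start = RoundUp(0x5F00, 0x1000) = 0x6000 and
// commit_end = RoundUp(0x7100, 0x1000) = 0x8000, so only pages
// [0x6000, 0x8000) are newly committed; the page holding 0x5F00 was already
// committed by whichever allocation previously ended inside it.
#include <cstdint>

constexpr uintptr_t RoundUpSketch(uintptr_t x, uintptr_t alignment) {
  return (x + alignment - 1) & ~(alignment - 1);  // alignment: power of two
}
static_assert(RoundUpSketch(0x5F00, 0x1000) == 0x6000, "start rounds up");
static_assert(RoundUpSketch(0x7100, 0x1000) == 0x8000, "end rounds up");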
802 :
803 : namespace {
804 2900253 : class NativeModuleWireBytesStorage final : public WireBytesStorage {
805 : public:
806 : explicit NativeModuleWireBytesStorage(NativeModule* native_module)
807 2900253 : : native_module_(native_module) {}
808 :
809 1754074 : Vector<const uint8_t> GetCode(WireBytesRef ref) const final {
810 1754074 : return native_module_->wire_bytes().SubVector(ref.offset(),
811 3508148 : ref.end_offset());
812 : }
813 :
814 : private:
815 : NativeModule* const native_module_;
816 : };
817 : } // namespace
818 :
819 8534472 : void NativeModule::SetWireBytes(OwnedVector<const byte> wire_bytes) {
820 4267236 : wire_bytes_ = std::move(wire_bytes);
821 4267236 : if (!wire_bytes.is_empty()) {
822 : compilation_state_->SetWireBytesStorage(
823 11601012 : std::make_shared<NativeModuleWireBytesStorage>(this));
824 : }
825 4267236 : }
826 :
827 6429162 : WasmCode* NativeModule::Lookup(Address pc) const {
828 6429162 : base::MutexGuard lock(&allocation_mutex_);
829 6429162 : if (owned_code_.empty()) return nullptr;
830 : auto iter = std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
831 : WasmCodeUniquePtrComparator());
832 6429162 : if (iter == owned_code_.begin()) return nullptr;
833 : --iter;
834 : WasmCode* candidate = iter->get();
835 : DCHECK_NOT_NULL(candidate);
836 6429162 : return candidate->contains(pc) ? candidate : nullptr;
837 : }
838 :
839 402646 : Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
840 : // TODO(clemensh): Measure performance win of returning instruction start
841 : // directly if we have turbofan code. Downside: Redirecting functions (e.g.
842 : // for debugging) gets much harder.
843 :
844 : // Return the jump table slot for that function index.
845 : DCHECK_NOT_NULL(jump_table_);
846 439276 : uint32_t slot_idx = func_index - module_->num_imported_functions;
847 : uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
848 : DCHECK_LT(slot_offset, jump_table_->instructions().size());
849 878552 : return jump_table_->instruction_start() + slot_offset;
850 : }
851 :
852 180 : uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
853 : Address slot_address) const {
854 : DCHECK(is_jump_table_slot(slot_address));
855 : uint32_t slot_offset =
856 360 : static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
857 : uint32_t slot_idx = JumpTableAssembler::SlotOffsetToIndex(slot_offset);
858 : DCHECK_LT(slot_idx, module_->num_declared_functions);
859 180 : return module_->num_imported_functions + slot_idx;
860 : }
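
// Sketch of the two jump-table mappings above, assuming a hypothetical fixed
// slot size (the real {JumpTableAssembler} uses an architecture-specific
// value): a call target is "jump table base + slot offset", and the inverse
// mapping recovers the slot index from a slot address.
#include <cstdint>

constexpr uint32_t kSketchSlotSize = 8;  // assumption for illustration only

constexpr uint32_t SlotIndexToOffsetSketch(uint32_t slot_idx) {
  return slot_idx * kSketchSlotSize;
}
constexpr uint32_t SlotOffsetToIndexSketch(uint32_t slot_offset) {
  return slot_offset / kSketchSlotSize;
}
static_assert(SlotOffsetToIndexSketch(SlotIndexToOffsetSketch(17)) == 17,
              "the offset and index mappings are inverses");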
861 :
862 43 : void NativeModule::DisableTrapHandler() {
863 : // Switch {use_trap_handler_} from true to false.
864 : DCHECK(use_trap_handler_);
865 43 : use_trap_handler_ = kNoTrapHandler;
866 :
867 : // Clear the code table (just to increase the chance of hitting an error if
868 : // we forget to re-add all code).
869 43 : uint32_t num_wasm_functions = module_->num_declared_functions;
870 43 : memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
871 :
872 : // TODO(clemensh): Actually free the owned code, such that the memory can be
873 : // recycled.
874 43 : }
875 :
876 3061179 : NativeModule::~NativeModule() {
877 : TRACE_HEAP("Deleting native module: %p\n", reinterpret_cast<void*>(this));
878 : // Cancel all background compilation before resetting any field of the
879 : // NativeModule or freeing anything.
880 1530589 : compilation_state_->CancelAndWait();
881 1530590 : code_manager_->FreeNativeModule(this);
882 1530590 : }
883 :
884 61326 : WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
885 : size_t max_committed)
886 : : memory_tracker_(memory_tracker),
887 : remaining_uncommitted_code_space_(max_committed),
888 245304 : critical_uncommitted_code_space_(max_committed / 2) {
889 : DCHECK_LE(max_committed, kMaxWasmCodeMemory);
890 61326 : }
891 :
892 1547819 : bool WasmCodeManager::Commit(Address start, size_t size) {
893 : // TODO(v8:8462) Remove eager commit once perf supports remapping.
894 1547819 : if (FLAG_perf_prof) return true;
895 : DCHECK(IsAligned(start, AllocatePageSize()));
896 : DCHECK(IsAligned(size, AllocatePageSize()));
897 : // Reserve the size. Use a CAS loop to avoid underflow on
898 : // {remaining_uncommitted_code_space_}; a temporary underflow would allow
899 : // concurrent threads to over-commit.
900 1547821 : size_t old_value = remaining_uncommitted_code_space_.load();
901 : while (true) {
902 1547824 : if (old_value < size) return false;
903 1547824 : if (remaining_uncommitted_code_space_.compare_exchange_weak(
904 1547824 : old_value, old_value - size)) {
905 : break;
906 : }
907 : }
908 : PageAllocator::Permission permission = FLAG_wasm_write_protect_code_memory
909 : ? PageAllocator::kReadWrite
910 1547820 : : PageAllocator::kReadWriteExecute;
911 :
912 : bool ret =
913 1547820 : SetPermissions(GetPlatformPageAllocator(), start, size, permission);
914 : TRACE_HEAP("Setting rw permissions for %p:%p\n",
915 : reinterpret_cast<void*>(start),
916 : reinterpret_cast<void*>(start + size));
917 :
918 1547822 : if (!ret) {
919 : // Highly unlikely.
920 : remaining_uncommitted_code_space_.fetch_add(size);
921 0 : return false;
922 : }
923 : return ret;
924 : }
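
// Sketch of the CAS reservation loop in {Commit}: decrement an atomic budget
// without ever letting it dip below zero, even with concurrent committers. A
// plain {fetch_sub} could transiently underflow, letting another thread
// over-commit before the underflow is corrected.
#include <atomic>
#include <cstddef>

bool TryReserveSketch(std::atomic<size_t>* budget, size_t size) {
  size_t old_value = budget->load();
  while (true) {
    if (old_value < size) return false;  // not enough budget left
    // On failure, compare_exchange_weak reloads {old_value}, and we retry.
    if (budget->compare_exchange_weak(old_value, old_value - size)) {
      return true;
    }
  }
}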
925 :
926 0 : void WasmCodeManager::AssignRanges(Address start, Address end,
927 : NativeModule* native_module) {
928 0 : base::MutexGuard lock(&native_modules_mutex_);
929 0 : lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
930 0 : }
931 :
932 1530583 : void WasmCodeManager::AssignRangesAndAddModule(Address start, Address end,
933 : NativeModule* native_module) {
934 1530583 : base::MutexGuard lock(&native_modules_mutex_);
935 1530590 : lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
936 : native_modules_.emplace(native_module);
937 1530590 : }
938 :
939 1531719 : VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
940 1531719 : v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
941 : DCHECK_GT(size, 0);
942 1531724 : size = RoundUp(size, page_allocator->AllocatePageSize());
943 1531740 : if (!memory_tracker_->ReserveAddressSpace(size,
944 1531722 : WasmMemoryTracker::kHardLimit)) {
945 1150 : return {};
946 : }
947 1530590 : if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();
948 :
949 : VirtualMemory mem(page_allocator, size, hint,
950 1530590 : page_allocator->AllocatePageSize());
951 1530590 : if (!mem.IsReserved()) {
952 0 : memory_tracker_->ReleaseReservation(size);
953 0 : return {};
954 : }
955 : TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
956 : reinterpret_cast<void*>(mem.address()),
957 : reinterpret_cast<void*>(mem.end()), mem.size());
958 :
959 : // TODO(v8:8462) Remove eager commit once perf supports remapping.
960 1530590 : if (FLAG_perf_prof) {
961 : SetPermissions(GetPlatformPageAllocator(), mem.address(), mem.size(),
962 0 : PageAllocator::kReadWriteExecute);
963 : }
964 1530590 : return mem;
965 : }
966 :
967 83480 : void WasmCodeManager::SampleModuleSizes(Isolate* isolate) const {
968 83480 : base::MutexGuard lock(&native_modules_mutex_);
969 991354 : for (NativeModule* native_module : native_modules_) {
970 : int code_size =
971 824394 : static_cast<int>(native_module->committed_code_space_.load() / MB);
972 824394 : isolate->counters()->wasm_module_code_size_mb()->AddSample(code_size);
973 : }
974 83480 : }
975 :
976 14 : void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
977 : remaining_uncommitted_code_space_.store(limit);
978 14 : critical_uncommitted_code_space_.store(limit / 2);
979 14 : }
980 :
981 : namespace {
982 :
983 83480 : void ModuleSamplingCallback(v8::Isolate* v8_isolate, v8::GCType type,
984 : v8::GCCallbackFlags flags, void* data) {
985 : Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
986 83480 : isolate->wasm_engine()->code_manager()->SampleModuleSizes(isolate);
987 83480 : }
988 :
989 : } // namespace
990 :
991 : // static
992 62883 : void WasmCodeManager::InstallSamplingGCCallback(Isolate* isolate) {
993 : isolate->heap()->AddGCEpilogueCallback(ModuleSamplingCallback,
994 62883 : v8::kGCTypeMarkSweepCompact, nullptr);
995 62883 : }
996 :
997 : // static
998 1691092 : size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
999 : constexpr size_t kCodeSizeMultiplier = 4;
1000 : constexpr size_t kCodeOverhead = 32; // for prologue, stack check, ...
1001 : constexpr size_t kStaticCodeSize = 512; // runtime stubs, ...
1002 : constexpr size_t kImportSize = 64 * kSystemPointerSize;
1003 :
1004 : size_t estimate = kStaticCodeSize;
1005 4222029 : for (auto& function : module->functions) {
1006 839845 : estimate += kCodeOverhead + kCodeSizeMultiplier * function.code.length();
1007 : }
1008 : estimate +=
1009 3382184 : JumpTableAssembler::SizeForNumberOfSlots(module->num_declared_functions);
1010 1691092 : estimate += kImportSize * module->num_imported_functions;
1011 :
1012 1691092 : return estimate;
1013 : }
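
// Worked example of the estimate above: a module with two declared functions
// whose bodies are 100 and 200 wire bytes, three imports, a 64-bit target
// (kSystemPointerSize == 8), and a hypothetical jump table size of
// {SizeForNumberOfSlots(2) == 16} bytes:
//   estimate = 512                // kStaticCodeSize
//            + (32 + 4 * 100)     // first function: overhead + multiplier
//            + (32 + 4 * 200)     // second function
//            + 16                 // jump table
//            + 3 * (64 * 8)       // imports
//            = 512 + 432 + 832 + 16 + 1536 = 3328 bytes.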
1014 :
1015 : // static
1016 1528571 : size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
1017 : const WasmModule* module) {
1018 1528571 : size_t wasm_module_estimate = EstimateStoredSize(module);
1019 :
1020 1528578 : uint32_t num_wasm_functions = module->num_declared_functions;
1021 :
1022 : // TODO(wasm): Include wire bytes size.
1023 : size_t native_module_estimate =
1024 : sizeof(NativeModule) + /* NativeModule struct */
1025 1528578 : (sizeof(WasmCode*) * num_wasm_functions) + /* code table size */
1026 1528578 : (sizeof(WasmCode) * num_wasm_functions); /* code object size */
1027 :
1028 1528578 : return wasm_module_estimate + native_module_estimate;
1029 : }
1030 :
1031 1530572 : std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
1032 : Isolate* isolate, const WasmFeatures& enabled, size_t code_size_estimate,
1033 : bool can_request_more, std::shared_ptr<const WasmModule> module) {
1034 : DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
1035 1530572 : if (remaining_uncommitted_code_space_.load() <
1036 : critical_uncommitted_code_space_.load()) {
1037 : (reinterpret_cast<v8::Isolate*>(isolate))
1038 68 : ->MemoryPressureNotification(MemoryPressureLevel::kCritical);
1039 : critical_uncommitted_code_space_.store(
1040 68 : remaining_uncommitted_code_space_.load() / 2);
1041 : }
1042 :
1043 : // If the code must be contiguous, reserve enough address space up front.
1044 : size_t code_vmem_size =
1045 : kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
1046 : // Try up to two times; getting rid of dead JSArrayBuffer allocations might
1047 : // require two GCs, because the first GC may be incremental and may leave
1048 : // floating garbage.
1049 : static constexpr int kAllocationRetries = 2;
1050 1530572 : VirtualMemory code_space;
1051 1150 : for (int retries = 0;; ++retries) {
1052 3063462 : code_space = TryAllocate(code_vmem_size);
1053 1531740 : if (code_space.IsReserved()) break;
1054 1150 : if (retries == kAllocationRetries) {
1055 0 : V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
1056 : UNREACHABLE();
1057 : }
1058 : // Run one GC, then try the allocation again.
1059 : isolate->heap()->MemoryPressureNotification(MemoryPressureLevel::kCritical,
1060 1150 : true);
1061 1150 : }
1062 :
1063 : Address start = code_space.address();
1064 : size_t size = code_space.size();
1065 : Address end = code_space.end();
1066 : std::unique_ptr<NativeModule> ret(new NativeModule(
1067 : isolate, enabled, can_request_more, std::move(code_space),
1068 4591770 : isolate->wasm_engine()->code_manager(), std::move(module)));
1069 : TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
1070 : size);
1071 1530590 : AssignRangesAndAddModule(start, end, ret.get());
1072 1530590 : return ret;
1073 : }
1074 :
1075 925190 : bool NativeModule::SetExecutable(bool executable) {
1076 925190 : if (is_executable_ == executable) return true;
1077 : TRACE_HEAP("Setting module %p as executable: %d.\n", this, executable);
1078 :
1079 910965 : v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
1080 :
1081 910965 : if (FLAG_wasm_write_protect_code_memory) {
1082 : PageAllocator::Permission permission =
1083 0 : executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
1084 : #if V8_OS_WIN
1085 : // On Windows, we need to switch permissions per separate virtual memory
1086 : // reservation. This is really only a problem when the NativeModule is
1087 : // growable (meaning can_request_more_memory_), which is the case on 32-bit
1088 : // platforms in production, and in unittests.
1089 : // For now, in that case, we commit at reserved memory granularity.
1090 : // Technically, that may be a waste, because we may reserve more than we
1091 : // use. On 32-bit, though, the scarce resource is the address space -
1092 : // committed or not.
1093 : if (can_request_more_memory_) {
1094 : for (auto& vmem : owned_code_space_) {
1095 : if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
1096 : permission)) {
1097 : return false;
1098 : }
1099 : TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
1100 : executable);
1101 : }
1102 : is_executable_ = executable;
1103 : return true;
1104 : }
1105 : #endif
1106 0 : for (auto& region : allocated_code_space_.regions()) {
1107 : // allocated_code_space_ is fine-grained, so we need to
1108 : // page-align it.
1109 : size_t region_size =
1110 0 : RoundUp(region.size(), page_allocator->AllocatePageSize());
1111 0 : if (!SetPermissions(page_allocator, region.begin(), region_size,
1112 : permission)) {
1113 : return false;
1114 : }
1115 : TRACE_HEAP("Set %p:%p to executable:%d\n",
1116 : reinterpret_cast<void*>(region.begin()),
1117 : reinterpret_cast<void*>(region.end()), executable);
1118 : }
1119 : }
1120 910965 : is_executable_ = executable;
1121 910965 : return true;
1122 : }
1123 :
1124 1530590 : void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
1125 1530590 : base::MutexGuard lock(&native_modules_mutex_);
1126 : DCHECK_EQ(1, native_modules_.count(native_module));
1127 : native_modules_.erase(native_module);
1128 : TRACE_HEAP("Freeing NativeModule %p\n", native_module);
1129 6122360 : for (auto& code_space : native_module->owned_code_space_) {
1130 : DCHECK(code_space.IsReserved());
1131 : TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n",
1132 : code_space.address(), code_space.end(), code_space.size());
1133 3061180 : lookup_map_.erase(code_space.address());
1134 1530590 : memory_tracker_->ReleaseReservation(code_space.size());
1135 1530590 : code_space.Free();
1136 : DCHECK(!code_space.IsReserved());
1137 : }
1138 1530590 : native_module->owned_code_space_.clear();
1139 :
1140 1530590 : size_t code_size = native_module->committed_code_space_.load();
1141 : DCHECK(IsAligned(code_size, AllocatePageSize()));
1142 : remaining_uncommitted_code_space_.fetch_add(code_size);
1143 : // Remaining code space cannot grow bigger than maximum code space size.
1144 : DCHECK_LE(remaining_uncommitted_code_space_.load(), kMaxWasmCodeMemory);
1145 1530590 : }
1146 :
1147 53165541 : NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
1148 53165541 : base::MutexGuard lock(&native_modules_mutex_);
1149 53165665 : if (lookup_map_.empty()) return nullptr;
1150 :
1151 : auto iter = lookup_map_.upper_bound(pc);
1152 13708469 : if (iter == lookup_map_.begin()) return nullptr;
1153 : --iter;
1154 12757778 : Address region_start = iter->first;
1155 12757778 : Address region_end = iter->second.first;
1156 12757778 : NativeModule* candidate = iter->second.second;
1157 :
1158 : DCHECK_NOT_NULL(candidate);
1159 12757778 : return region_start <= pc && pc < region_end ? candidate : nullptr;
1160 : }
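
// Sketch of the map-based region lookup above (hypothetical payload type):
// keying the map by region start makes {upper_bound(pc)} return the first
// region starting strictly after {pc}, so the previous entry is the only one
// that can contain it - the same trick as the {owned_code_} lookup, but on a
// {std::map}.
#include <cstdint>
#include <map>
#include <utility>

// start address -> (end address, payload)
using RegionMap = std::map<uintptr_t, std::pair<uintptr_t, int>>;

const int* LookupRegionSketch(const RegionMap& map, uintptr_t pc) {
  auto it = map.upper_bound(pc);
  if (it == map.begin()) return nullptr;  // pc lies below every region
  --it;
  uintptr_t region_start = it->first;
  uintptr_t region_end = it->second.first;
  return region_start <= pc && pc < region_end ? &it->second.second : nullptr;
}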
1161 :
1162 53158748 : WasmCode* WasmCodeManager::LookupCode(Address pc) const {
1163 53158748 : NativeModule* candidate = LookupNativeModule(pc);
1164 53158863 : return candidate ? candidate->Lookup(pc) : nullptr;
1165 : }
1166 :
1167 19 : size_t WasmCodeManager::remaining_uncommitted_code_space() const {
1168 19 : return remaining_uncommitted_code_space_.load();
1169 : }
1170 :
1171 : // TODO(v8:7424): Code protection scopes are not yet supported with shared code
1172 : // enabled and need to be revisited to work with --wasm-shared-code as well.
1173 162889 : NativeModuleModificationScope::NativeModuleModificationScope(
1174 : NativeModule* native_module)
1175 162889 : : native_module_(native_module) {
1176 162889 : if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1177 0 : (native_module_->modification_scope_depth_++) == 0) {
1178 0 : bool success = native_module_->SetExecutable(false);
1179 0 : CHECK(success);
1180 : }
1181 162889 : }
1182 :
1183 162877 : NativeModuleModificationScope::~NativeModuleModificationScope() {
1184 162877 : if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1185 0 : (native_module_->modification_scope_depth_--) == 1) {
1186 0 : bool success = native_module_->SetExecutable(true);
1187 0 : CHECK(success);
1188 : }
1189 162877 : }
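
// Sketch of the scope's contract (hypothetical {ModuleSketch} stand-in): a
// depth counter makes nested scopes cheap, flipping the code memory to
// writable on the outermost entry and back to executable on the outermost
// exit (a W^X policy).
struct ModuleSketch {
  int modification_scope_depth = 0;
  void SetExecutable(bool executable) { (void)executable; /* flip page permissions */ }
};

class ModificationScopeSketch {
 public:
  explicit ModificationScopeSketch(ModuleSketch* module) : module_(module) {
    if (module_->modification_scope_depth++ == 0) {
      module_->SetExecutable(false);  // outermost entry: make writable
    }
  }
  ~ModificationScopeSketch() {
    if (--module_->modification_scope_depth == 0) {
      module_->SetExecutable(true);  // outermost exit: make executable again
    }
  }

 private:
  ModuleSketch* const module_;
};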
1190 :
1191 : } // namespace wasm
1192 : } // namespace internal
1193 183867 : } // namespace v8
1194 : #undef TRACE_HEAP
|