// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/wasm/wasm-code-manager.h"

#include <iomanip>

#include "src/assembler-inl.h"
#include "src/base/adapters.h"
#include "src/base/macros.h"
#include "src/base/platform/platform.h"
#include "src/disassembler.h"
#include "src/globals.h"
#include "src/log.h"
#include "src/macro-assembler-inl.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/jump-table-assembler.h"
#include "src/wasm/wasm-import-wrapper-cache-inl.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-objects.h"

#define TRACE_HEAP(...)                                   \
  do {                                                    \
    if (FLAG_trace_wasm_native_heap) PrintF(__VA_ARGS__); \
  } while (false)

namespace v8 {
namespace internal {
namespace wasm {

namespace {

// Binary predicate to perform lookups in {NativeModule::owned_code_} with a
// given address into a code object. Use with {std::upper_bound} for example.
struct WasmCodeUniquePtrComparator {
  bool operator()(Address pc, const std::unique_ptr<WasmCode>& code) const {
    DCHECK_NE(kNullAddress, pc);
    DCHECK_NOT_NULL(code);
    return pc < code->instruction_start();
  }
};

}  // namespace

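// {regions_} is kept sorted by address and fully disjoint. {Merge} inserts
// {region} at the right position and coalesces it with an adjacent region
// below and/or above, so at most one merge in each direction can happen per
// call and the invariant is preserved.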
void DisjointAllocationPool::Merge(base::AddressRegion region) {
  auto dest_it = regions_.begin();
  auto dest_end = regions_.end();

  // Skip over dest regions strictly before {region}.
  while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;

  // After last dest region: insert and done.
  if (dest_it == dest_end) {
    regions_.push_back(region);
    return;
  }

  // Adjacent (from below) to dest: merge and done.
  if (dest_it->begin() == region.end()) {
    base::AddressRegion merged_region{region.begin(),
                                      region.size() + dest_it->size()};
    DCHECK_EQ(merged_region.end(), dest_it->end());
    *dest_it = merged_region;
    return;
  }

  // Before dest: insert and done.
  if (dest_it->begin() > region.end()) {
    regions_.insert(dest_it, region);
    return;
  }

  // Src is adjacent from above. Merge and check whether the merged region is
  // now adjacent to the next region.
  DCHECK_EQ(dest_it->end(), region.begin());
  dest_it->set_size(dest_it->size() + region.size());
  DCHECK_EQ(dest_it->end(), region.end());
  auto next_dest = dest_it;
  ++next_dest;
  if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
    dest_it->set_size(dest_it->size() + next_dest->size());
    DCHECK_EQ(dest_it->end(), next_dest->end());
    regions_.erase(next_dest);
  }
}

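// First-fit allocation: scan {regions_} in address order and carve {size}
// bytes out of the first region that is large enough. Returns an empty region
// on failure.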
base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
  for (auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
    if (size > it->size()) continue;
    base::AddressRegion ret{it->begin(), size};
    if (size == it->size()) {
      regions_.erase(it);
    } else {
      *it = base::AddressRegion{it->begin() + size, it->size() - size};
    }
    return ret;
  }
  return {};
}

Address WasmCode::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    if (constant_pool_offset_ < code_comments_offset_) {
      return instruction_start() + constant_pool_offset_;
    }
  }
  return kNullAddress;
}

Address WasmCode::code_comments() const {
  if (code_comments_offset_ < unpadded_binary_size_) {
    return instruction_start() + code_comments_offset_;
  }
  return kNullAddress;
}

size_t WasmCode::trap_handler_index() const {
  CHECK(HasTrapHandlerIndex());
  return static_cast<size_t>(trap_handler_index_);
}

void WasmCode::set_trap_handler_index(size_t value) {
  trap_handler_index_ = value;
}

void WasmCode::RegisterTrapHandlerData() {
  DCHECK(!HasTrapHandlerIndex());
  if (kind() != WasmCode::kFunction) return;

  Address base = instruction_start();

  size_t size = instructions().size();
  const int index =
      RegisterHandlerData(base, size, protected_instructions().size(),
                          protected_instructions().start());

  // TODO(eholk): if index is negative, fail.
  CHECK_LE(0, index);
  set_trap_handler_index(static_cast<size_t>(index));
}

bool WasmCode::HasTrapHandlerIndex() const { return trap_handler_index_ >= 0; }

bool WasmCode::ShouldBeLogged(Isolate* isolate) {
  // The return value is cached in {WasmEngine::IsolateData::log_codes}. Ensure
  // to call {WasmEngine::EnableCodeLogging} if this return value would change
  // for any isolate. Otherwise we might lose code events.
  return isolate->logger()->is_listening_to_code_events() ||
         isolate->is_profiling();
}

void WasmCode::LogCode(Isolate* isolate) const {
  DCHECK(ShouldBeLogged(isolate));
  if (IsAnonymous()) return;

  ModuleWireBytes wire_bytes(native_module()->wire_bytes());
  // TODO(herhut): Allow to log code without on-heap round-trip of the name.
  WireBytesRef name_ref =
      native_module()->module()->LookupFunctionName(wire_bytes, index());
  WasmName name_vec = wire_bytes.GetNameOrNull(name_ref);
  if (!name_vec.is_empty()) {
    HandleScope scope(isolate);
    MaybeHandle<String> maybe_name = isolate->factory()->NewStringFromUtf8(
        Vector<const char>::cast(name_vec));
    Handle<String> name;
    if (!maybe_name.ToHandle(&name)) {
      name = isolate->factory()->NewStringFromAsciiChecked("<name too long>");
    }
    int name_length;
    auto cname =
        name->ToCString(AllowNullsFlag::DISALLOW_NULLS,
                        RobustnessFlag::ROBUST_STRING_TRAVERSAL, &name_length);
    PROFILE(isolate,
            CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                            {cname.get(), static_cast<size_t>(name_length)}));
  } else {
    EmbeddedVector<char, 32> generated_name;
    int length = SNPrintF(generated_name, "wasm-function[%d]", index());
    generated_name.Truncate(length);
    PROFILE(isolate, CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                                     generated_name));
  }

  if (!source_positions().is_empty()) {
    LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(instruction_start(),
                                                       source_positions()));
  }
}

void WasmCode::Validate() const {
#ifdef DEBUG
  // We expect certain relocation info modes to never appear in {WasmCode}
  // objects or to be restricted to a small set of valid values. Hence the
  // iteration below does not use a mask, but visits all relocation data.
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    switch (mode) {
      case RelocInfo::WASM_CALL: {
        Address target = it.rinfo()->wasm_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->jump_table_, code);
        CHECK(code->contains(target));
        break;
      }
      case RelocInfo::WASM_STUB_CALL: {
        Address target = it.rinfo()->wasm_stub_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
#ifdef V8_EMBEDDED_BUILTINS
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->runtime_stub_table_, code);
        CHECK(code->contains(target));
#else
        CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
        CHECK_EQ(target, code->instruction_start());
#endif
        break;
      }
      case RelocInfo::INTERNAL_REFERENCE:
      case RelocInfo::INTERNAL_REFERENCE_ENCODED: {
        Address target = it.rinfo()->target_internal_reference();
        CHECK(contains(target));
        break;
      }
      case RelocInfo::EXTERNAL_REFERENCE:
      case RelocInfo::CONST_POOL:
      case RelocInfo::VENEER_POOL:
        // These are OK to appear.
        break;
      default:
        FATAL("Unexpected mode: %d", mode);
    }
  }
#endif
}

void WasmCode::MaybePrint(const char* name) const {
  // Determines whether flags want this code to be printed.
  if ((FLAG_print_wasm_code && kind() == kFunction) ||
      (FLAG_print_wasm_stub_code && kind() != kFunction) || FLAG_print_code) {
    Print(name);
  }
}

void WasmCode::Print(const char* name) const {
  StdoutStream os;
  os << "--- WebAssembly code ---\n";
  Disassemble(name, os);
  os << "--- End code ---\n";
}

void WasmCode::Disassemble(const char* name, std::ostream& os,
                           Address current_pc) const {
  if (name) os << "name: " << name << "\n";
  if (!IsAnonymous()) os << "index: " << index() << "\n";
  os << "kind: " << GetWasmCodeKindAsString(kind_) << "\n";
  os << "compiler: " << (is_liftoff() ? "Liftoff" : "TurboFan") << "\n";
  size_t padding = instructions().size() - unpadded_binary_size_;
  os << "Body (size = " << instructions().size() << " = "
     << unpadded_binary_size_ << " + " << padding << " padding)\n";

#ifdef ENABLE_DISASSEMBLER
  size_t instruction_size = unpadded_binary_size_;
  if (constant_pool_offset_ < instruction_size) {
    instruction_size = constant_pool_offset_;
  }
  if (safepoint_table_offset_ && safepoint_table_offset_ < instruction_size) {
    instruction_size = safepoint_table_offset_;
  }
  if (handler_table_offset_ && handler_table_offset_ < instruction_size) {
    instruction_size = handler_table_offset_;
  }
  DCHECK_LT(0, instruction_size);
  os << "Instructions (size = " << instruction_size << ")\n";
  Disassembler::Decode(nullptr, &os, instructions().start(),
                       instructions().start() + instruction_size,
                       CodeReference(this), current_pc);
  os << "\n";

  if (handler_table_offset_ > 0) {
    HandlerTable table(instruction_start(), handler_table_offset_);
    os << "Exception Handler Table (size = " << table.NumberOfReturnEntries()
       << "):\n";
    table.HandlerTableReturnPrint(os);
    os << "\n";
  }

  if (!protected_instructions_.is_empty()) {
    os << "Protected instructions:\n pc offset  land pad\n";
    for (auto& data : protected_instructions()) {
      os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
         << std::hex << data.landing_offset << "\n";
    }
    os << "\n";
  }

  if (!source_positions().is_empty()) {
    os << "Source positions:\n pc offset  position\n";
    for (SourcePositionTableIterator it(source_positions()); !it.done();
         it.Advance()) {
      os << std::setw(10) << std::hex << it.code_offset() << std::dec
         << std::setw(10) << it.source_position().ScriptOffset()
         << (it.is_statement() ? "  statement" : "") << "\n";
    }
    os << "\n";
  }

  if (safepoint_table_offset_ > 0) {
    SafepointTable table(instruction_start(), safepoint_table_offset_,
                         stack_slots_);
    os << "Safepoints (size = " << table.size() << ")\n";
    for (uint32_t i = 0; i < table.length(); i++) {
      uintptr_t pc_offset = table.GetPcOffset(i);
      os << reinterpret_cast<const void*>(instruction_start() + pc_offset);
      os << std::setw(6) << std::hex << pc_offset << "  " << std::dec;
      table.PrintEntry(i, os);
      os << " (sp -> fp)";
      SafepointEntry entry = table.GetEntry(i);
      if (entry.trampoline_pc() != -1) {
        os << " trampoline: " << std::hex << entry.trampoline_pc() << std::dec;
      }
      if (entry.has_deoptimization_index()) {
        os << " deopt: " << std::setw(6) << entry.deoptimization_index();
      }
      os << "\n";
    }
    os << "\n";
  }

  os << "RelocInfo (size = " << reloc_info_.size() << ")\n";
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    it.rinfo()->Print(nullptr, os);
  }
  os << "\n";

  if (code_comments_offset() < unpadded_binary_size_) {
    Address code_comments = reinterpret_cast<Address>(instructions().start() +
                                                      code_comments_offset());
    PrintCodeCommentsSection(os, code_comments);
  }
#endif  // ENABLE_DISASSEMBLER
}

const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
  switch (kind) {
    case WasmCode::kFunction:
      return "wasm function";
    case WasmCode::kWasmToJsWrapper:
      return "wasm-to-js";
    case WasmCode::kLazyStub:
      return "lazy-compile";
    case WasmCode::kRuntimeStub:
      return "runtime-stub";
    case WasmCode::kInterpreterEntry:
      return "interpreter entry";
    case WasmCode::kJumpTable:
      return "jump table";
  }
  return "unknown kind";
}

WasmCode::~WasmCode() {
  if (HasTrapHandlerIndex()) {
    CHECK_LT(trap_handler_index(),
             static_cast<size_t>(std::numeric_limits<int>::max()));
    trap_handler::ReleaseHandlerData(static_cast<int>(trap_handler_index()));
  }
}

NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled,
                           bool can_request_more, VirtualMemory code_space,
                           std::shared_ptr<const WasmModule> module,
                           std::shared_ptr<Counters> async_counters)
    : enabled_features_(enabled),
      module_(std::move(module)),
      compilation_state_(
          CompilationState::New(this, std::move(async_counters))),
      import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
          new WasmImportWrapperCache(this))),
      free_code_space_(code_space.region()),
      engine_(engine),
      can_request_more_memory_(can_request_more),
      use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
                                                             : kNoTrapHandler) {
  DCHECK_NOT_NULL(module_);
  owned_code_space_.emplace_back(std::move(code_space));
  owned_code_.reserve(num_functions());

  uint32_t num_wasm_functions = module_->num_declared_functions;
  if (num_wasm_functions > 0) {
    code_table_.reset(new WasmCode*[num_wasm_functions]);
    memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));

    jump_table_ = CreateEmptyJumpTable(
        JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
  }
}

void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
  DCHECK_LE(num_functions(), max_functions);
  WasmCode** new_table = new WasmCode*[max_functions];
  memset(new_table, 0, max_functions * sizeof(*new_table));
  if (module_->num_declared_functions > 0) {
    memcpy(new_table, code_table_.get(),
           module_->num_declared_functions * sizeof(*new_table));
  }
  code_table_.reset(new_table);

  // Re-allocate jump table.
  jump_table_ = CreateEmptyJumpTable(
      JumpTableAssembler::SizeForNumberOfSlots(max_functions));
}

void NativeModule::LogWasmCodes(Isolate* isolate) {
  if (!WasmCode::ShouldBeLogged(isolate)) return;

  // TODO(titzer): we skip the logging of the import wrappers
  // here, but they should be included somehow.
  for (WasmCode* code : code_table()) {
    if (code != nullptr) code->LogCode(isolate);
  }
}

CompilationEnv NativeModule::CreateCompilationEnv() const {
  return {module(), use_trap_handler_, kRuntimeExceptionSupport,
          enabled_features_};
}

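// Allocates executable memory for {instructions.size()} bytes, copies the
// instructions there and registers the new {WasmCode} in {owned_code_}.
// Relocation and i-cache flushing are left to the callers.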
WasmCode* NativeModule::AddOwnedCode(
    uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
    uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
    size_t handler_table_offset, size_t constant_pool_offset,
    size_t code_comments_offset, size_t unpadded_binary_size,
    OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> reloc_info,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    WasmCode::Tier tier) {
  CHECK(!FLAG_jitless);  // TODO(jgruber): Support wasm in jitless mode.

  WasmCode* code;
  {
    // Both allocation and insertion in owned_code_ happen in the same critical
    // section, thus ensuring owned_code_'s elements are rarely if ever moved.
    base::MutexGuard lock(&allocation_mutex_);
    Vector<byte> executable_buffer = AllocateForCode(instructions.size());
    // Ownership will be transferred to {owned_code_} below.
    code = new WasmCode(
        this, index, executable_buffer, stack_slots, tagged_parameter_slots,
        safepoint_table_offset, handler_table_offset, constant_pool_offset,
        code_comments_offset, unpadded_binary_size,
        std::move(protected_instructions), std::move(reloc_info),
        std::move(source_position_table), kind, tier);

    if (owned_code_.empty() ||
        code->instruction_start() > owned_code_.back()->instruction_start()) {
      // Common case.
      owned_code_.emplace_back(code);
    } else {
      // Slow but unlikely case.
      // TODO(mtrofin): We allocate in increasing address order, and
      // even if we end up with segmented memory, we may end up only with a few
      // large moves - if, for example, a new segment is below the current ones.
      auto insert_before = std::upper_bound(
          owned_code_.begin(), owned_code_.end(), code->instruction_start(),
          WasmCodeUniquePtrComparator{});
      owned_code_.emplace(insert_before, code);
    }
  }
  memcpy(reinterpret_cast<void*>(code->instruction_start()),
         instructions.start(), instructions.size());

  return code;
}

WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
  WasmCode* ret = AddAnonymousCode(code, WasmCode::kFunction);
  return ret;
}

void NativeModule::SetLazyBuiltin(Handle<Code> code) {
  uint32_t num_wasm_functions = module_->num_declared_functions;
  if (num_wasm_functions == 0) return;
  WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub);
  // Fill the jump table with jumps to the lazy compile stub.
  Address lazy_compile_target = lazy_builtin->instruction_start();
  for (uint32_t i = 0; i < num_wasm_functions; ++i) {
    JumpTableAssembler::EmitLazyCompileJumpSlot(
        jump_table_->instruction_start(), i,
        i + module_->num_imported_functions, lazy_compile_target,
        WasmCode::kNoFlushICache);
  }
  FlushInstructionCache(jump_table_->instructions().start(),
                        jump_table_->instructions().size());
}

void NativeModule::SetRuntimeStubs(Isolate* isolate) {
  // TODO(mstarzinger): Switch this from accessing the {Isolate} to using the
  // embedded blob directly. This will allow us to do this from the background.
  HandleScope scope(isolate);
  DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
#ifdef V8_EMBEDDED_BUILTINS
  WasmCode* jump_table =
      CreateEmptyJumpTable(JumpTableAssembler::SizeForNumberOfStubSlots(
          WasmCode::kRuntimeStubCount));
  Address base = jump_table->instruction_start();
#define RUNTIME_STUB(Name) {Builtins::k##Name, WasmCode::k##Name},
#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
  std::pair<Builtins::Name, WasmCode::RuntimeStubId> wasm_runtime_stubs[] = {
      WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
#undef RUNTIME_STUB
#undef RUNTIME_STUB_TRAP
  for (auto pair : wasm_runtime_stubs) {
    Handle<Code> builtin_code = isolate->builtins()->builtin_handle(pair.first);
    CHECK(builtin_code->is_off_heap_trampoline());
    JumpTableAssembler::EmitRuntimeStubSlot(
        base, pair.second, builtin_code->OffHeapInstructionStart(),
        WasmCode::kNoFlushICache);
    uint32_t slot_offset =
        JumpTableAssembler::StubSlotIndexToOffset(pair.second);
    runtime_stub_entries_[pair.second] = base + slot_offset;
  }
  FlushInstructionCache(jump_table->instructions().start(),
                        jump_table->instructions().size());
  DCHECK_NULL(runtime_stub_table_);
  runtime_stub_table_ = jump_table;
#else   // V8_EMBEDDED_BUILTINS
  USE(runtime_stub_table_);  // Actually unused, but avoids ifdef's in header.
#define COPY_BUILTIN(Name)                                                     \
  runtime_stub_entries_[WasmCode::k##Name] =                                   \
      AddAnonymousCode(isolate->builtins()->builtin_handle(Builtins::k##Name), \
                       WasmCode::kRuntimeStub, #Name)                          \
          ->instruction_start();
#define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
#undef COPY_BUILTIN_TRAP
#undef COPY_BUILTIN
#endif  // V8_EMBEDDED_BUILTINS
  DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
}

WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code, WasmCode::Kind kind,
                                         const char* name) {
  // For off-heap builtins, we create a copy of the off-heap instruction stream
  // instead of the on-heap code object containing the trampoline. Ensure that
  // we do not apply the on-heap reloc info to the off-heap instructions.
  const size_t relocation_size =
      code->is_off_heap_trampoline() ? 0 : code->relocation_size();
  OwnedVector<byte> reloc_info = OwnedVector<byte>::New(relocation_size);
  if (relocation_size > 0) {
    memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
  }
  Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
                                     code->GetIsolate());
  OwnedVector<byte> source_pos =
      OwnedVector<byte>::New(source_pos_table->length());
  if (source_pos_table->length() > 0) {
    source_pos_table->copy_out(0, source_pos.start(),
                               source_pos_table->length());
  }
  Vector<const byte> instructions(
      reinterpret_cast<byte*>(code->InstructionStart()),
      static_cast<size_t>(code->InstructionSize()));
  const int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0;

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // Code objects contain real offsets but WasmCode expects an offset of 0 to
  // mean 'empty'.
  const int safepoint_table_offset =
      code->has_safepoint_table() ? code->safepoint_table_offset() : 0;
  const int handler_table_offset =
      code->has_handler_table() ? code->handler_table_offset() : 0;

  WasmCode* ret =
      AddOwnedCode(WasmCode::kAnonymousFuncIndex,  // index
                   instructions,                   // instructions
                   stack_slots,                    // stack_slots
                   0,                              // tagged_parameter_slots
                   safepoint_table_offset,         // safepoint_table_offset
                   handler_table_offset,           // handler_table_offset
                   code->constant_pool_offset(),   // constant_pool_offset
                   code->code_comments_offset(),   // code_comments_offset
                   instructions.size(),            // unpadded_binary_size
                   {},                             // protected_instructions
                   std::move(reloc_info),          // reloc_info
                   std::move(source_pos),          // source positions
                   kind,                           // kind
                   WasmCode::kOther);              // tier

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = ret->instruction_start() - code->InstructionStart();
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  RelocIterator orig_it(*code, mode_mask);
  for (RelocIterator it(ret->instructions(), ret->reloc_info(),
                        ret->constant_pool(), mode_mask);
       !it.done(); it.next(), orig_it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache here instead of in AddOwnedCode, to include the changes
  // made while iterating over the RelocInfo above.
  FlushInstructionCache(ret->instructions().start(),
                        ret->instructions().size());
  ret->MaybePrint(name);
  ret->Validate();
  return ret;
}

WasmCode* NativeModule::AddCode(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_pos_table, WasmCode::Kind kind,
    WasmCode::Tier tier) {
  OwnedVector<byte> reloc_info = OwnedVector<byte>::New(desc.reloc_size);
  if (desc.reloc_size > 0) {
    memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
           desc.reloc_size);
  }

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // CodeDesc contains real offsets but WasmCode expects an offset of 0 to mean
  // 'empty'.
  const int safepoint_table_offset =
      desc.safepoint_table_size == 0 ? 0 : desc.safepoint_table_offset;
  const int handler_table_offset =
      desc.handler_table_size == 0 ? 0 : desc.handler_table_offset;

  WasmCode* ret = AddOwnedCode(
      index, {desc.buffer, static_cast<size_t>(desc.instr_size)}, stack_slots,
      tagged_parameter_slots, safepoint_table_offset, handler_table_offset,
      desc.constant_pool_offset, desc.code_comments_offset, desc.instr_size,
      std::move(protected_instructions), std::move(reloc_info),
      std::move(source_pos_table), kind, tier);

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = ret->instructions().start() - desc.buffer;
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  for (RelocIterator it(ret->instructions(), ret->reloc_info(),
                        ret->constant_pool(), mode_mask);
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmCall(mode)) {
      uint32_t call_tag = it.rinfo()->wasm_call_tag();
      Address target = GetCallTargetForFunction(call_tag);
      it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache here instead of in AddOwnedCode, to include the changes
  // made while iterating over the RelocInfo above.
  FlushInstructionCache(ret->instructions().start(),
                        ret->instructions().size());
  ret->MaybePrint();
  ret->Validate();
  return ret;
}

WasmCode* NativeModule::AddDeserializedCode(
    uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
    uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
    size_t handler_table_offset, size_t constant_pool_offset,
    size_t code_comments_offset, size_t unpadded_binary_size,
    OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> reloc_info,
    OwnedVector<const byte> source_position_table, WasmCode::Tier tier) {
  WasmCode* code = AddOwnedCode(
      index, instructions, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, unpadded_binary_size,
      std::move(protected_instructions), std::move(reloc_info),
      std::move(source_position_table), WasmCode::kFunction, tier);

  if (!code->protected_instructions_.is_empty()) {
    code->RegisterTrapHandlerData();
  }
  base::MutexGuard lock(&allocation_mutex_);
  InstallCode(code);
  // Note: we do not flush the i-cache here, since the code needs to be
  // relocated anyway. The caller is responsible for flushing the i-cache later.
  return code;
}

void NativeModule::PublishCode(WasmCode* code) {
  base::MutexGuard lock(&allocation_mutex_);
  // Skip publishing code if there is an active redirection to the interpreter
  // for the given function index, in order to preserve the redirection.
  if (has_interpreter_redirection(code->index())) return;

  if (!code->protected_instructions_.is_empty()) {
    code->RegisterTrapHandlerData();
  }
  InstallCode(code);
}

void NativeModule::PublishInterpreterEntry(WasmCode* code,
                                           uint32_t func_index) {
  code->index_ = func_index;
  base::MutexGuard lock(&allocation_mutex_);
  InstallCode(code);
  SetInterpreterRedirection(func_index);
}

std::vector<WasmCode*> NativeModule::SnapshotCodeTable() const {
  base::MutexGuard lock(&allocation_mutex_);
  std::vector<WasmCode*> result;
  result.reserve(code_table().size());
  for (WasmCode* code : code_table()) result.push_back(code);
  return result;
}

WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t jump_table_size) {
  // Only call this if we really need a jump table.
  DCHECK_LT(0, jump_table_size);
  OwnedVector<byte> instructions = OwnedVector<byte>::New(jump_table_size);
  memset(instructions.start(), 0, instructions.size());
  return AddOwnedCode(WasmCode::kAnonymousFuncIndex,  // index
                      instructions.as_vector(),       // instructions
                      0,                              // stack_slots
                      0,                              // tagged_parameter_slots
                      0,                              // safepoint_table_offset
                      0,                              // handler_table_offset
                      instructions.size(),            // constant_pool_offset
                      instructions.size(),            // code_comments_offset
                      instructions.size(),            // unpadded_binary_size
                      {},                             // protected_instructions
                      {},                             // reloc_info
                      {},                             // source_pos
                      WasmCode::kJumpTable,           // kind
                      WasmCode::kOther);              // tier
}

763 : DCHECK_LT(code->index(), num_functions());
764 : DCHECK_LE(module_->num_imported_functions, code->index());
765 :
766 : // Update code table, except for interpreter entries.
767 1069380 : if (code->kind() != WasmCode::kInterpreterEntry) {
768 3203252 : code_table_[code->index() - module_->num_imported_functions] = code;
769 : }
770 :
771 : // Patch jump table.
772 1069380 : uint32_t slot_idx = code->index() - module_->num_imported_functions;
773 : JumpTableAssembler::PatchJumpTableSlot(jump_table_->instruction_start(),
774 : slot_idx, code->instruction_start(),
775 2138760 : WasmCode::kFlushICache);
776 1069381 : }
777 :
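// Bump-allocates {size} bytes out of {free_code_space_} and commits any pages
// of the returned region that are not yet committed. If the current code
// space is exhausted and {can_request_more_memory_} is set, a new reservation
// is appended; otherwise we fail fatally.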
Vector<byte> NativeModule::AllocateForCode(size_t size) {
  DCHECK_LT(0, size);
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  // This happens under a lock assumed by the caller.
  size = RoundUp(size, kCodeAlignment);
  base::AddressRegion code_space = free_code_space_.Allocate(size);
  if (code_space.is_empty()) {
    if (!can_request_more_memory_) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }

    Address hint = owned_code_space_.empty() ? kNullAddress
                                             : owned_code_space_.back().end();

    VirtualMemory new_mem = engine_->code_manager()->TryAllocate(
        size, reinterpret_cast<void*>(hint));
    if (!new_mem.IsReserved()) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }
    engine_->code_manager()->AssignRanges(new_mem.address(), new_mem.end(),
                                          this);

    free_code_space_.Merge(new_mem.region());
    owned_code_space_.emplace_back(std::move(new_mem));
    code_space = free_code_space_.Allocate(size);
    DCHECK(!code_space.is_empty());
  }
  const Address page_size = page_allocator->AllocatePageSize();
  Address commit_start = RoundUp(code_space.begin(), page_size);
  Address commit_end = RoundUp(code_space.end(), page_size);
  // {commit_start} will be either code_space.start or the start of the next
  // page. {commit_end} will be the start of the page after the one in which
  // the allocation ends.
  // We start from an aligned start, and we know we allocated vmem in
  // page multiples.
  // We just need to commit what's not committed. The page in which we
  // start is already committed (or we start at the beginning of a page).
  // The end needs to be committed all through the end of the page.
  if (commit_start < commit_end) {
    committed_code_space_.fetch_add(commit_end - commit_start);
    // Committed code cannot grow bigger than maximum code space size.
    DCHECK_LE(committed_code_space_.load(), kMaxWasmCodeMemory);
#if V8_OS_WIN
    // On Windows, we cannot commit a region that straddles different
    // reservations of virtual memory. Because we bump-allocate, and because, if
    // we need more memory, we append that memory at the end of the
    // owned_code_space_ list, we traverse that list in reverse order to find
    // the reservation(s) that guide how to chunk the region to commit.
    for (auto& vmem : base::Reversed(owned_code_space_)) {
      if (commit_end <= vmem.address() || vmem.end() <= commit_start) continue;
      Address start = std::max(commit_start, vmem.address());
      Address end = std::min(commit_end, vmem.end());
      size_t commit_size = static_cast<size_t>(end - start);
      if (!engine_->code_manager()->Commit(start, commit_size)) {
        V8::FatalProcessOutOfMemory(nullptr,
                                    "NativeModule::AllocateForCode commit");
        UNREACHABLE();
      }
      // Opportunistically reduce the commit range. This might terminate the
      // loop early.
      if (commit_start == start) commit_start = end;
      if (commit_end == end) commit_end = start;
      if (commit_start >= commit_end) break;
    }
#else
    if (!engine_->code_manager()->Commit(commit_start,
                                         commit_end - commit_start)) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode commit");
      UNREACHABLE();
    }
#endif
  }
  DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
  allocated_code_space_.Merge(code_space);
  TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, code_space.begin(),
             size);
  return {reinterpret_cast<byte*>(code_space.begin()), code_space.size()};
}

namespace {
class NativeModuleWireBytesStorage final : public WireBytesStorage {
 public:
  explicit NativeModuleWireBytesStorage(
      std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes)
      : wire_bytes_(std::move(wire_bytes)) {}

  Vector<const uint8_t> GetCode(WireBytesRef ref) const final {
    return wire_bytes_->as_vector().SubVector(ref.offset(), ref.end_offset());
  }

 private:
  const std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;
};
}  // namespace

void NativeModule::SetWireBytes(OwnedVector<const uint8_t> wire_bytes) {
  auto shared_wire_bytes =
      std::make_shared<OwnedVector<const uint8_t>>(std::move(wire_bytes));
  wire_bytes_ = shared_wire_bytes;
  if (!shared_wire_bytes->is_empty()) {
    compilation_state_->SetWireBytesStorage(
        std::make_shared<NativeModuleWireBytesStorage>(
            std::move(shared_wire_bytes)));
  }
}

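// Binary search in {owned_code_}, which is sorted by instruction start, for
// the code object containing {pc}. Returns nullptr if {pc} does not belong to
// any code object owned by this module.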
WasmCode* NativeModule::Lookup(Address pc) const {
  base::MutexGuard lock(&allocation_mutex_);
  if (owned_code_.empty()) return nullptr;
  auto iter = std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
                               WasmCodeUniquePtrComparator());
  if (iter == owned_code_.begin()) return nullptr;
  --iter;
  WasmCode* candidate = iter->get();
  DCHECK_NOT_NULL(candidate);
  return candidate->contains(pc) ? candidate : nullptr;
}

Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
  // TODO(clemensh): Measure performance win of returning instruction start
  // directly if we have turbofan code. Downside: Redirecting functions (e.g.
  // for debugging) gets much harder.

  // Return the jump table slot for that function index.
  DCHECK_NOT_NULL(jump_table_);
  uint32_t slot_idx = func_index - module_->num_imported_functions;
  uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
  DCHECK_LT(slot_offset, jump_table_->instructions().size());
  return jump_table_->instruction_start() + slot_offset;
}

uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
    Address slot_address) const {
  DCHECK(is_jump_table_slot(slot_address));
  uint32_t slot_offset =
      static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
  uint32_t slot_idx = JumpTableAssembler::SlotOffsetToIndex(slot_offset);
  DCHECK_LT(slot_idx, module_->num_declared_functions);
  return module_->num_imported_functions + slot_idx;
}

void NativeModule::DisableTrapHandler() {
  // Switch {use_trap_handler_} from true to false.
  DCHECK(use_trap_handler_);
  use_trap_handler_ = kNoTrapHandler;

  // Clear the code table (just to increase the chances to hit an error if we
  // forget to re-add all code).
  uint32_t num_wasm_functions = module_->num_declared_functions;
  memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));

  // TODO(clemensh): Actually free the owned code, such that the memory can be
  // recycled.
}

const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
#define RETURN_NAME(Name)                                               \
  if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
    return #Name;                                                       \
  }
#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
#undef RETURN_NAME_TRAP
#undef RETURN_NAME
  return "<unknown>";
}

NativeModule::~NativeModule() {
  TRACE_HEAP("Deleting native module: %p\n", reinterpret_cast<void*>(this));
  // Cancel all background compilation before resetting any field of the
  // NativeModule or freeing anything.
  compilation_state_->AbortCompilation();
  engine_->FreeNativeModule(this);
}

WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
                                 size_t max_committed)
    : memory_tracker_(memory_tracker),
      remaining_uncommitted_code_space_(max_committed),
      critical_uncommitted_code_space_(max_committed / 2) {
  DCHECK_LE(max_committed, kMaxWasmCodeMemory);
}

bool WasmCodeManager::Commit(Address start, size_t size) {
  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) return true;
  DCHECK(IsAligned(start, AllocatePageSize()));
  DCHECK(IsAligned(size, AllocatePageSize()));
  // Reserve the size. Use CAS loop to avoid underflow on
  // {remaining_uncommitted_}. Temporary underflow would allow concurrent
  // threads to over-commit.
  size_t old_value = remaining_uncommitted_code_space_.load();
  while (true) {
    if (old_value < size) return false;
    if (remaining_uncommitted_code_space_.compare_exchange_weak(
            old_value, old_value - size)) {
      break;
    }
  }
  PageAllocator::Permission permission = FLAG_wasm_write_protect_code_memory
                                             ? PageAllocator::kReadWrite
                                             : PageAllocator::kReadWriteExecute;

  bool ret =
      SetPermissions(GetPlatformPageAllocator(), start, size, permission);
  TRACE_HEAP("Setting rw permissions for %p:%p\n",
             reinterpret_cast<void*>(start),
             reinterpret_cast<void*>(start + size));

  if (!ret) {
    // Highly unlikely.
    remaining_uncommitted_code_space_.fetch_add(size);
    return false;
  }
  return ret;
}

void WasmCodeManager::AssignRanges(Address start, Address end,
                                   NativeModule* native_module) {
  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
}

VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  DCHECK_GT(size, 0);
  size = RoundUp(size, page_allocator->AllocatePageSize());
  if (!memory_tracker_->ReserveAddressSpace(size,
                                            WasmMemoryTracker::kHardLimit)) {
    return {};
  }
  if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();

  VirtualMemory mem(page_allocator, size, hint,
                    page_allocator->AllocatePageSize());
  if (!mem.IsReserved()) {
    memory_tracker_->ReleaseReservation(size);
    return {};
  }
  TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
             reinterpret_cast<void*>(mem.address()),
             reinterpret_cast<void*>(mem.end()), mem.size());

  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) {
    SetPermissions(GetPlatformPageAllocator(), mem.address(), mem.size(),
                   PageAllocator::kReadWriteExecute);
  }
  return mem;
}

void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
  remaining_uncommitted_code_space_.store(limit);
  critical_uncommitted_code_space_.store(limit / 2);
}

// static
size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
  constexpr size_t kCodeSizeMultiplier = 4;
  constexpr size_t kCodeOverhead = 32;     // for prologue, stack check, ...
  constexpr size_t kStaticCodeSize = 512;  // runtime stubs, ...
  constexpr size_t kImportSize = 64 * kSystemPointerSize;

  size_t estimate = kStaticCodeSize;
  for (auto& function : module->functions) {
    estimate += kCodeOverhead + kCodeSizeMultiplier * function.code.length();
  }
  estimate +=
      JumpTableAssembler::SizeForNumberOfSlots(module->num_declared_functions);
  estimate += kImportSize * module->num_imported_functions;

  return estimate;
}

// static
size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
    const WasmModule* module) {
  size_t wasm_module_estimate = EstimateStoredSize(module);

  uint32_t num_wasm_functions = module->num_declared_functions;

  // TODO(wasm): Include wire bytes size.
  size_t native_module_estimate =
      sizeof(NativeModule) +                     /* NativeModule struct */
      (sizeof(WasmCode*) * num_wasm_functions) + /* code table size */
      (sizeof(WasmCode) * num_wasm_functions);   /* code object size */

  return wasm_module_estimate + native_module_estimate;
}

std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
    WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
    size_t code_size_estimate, bool can_request_more,
    std::shared_ptr<const WasmModule> module) {
  DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
  if (remaining_uncommitted_code_space_.load() <
      critical_uncommitted_code_space_.load()) {
    (reinterpret_cast<v8::Isolate*>(isolate))
        ->MemoryPressureNotification(MemoryPressureLevel::kCritical);
    critical_uncommitted_code_space_.store(
        remaining_uncommitted_code_space_.load() / 2);
  }

  // If the code must be contiguous, reserve enough address space up front.
  size_t code_vmem_size =
      kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
  // Try up to two times; getting rid of dead JSArrayBuffer allocations might
  // require two GCs because the first GC may be incremental and may have
  // floating garbage.
  static constexpr int kAllocationRetries = 2;
  VirtualMemory code_space;
  for (int retries = 0;; ++retries) {
    code_space = TryAllocate(code_vmem_size);
    if (code_space.IsReserved()) break;
    if (retries == kAllocationRetries) {
      V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
      UNREACHABLE();
    }
    // Run one GC, then try the allocation again.
    isolate->heap()->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                                true);
  }

  Address start = code_space.address();
  size_t size = code_space.size();
  Address end = code_space.end();
  std::unique_ptr<NativeModule> ret(
      new NativeModule(engine, enabled, can_request_more, std::move(code_space),
                       std::move(module), isolate->async_counters()));
  TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
             size);
  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
  return ret;
}

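// Toggles the protection of all code pages owned by this module between
// writable (for compilation) and executable, when
// --wasm-write-protect-code-memory is enabled. No-op if the module is already
// in the requested state.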
bool NativeModule::SetExecutable(bool executable) {
  if (is_executable_ == executable) return true;
  TRACE_HEAP("Setting module %p as executable: %d.\n", this, executable);

  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();

  if (FLAG_wasm_write_protect_code_memory) {
    PageAllocator::Permission permission =
        executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
#if V8_OS_WIN
    // On Windows, we need to switch permissions per separate virtual memory
    // reservation. This is really just a problem when the NativeModule is
    // growable (meaning can_request_more_memory_). That's 32-bit in production,
    // or unittests.
    // For now, in that case, we commit at reserved memory granularity.
    // Technically, that may be a waste, because we may reserve more than we
    // use. On 32-bit though, the scarce resource is the address space -
    // committed or not.
    if (can_request_more_memory_) {
      for (auto& vmem : owned_code_space_) {
        if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
                            permission)) {
          return false;
        }
        TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
                   executable);
      }
      is_executable_ = executable;
      return true;
    }
#endif
    for (auto& region : allocated_code_space_.regions()) {
      // allocated_code_space_ is fine-grained, so we need to
      // page-align it.
      size_t region_size =
          RoundUp(region.size(), page_allocator->AllocatePageSize());
      if (!SetPermissions(page_allocator, region.begin(), region_size,
                          permission)) {
        return false;
      }
      TRACE_HEAP("Set %p:%p to executable:%d\n",
                 reinterpret_cast<void*>(region.begin()),
                 reinterpret_cast<void*>(region.end()), executable);
    }
  }
  is_executable_ = executable;
  return true;
}

void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
  base::MutexGuard lock(&native_modules_mutex_);
  TRACE_HEAP("Freeing NativeModule %p\n", native_module);
  for (auto& code_space : native_module->owned_code_space_) {
    DCHECK(code_space.IsReserved());
    TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n",
               code_space.address(), code_space.end(), code_space.size());
    lookup_map_.erase(code_space.address());
    memory_tracker_->ReleaseReservation(code_space.size());
    code_space.Free();
    DCHECK(!code_space.IsReserved());
  }
  native_module->owned_code_space_.clear();

  size_t code_size = native_module->committed_code_space_.load();
  DCHECK(IsAligned(code_size, AllocatePageSize()));
  remaining_uncommitted_code_space_.fetch_add(code_size);
  // Remaining code space cannot grow bigger than maximum code space size.
  DCHECK_LE(remaining_uncommitted_code_space_.load(), kMaxWasmCodeMemory);
}

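// Maps {pc} to the NativeModule whose reserved code region contains it, using
// {lookup_map_}, which is keyed by region start address.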
NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
  base::MutexGuard lock(&native_modules_mutex_);
  if (lookup_map_.empty()) return nullptr;

  auto iter = lookup_map_.upper_bound(pc);
  if (iter == lookup_map_.begin()) return nullptr;
  --iter;
  Address region_start = iter->first;
  Address region_end = iter->second.first;
  NativeModule* candidate = iter->second.second;

  DCHECK_NOT_NULL(candidate);
  return region_start <= pc && pc < region_end ? candidate : nullptr;
}

WasmCode* WasmCodeManager::LookupCode(Address pc) const {
  NativeModule* candidate = LookupNativeModule(pc);
  return candidate ? candidate->Lookup(pc) : nullptr;
}

size_t WasmCodeManager::remaining_uncommitted_code_space() const {
  return remaining_uncommitted_code_space_.load();
}

// TODO(v8:7424): Code protection scopes are not yet supported with shared code
// enabled and need to be revisited to work with --wasm-shared-code as well.
NativeModuleModificationScope::NativeModuleModificationScope(
    NativeModule* native_module)
    : native_module_(native_module) {
  if (FLAG_wasm_write_protect_code_memory && native_module_ &&
      (native_module_->modification_scope_depth_++) == 0) {
    bool success = native_module_->SetExecutable(false);
    CHECK(success);
  }
}

NativeModuleModificationScope::~NativeModuleModificationScope() {
  if (FLAG_wasm_write_protect_code_memory && native_module_ &&
      (native_module_->modification_scope_depth_--) == 1) {
    bool success = native_module_->SetExecutable(true);
    CHECK(success);
  }
}

}  // namespace wasm
}  // namespace internal
}  // namespace v8
#undef TRACE_HEAP