// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/wasm/wasm-code-manager.h"

#include <iomanip>

#include "src/assembler-inl.h"
#include "src/base/adapters.h"
#include "src/base/macros.h"
#include "src/base/platform/platform.h"
#include "src/disassembler.h"
#include "src/globals.h"
#include "src/log.h"
#include "src/macro-assembler-inl.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
#include "src/snapshot/embedded-data.h"
#include "src/vector.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/jump-table-assembler.h"
#include "src/wasm/wasm-import-wrapper-cache-inl.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-objects.h"

#define TRACE_HEAP(...)                                   \
  do {                                                    \
    if (FLAG_trace_wasm_native_heap) PrintF(__VA_ARGS__); \
  } while (false)

namespace v8 {
namespace internal {
namespace wasm {

using trap_handler::ProtectedInstructionData;

void DisjointAllocationPool::Merge(base::AddressRegion region) {
  auto dest_it = regions_.begin();
  auto dest_end = regions_.end();

  // Skip over dest regions strictly before {region}.
  while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;

  // After last dest region: insert and done.
  if (dest_it == dest_end) {
    regions_.push_back(region);
    return;
  }

  // Adjacent (from below) to dest: merge and done.
  if (dest_it->begin() == region.end()) {
    base::AddressRegion merged_region{region.begin(),
                                      region.size() + dest_it->size()};
    DCHECK_EQ(merged_region.end(), dest_it->end());
    *dest_it = merged_region;
    return;
  }

  // Before dest: insert and done.
  if (dest_it->begin() > region.end()) {
    regions_.insert(dest_it, region);
    return;
  }

  // Src is adjacent from above. Merge and check whether the merged region is
  // now adjacent to the next region.
  DCHECK_EQ(dest_it->end(), region.begin());
  dest_it->set_size(dest_it->size() + region.size());
  DCHECK_EQ(dest_it->end(), region.end());
  auto next_dest = dest_it;
  ++next_dest;
  if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
    dest_it->set_size(dest_it->size() + next_dest->size());
    DCHECK_EQ(dest_it->end(), next_dest->end());
    regions_.erase(next_dest);
  }
}

base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
  for (auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
    if (size > it->size()) continue;
    base::AddressRegion ret{it->begin(), size};
    if (size == it->size()) {
      regions_.erase(it);
    } else {
      *it = base::AddressRegion{it->begin() + size, it->size() - size};
    }
    return ret;
  }
  return {};
}
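
// Illustrative example (hypothetical addresses, not part of the build):
// seeded with the single free region [0x1000, 0x1100), the pool behaves as
// follows.
//
//   Allocate(0x20) -> [0x1000, 0x1020)   // free: [0x1020, 0x1100)
//   Allocate(0x20) -> [0x1020, 0x1040)   // free: [0x1040, 0x1100)
//   Merge([0x1020, 0x1040))              // adjacent from below: coalesced
//                                        // free: [0x1020, 0x1100)
//
// {Merge} maintains the invariant that {regions_} holds disjoint,
// non-adjacent regions in ascending address order.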

Address WasmCode::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    if (constant_pool_offset_ < code_comments_offset_) {
      return instruction_start() + constant_pool_offset_;
    }
  }
  return kNullAddress;
}

Address WasmCode::code_comments() const {
  if (code_comments_offset_ < unpadded_binary_size_) {
    return instruction_start() + code_comments_offset_;
  }
  return kNullAddress;
}

size_t WasmCode::trap_handler_index() const {
  CHECK(HasTrapHandlerIndex());
  return static_cast<size_t>(trap_handler_index_);
}

void WasmCode::set_trap_handler_index(size_t value) {
  trap_handler_index_ = value;
}

void WasmCode::RegisterTrapHandlerData() {
  DCHECK(!HasTrapHandlerIndex());
  if (kind() != WasmCode::kFunction) return;
  if (protected_instructions_.is_empty()) return;

  Address base = instruction_start();

  size_t size = instructions().size();
  const int index =
      RegisterHandlerData(base, size, protected_instructions().size(),
                          protected_instructions().start());

  // TODO(eholk): if index is negative, fail.
  CHECK_LE(0, index);
  set_trap_handler_index(static_cast<size_t>(index));
}

bool WasmCode::HasTrapHandlerIndex() const { return trap_handler_index_ >= 0; }

bool WasmCode::ShouldBeLogged(Isolate* isolate) {
  // The return value is cached in {WasmEngine::IsolateData::log_codes}. Make
  // sure to call {WasmEngine::EnableCodeLogging} if this return value would
  // change for any isolate. Otherwise we might lose code events.
  return isolate->logger()->is_listening_to_code_events() ||
         isolate->is_profiling();
}

void WasmCode::LogCode(Isolate* isolate) const {
  DCHECK(ShouldBeLogged(isolate));
  if (IsAnonymous()) return;

  ModuleWireBytes wire_bytes(native_module()->wire_bytes());
  // TODO(herhut): Allow logging code without an on-heap round-trip of the
  // name.
  WireBytesRef name_ref =
      native_module()->module()->LookupFunctionName(wire_bytes, index());
  WasmName name_vec = wire_bytes.GetNameOrNull(name_ref);
  if (!name_vec.is_empty()) {
    HandleScope scope(isolate);
    MaybeHandle<String> maybe_name = isolate->factory()->NewStringFromUtf8(
        Vector<const char>::cast(name_vec));
    Handle<String> name;
    if (!maybe_name.ToHandle(&name)) {
      name = isolate->factory()->NewStringFromAsciiChecked("<name too long>");
    }
    int name_length;
    auto cname =
        name->ToCString(AllowNullsFlag::DISALLOW_NULLS,
                        RobustnessFlag::ROBUST_STRING_TRAVERSAL, &name_length);
    PROFILE(isolate,
            CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                            {cname.get(), static_cast<size_t>(name_length)}));
  } else {
    EmbeddedVector<char, 32> generated_name;
    int length = SNPrintF(generated_name, "wasm-function[%d]", index());
    generated_name.Truncate(length);
    PROFILE(isolate, CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                                     generated_name));
  }

  if (!source_positions().is_empty()) {
    LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(instruction_start(),
                                                       source_positions()));
  }
}

void WasmCode::Validate() const {
#ifdef DEBUG
  // We expect certain relocation info modes to never appear in {WasmCode}
  // objects or to be restricted to a small set of valid values. Hence the
  // iteration below does not use a mask, but visits all relocation data.
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    switch (mode) {
      case RelocInfo::WASM_CALL: {
        Address target = it.rinfo()->wasm_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->jump_table_, code);
        CHECK(code->contains(target));
        break;
      }
      case RelocInfo::WASM_STUB_CALL: {
        Address target = it.rinfo()->wasm_stub_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
#ifdef V8_EMBEDDED_BUILTINS
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->runtime_stub_table_, code);
        CHECK(code->contains(target));
#else
        CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
        CHECK_EQ(target, code->instruction_start());
#endif
        break;
      }
      case RelocInfo::INTERNAL_REFERENCE:
      case RelocInfo::INTERNAL_REFERENCE_ENCODED: {
        Address target = it.rinfo()->target_internal_reference();
        CHECK(contains(target));
        break;
      }
      case RelocInfo::EXTERNAL_REFERENCE:
      case RelocInfo::CONST_POOL:
      case RelocInfo::VENEER_POOL:
        // These are OK to appear.
        break;
      default:
        FATAL("Unexpected mode: %d", mode);
    }
  }
#endif
}

void WasmCode::MaybePrint(const char* name) const {
  // Determines whether flags want this code to be printed.
  if ((FLAG_print_wasm_code && kind() == kFunction) ||
      (FLAG_print_wasm_stub_code && kind() != kFunction) || FLAG_print_code) {
    Print(name);
  }
}

void WasmCode::Print(const char* name) const {
  StdoutStream os;
  os << "--- WebAssembly code ---\n";
  Disassemble(name, os);
  os << "--- End code ---\n";
}

void WasmCode::Disassemble(const char* name, std::ostream& os,
                           Address current_pc) const {
  if (name) os << "name: " << name << "\n";
  if (!IsAnonymous()) os << "index: " << index() << "\n";
  os << "kind: " << GetWasmCodeKindAsString(kind_) << "\n";
  os << "compiler: " << (is_liftoff() ? "Liftoff" : "TurboFan") << "\n";
  size_t padding = instructions().size() - unpadded_binary_size_;
  os << "Body (size = " << instructions().size() << " = "
     << unpadded_binary_size_ << " + " << padding << " padding)\n";

#ifdef ENABLE_DISASSEMBLER
  size_t instruction_size = unpadded_binary_size_;
  if (constant_pool_offset_ < instruction_size) {
    instruction_size = constant_pool_offset_;
  }
  if (safepoint_table_offset_ && safepoint_table_offset_ < instruction_size) {
    instruction_size = safepoint_table_offset_;
  }
  if (handler_table_offset_ && handler_table_offset_ < instruction_size) {
    instruction_size = handler_table_offset_;
  }
  DCHECK_LT(0, instruction_size);
  os << "Instructions (size = " << instruction_size << ")\n";
  Disassembler::Decode(nullptr, &os, instructions().start(),
                       instructions().start() + instruction_size,
                       CodeReference(this), current_pc);
  os << "\n";

  if (handler_table_offset_ > 0) {
    HandlerTable table(instruction_start(), handler_table_offset_);
    os << "Exception Handler Table (size = " << table.NumberOfReturnEntries()
       << "):\n";
    table.HandlerTableReturnPrint(os);
    os << "\n";
  }

  if (!protected_instructions_.is_empty()) {
    os << "Protected instructions:\n pc offset land pad\n";
    for (auto& data : protected_instructions()) {
      os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
         << std::hex << data.landing_offset << "\n";
    }
    os << "\n";
  }

  if (!source_positions().is_empty()) {
    os << "Source positions:\n pc offset position\n";
    for (SourcePositionTableIterator it(source_positions()); !it.done();
         it.Advance()) {
      os << std::setw(10) << std::hex << it.code_offset() << std::dec
         << std::setw(10) << it.source_position().ScriptOffset()
         << (it.is_statement() ? " statement" : "") << "\n";
    }
    os << "\n";
  }

  if (safepoint_table_offset_ > 0) {
    SafepointTable table(instruction_start(), safepoint_table_offset_,
                         stack_slots_);
    os << "Safepoints (size = " << table.size() << ")\n";
    for (uint32_t i = 0; i < table.length(); i++) {
      uintptr_t pc_offset = table.GetPcOffset(i);
      os << reinterpret_cast<const void*>(instruction_start() + pc_offset);
      os << std::setw(6) << std::hex << pc_offset << " " << std::dec;
      table.PrintEntry(i, os);
      os << " (sp -> fp)";
      SafepointEntry entry = table.GetEntry(i);
      if (entry.trampoline_pc() != -1) {
        os << " trampoline: " << std::hex << entry.trampoline_pc() << std::dec;
      }
      if (entry.has_deoptimization_index()) {
        os << " deopt: " << std::setw(6) << entry.deoptimization_index();
      }
      os << "\n";
    }
    os << "\n";
  }

  os << "RelocInfo (size = " << reloc_info_.size() << ")\n";
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    it.rinfo()->Print(nullptr, os);
  }
  os << "\n";

  if (code_comments_offset() < unpadded_binary_size_) {
    Address code_comments = reinterpret_cast<Address>(instructions().start() +
                                                      code_comments_offset());
    PrintCodeCommentsSection(os, code_comments);
  }
#endif  // ENABLE_DISASSEMBLER
}

const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
  switch (kind) {
    case WasmCode::kFunction:
      return "wasm function";
    case WasmCode::kWasmToJsWrapper:
      return "wasm-to-js";
    case WasmCode::kRuntimeStub:
      return "runtime-stub";
    case WasmCode::kInterpreterEntry:
      return "interpreter entry";
    case WasmCode::kJumpTable:
      return "jump table";
  }
  return "unknown kind";
}

WasmCode::~WasmCode() {
  if (HasTrapHandlerIndex()) {
    CHECK_LT(trap_handler_index(),
             static_cast<size_t>(std::numeric_limits<int>::max()));
    trap_handler::ReleaseHandlerData(static_cast<int>(trap_handler_index()));
  }
}

NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled,
                           bool can_request_more, VirtualMemory code_space,
                           std::shared_ptr<const WasmModule> module,
                           std::shared_ptr<Counters> async_counters,
                           std::shared_ptr<NativeModule>* shared_this)
    : enabled_features_(enabled),
      module_(std::move(module)),
      import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
          new WasmImportWrapperCache(this))),
      free_code_space_(code_space.region()),
      engine_(engine),
      can_request_more_memory_(can_request_more),
      use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
                                                             : kNoTrapHandler) {
  // We receive a pointer to an empty {std::shared_ptr}, and install ourselves
  // there.
  DCHECK_NOT_NULL(shared_this);
  DCHECK_NULL(*shared_this);
  shared_this->reset(this);
  compilation_state_ =
      CompilationState::New(*shared_this, std::move(async_counters));
  DCHECK_NOT_NULL(module_);
  owned_code_space_.emplace_back(std::move(code_space));
  owned_code_.reserve(num_functions());

  uint32_t num_wasm_functions = module_->num_declared_functions;
  if (num_wasm_functions > 0) {
    code_table_.reset(new WasmCode* [num_wasm_functions] {});

    jump_table_ = CreateEmptyJumpTable(
        JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
  }
}

void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
  DCHECK_LE(num_functions(), max_functions);
  WasmCode** new_table = new WasmCode* [max_functions] {};
  if (module_->num_declared_functions > 0) {
    memcpy(new_table, code_table_.get(),
           module_->num_declared_functions * sizeof(*new_table));
  }
  code_table_.reset(new_table);

  // Re-allocate jump table.
  jump_table_ = CreateEmptyJumpTable(
      JumpTableAssembler::SizeForNumberOfSlots(max_functions));
}

void NativeModule::LogWasmCodes(Isolate* isolate) {
  if (!WasmCode::ShouldBeLogged(isolate)) return;

  // TODO(titzer): we skip the logging of the import wrappers
  // here, but they should be included somehow.
  for (WasmCode* code : code_table()) {
    if (code != nullptr) code->LogCode(isolate);
  }
}

CompilationEnv NativeModule::CreateCompilationEnv() const {
  return {module(), use_trap_handler_, kRuntimeExceptionSupport,
          enabled_features_};
}

WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
  return AddAndPublishAnonymousCode(code, WasmCode::kFunction);
}

void NativeModule::SetLazyBuiltin() {
  uint32_t num_wasm_functions = module_->num_declared_functions;
  if (num_wasm_functions == 0) return;
  // Fill the jump table with jumps to the lazy compile stub.
  Address lazy_compile_target = runtime_stub_entry(WasmCode::kWasmCompileLazy);
  for (uint32_t i = 0; i < num_wasm_functions; ++i) {
    JumpTableAssembler::EmitLazyCompileJumpSlot(
        jump_table_->instruction_start(), i,
        i + module_->num_imported_functions, lazy_compile_target,
        WasmCode::kNoFlushICache);
  }
  FlushInstructionCache(jump_table_->instructions().start(),
                        jump_table_->instructions().size());
}
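
// Example (illustrative): for a module with 2 imported and 3 declared
// functions, the loop above fills jump table slots 0..2, belonging to
// function indices 2..4, with jumps to the lazy compile stub. The first call
// through any of these slots thus enters {kWasmCompileLazy}; once the
// function is actually compiled, publishing the code patches the slot to
// point at the real code (see {PublishCodeLocked} below).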

// TODO(mstarzinger): Remove {Isolate} parameter once {V8_EMBEDDED_BUILTINS}
// is removed and embedded builtins are no longer optional.
void NativeModule::SetRuntimeStubs(Isolate* isolate) {
  DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
#ifdef V8_EMBEDDED_BUILTINS
  WasmCode* jump_table =
      CreateEmptyJumpTable(JumpTableAssembler::SizeForNumberOfStubSlots(
          WasmCode::kRuntimeStubCount));
  Address base = jump_table->instruction_start();
  EmbeddedData embedded_data = EmbeddedData::FromBlob();
#define RUNTIME_STUB(Name) {Builtins::k##Name, WasmCode::k##Name},
#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
  std::pair<Builtins::Name, WasmCode::RuntimeStubId> wasm_runtime_stubs[] = {
      WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
#undef RUNTIME_STUB
#undef RUNTIME_STUB_TRAP
  for (auto pair : wasm_runtime_stubs) {
    CHECK(embedded_data.ContainsBuiltin(pair.first));
    Address builtin = embedded_data.InstructionStartOfBuiltin(pair.first);
    JumpTableAssembler::EmitRuntimeStubSlot(base, pair.second, builtin,
                                            WasmCode::kNoFlushICache);
    uint32_t slot_offset =
        JumpTableAssembler::StubSlotIndexToOffset(pair.second);
    runtime_stub_entries_[pair.second] = base + slot_offset;
  }
  FlushInstructionCache(jump_table->instructions().start(),
                        jump_table->instructions().size());
  DCHECK_NULL(runtime_stub_table_);
  runtime_stub_table_ = jump_table;
#else   // V8_EMBEDDED_BUILTINS
  HandleScope scope(isolate);
  USE(runtime_stub_table_);  // Actually unused, but avoids ifdefs in header.
#define COPY_BUILTIN(Name)                                        \
  runtime_stub_entries_[WasmCode::k##Name] =                      \
      AddAndPublishAnonymousCode(                                 \
          isolate->builtins()->builtin_handle(Builtins::k##Name), \
          WasmCode::kRuntimeStub, #Name)                          \
          ->instruction_start();
#define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
#undef COPY_BUILTIN_TRAP
#undef COPY_BUILTIN
#endif  // V8_EMBEDDED_BUILTINS
  DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
}

WasmCode* NativeModule::AddAndPublishAnonymousCode(Handle<Code> code,
                                                   WasmCode::Kind kind,
                                                   const char* name) {
  // For off-heap builtins, we create a copy of the off-heap instruction stream
  // instead of the on-heap code object containing the trampoline. Ensure that
  // we do not apply the on-heap reloc info to the off-heap instructions.
  const size_t relocation_size =
      code->is_off_heap_trampoline() ? 0 : code->relocation_size();
  OwnedVector<byte> reloc_info;
  if (relocation_size > 0) {
    reloc_info = OwnedVector<byte>::New(relocation_size);
    memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
  }
  Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
                                     code->GetIsolate());
  OwnedVector<byte> source_pos =
      OwnedVector<byte>::New(source_pos_table->length());
  if (source_pos_table->length() > 0) {
    source_pos_table->copy_out(0, source_pos.start(),
                               source_pos_table->length());
  }
  Vector<const byte> instructions(
      reinterpret_cast<byte*>(code->InstructionStart()),
      static_cast<size_t>(code->InstructionSize()));
  const uint32_t stack_slots = static_cast<uint32_t>(
      code->has_safepoint_info() ? code->stack_slots() : 0);

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // Code objects contain real offsets but WasmCode expects an offset of 0 to
  // mean 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      code->has_safepoint_table() ? code->safepoint_table_offset() : 0);
  const size_t handler_table_offset = static_cast<size_t>(
      code->has_handler_table() ? code->handler_table_offset() : 0);
  const size_t constant_pool_offset =
      static_cast<size_t>(code->constant_pool_offset());
  const size_t code_comments_offset =
      static_cast<size_t>(code->code_comments_offset());

  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = reinterpret_cast<Address>(dst_code_bytes.begin()) -
                   code->InstructionStart();
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  RelocIterator orig_it(*code, mode_mask);
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next(), orig_it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache after relocation.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  std::unique_ptr<WasmCode> new_code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      dst_code_bytes,                           // instructions
      stack_slots,                              // stack_slots
      0,                                        // tagged_parameter_slots
      safepoint_table_offset,                   // safepoint_table_offset
      handler_table_offset,                     // handler_table_offset
      constant_pool_offset,                     // constant_pool_offset
      code_comments_offset,                     // code_comments_offset
      instructions.size(),                      // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      std::move(reloc_info),                    // reloc_info
      std::move(source_pos),                    // source positions
      kind,                                     // kind
      WasmCode::kOther}};                       // tier
  new_code->MaybePrint(name);
  new_code->Validate();

  return PublishCode(std::move(new_code));
}

std::unique_ptr<WasmCode> NativeModule::AddCode(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    WasmCode::Tier tier) {
  return AddCodeWithCodeSpace(index, desc, stack_slots, tagged_parameter_slots,
                              std::move(protected_instructions),
                              std::move(source_position_table), kind, tier,
                              AllocateForCode(desc.instr_size));
}

std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    WasmCode::Tier tier, Vector<uint8_t> dst_code_bytes) {
  OwnedVector<byte> reloc_info;
  if (desc.reloc_size > 0) {
    reloc_info = OwnedVector<byte>::New(desc.reloc_size);
    memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
           desc.reloc_size);
  }

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // CodeDesc contains real offsets but WasmCode expects an offset of 0 to mean
  // 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      desc.safepoint_table_size == 0 ? 0 : desc.safepoint_table_offset);
  const size_t handler_table_offset = static_cast<size_t>(
      desc.handler_table_size == 0 ? 0 : desc.handler_table_offset);
  const size_t constant_pool_offset =
      static_cast<size_t>(desc.constant_pool_offset);
  const size_t code_comments_offset =
      static_cast<size_t>(desc.code_comments_offset);
  const size_t instr_size = static_cast<size_t>(desc.instr_size);

  memcpy(dst_code_bytes.begin(), desc.buffer,
         static_cast<size_t>(desc.instr_size));

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = dst_code_bytes.begin() - desc.buffer;
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmCall(mode)) {
      uint32_t call_tag = it.rinfo()->wasm_call_tag();
      Address target = GetCallTargetForFunction(call_tag);
      it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache after relocation.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, instr_size, std::move(protected_instructions),
      std::move(reloc_info), std::move(source_position_table), kind, tier}};
  code->MaybePrint();
  code->Validate();

  code->RegisterTrapHandlerData();

  return code;
}
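
// Sketch of the relocation step above (illustrative, not a V8 API): copying
// code assembled at {desc.buffer} into {dst_code_bytes} shifts every
// absolute address embedded in the instructions by a constant displacement:
//
//   intptr_t delta  = new_base - old_base;
//   patched_address = original_address + delta;  // roughly what apply() does
//
// WASM_CALL and WASM_STUB_CALL entries are the exception: they carry tags
// rather than addresses, so they are resolved against the jump table and the
// runtime stub entries instead of being shifted.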

WasmCode* NativeModule::PublishCode(std::unique_ptr<WasmCode> code) {
  base::MutexGuard lock(&allocation_mutex_);
  return PublishCodeLocked(std::move(code));
}

WasmCode* NativeModule::PublishCodeLocked(std::unique_ptr<WasmCode> code) {
  // The caller must hold the {allocation_mutex_}, hence the TryLock below
  // must fail.
  DCHECK(!allocation_mutex_.TryLock());
  // Skip publishing code if there is an active redirection to the interpreter
  // for the given function index, in order to preserve the redirection.
  if (!code->IsAnonymous() && !has_interpreter_redirection(code->index())) {
    DCHECK_LT(code->index(), num_functions());
    DCHECK_LE(module_->num_imported_functions, code->index());

    // Update code table, except for interpreter entries that would overwrite
    // existing code.
    uint32_t slot_idx = code->index() - module_->num_imported_functions;
    if (code->kind() != WasmCode::kInterpreterEntry ||
        code_table_[slot_idx] == nullptr) {
      code_table_[slot_idx] = code.get();
    }

    // Patch jump table.
    JumpTableAssembler::PatchJumpTableSlot(jump_table_->instruction_start(),
                                           slot_idx, code->instruction_start(),
                                           WasmCode::kFlushICache);
  }
  if (code->kind_ == WasmCode::Kind::kInterpreterEntry) {
    SetInterpreterRedirection(code->index());
  }
  WasmCode* ret = code.get();
  owned_code_.emplace_back(std::move(code));
  return ret;
}

WasmCode* NativeModule::AddDeserializedCode(
    uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
    uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
    size_t handler_table_offset, size_t constant_pool_offset,
    size_t code_comments_offset, size_t unpadded_binary_size,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> reloc_info,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    WasmCode::Tier tier) {
  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, unpadded_binary_size,
      std::move(protected_instructions), std::move(reloc_info),
      std::move(source_position_table), kind, tier}};

  code->RegisterTrapHandlerData();

  // Note: we do not flush the i-cache here, since the code needs to be
  // relocated anyway. The caller is responsible for flushing the i-cache
  // later.

  return PublishCode(std::move(code));
}

std::vector<WasmCode*> NativeModule::SnapshotCodeTable() const {
  base::MutexGuard lock(&allocation_mutex_);
  std::vector<WasmCode*> result;
  result.reserve(code_table().size());
  for (WasmCode* code : code_table()) result.push_back(code);
  return result;
}

WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t jump_table_size) {
  // Only call this if we really need a jump table.
  DCHECK_LT(0, jump_table_size);
  Vector<uint8_t> code_space = AllocateForCode(jump_table_size);
  ZapCode(reinterpret_cast<Address>(code_space.begin()), code_space.size());
  std::unique_ptr<WasmCode> code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      code_space,                               // instructions
      0,                                        // stack_slots
      0,                                        // tagged_parameter_slots
      0,                                        // safepoint_table_offset
      0,                                        // handler_table_offset
      jump_table_size,                          // constant_pool_offset
      jump_table_size,                          // code_comments_offset
      jump_table_size,                          // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      OwnedVector<const uint8_t>{},             // reloc_info
      OwnedVector<const uint8_t>{},             // source_pos
      WasmCode::kJumpTable,                     // kind
      WasmCode::kOther}};                       // tier
  return PublishCode(std::move(code));
}

Vector<byte> NativeModule::AllocateForCode(size_t size) {
  base::MutexGuard lock(&allocation_mutex_);
  DCHECK_LT(0, size);
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  // This happens under a lock assumed by the caller.
  size = RoundUp<kCodeAlignment>(size);
  base::AddressRegion code_space = free_code_space_.Allocate(size);
  if (code_space.is_empty()) {
    if (!can_request_more_memory_) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }

    Address hint = owned_code_space_.empty() ? kNullAddress
                                             : owned_code_space_.back().end();

    VirtualMemory new_mem = engine_->code_manager()->TryAllocate(
        size, reinterpret_cast<void*>(hint));
    if (!new_mem.IsReserved()) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }
    engine_->code_manager()->AssignRanges(new_mem.address(), new_mem.end(),
                                          this);

    free_code_space_.Merge(new_mem.region());
    owned_code_space_.emplace_back(std::move(new_mem));
    code_space = free_code_space_.Allocate(size);
    DCHECK(!code_space.is_empty());
  }
  const Address page_size = page_allocator->AllocatePageSize();
  Address commit_start = RoundUp(code_space.begin(), page_size);
  Address commit_end = RoundUp(code_space.end(), page_size);
  // {commit_start} will be either code_space.start or the start of the next
  // page. {commit_end} will be the start of the page after the one in which
  // the allocation ends.
  // We start from an aligned start, and we know we allocated vmem in
  // page multiples.
  // We just need to commit what's not committed. The page in which we
  // start is already committed (or we start at the beginning of a page).
  // The end needs to be committed all through the end of the page.
  if (commit_start < commit_end) {
    committed_code_space_.fetch_add(commit_end - commit_start);
    // Committed code cannot grow bigger than maximum code space size.
    DCHECK_LE(committed_code_space_.load(), kMaxWasmCodeMemory);
#if V8_OS_WIN
    // On Windows, we cannot commit a region that straddles different
    // reservations of virtual memory. Because we bump-allocate, and because,
    // if we need more memory, we append that memory at the end of the
    // owned_code_space_ list, we traverse that list in reverse order to find
    // the reservation(s) that guide how to chunk the region to commit.
    for (auto& vmem : base::Reversed(owned_code_space_)) {
      if (commit_end <= vmem.address() || vmem.end() <= commit_start) continue;
      Address start = std::max(commit_start, vmem.address());
      Address end = std::min(commit_end, vmem.end());
      size_t commit_size = static_cast<size_t>(end - start);
      if (!engine_->code_manager()->Commit(start, commit_size)) {
        V8::FatalProcessOutOfMemory(nullptr,
                                    "NativeModule::AllocateForCode commit");
        UNREACHABLE();
      }
      // Opportunistically reduce the commit range. This might terminate the
      // loop early.
      if (commit_start == start) commit_start = end;
      if (commit_end == end) commit_end = start;
      if (commit_start >= commit_end) break;
    }
#else
    if (!engine_->code_manager()->Commit(commit_start,
                                         commit_end - commit_start)) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode commit");
      UNREACHABLE();
    }
#endif
  }
  DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
  allocated_code_space_.Merge(code_space);
  generated_code_size_.fetch_add(code_space.size(), std::memory_order_relaxed);

  TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, code_space.begin(),
             size);
  return {reinterpret_cast<byte*>(code_space.begin()), code_space.size()};
}
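
// Worked example (hypothetical values, 4 KiB pages): an allocation
// {code_space} = [0x5F40, 0x6180) yields
//   commit_start = RoundUp(0x5F40, 0x1000) = 0x6000
//   commit_end   = RoundUp(0x6180, 0x1000) = 0x7000
// so only the page [0x6000, 0x7000) is newly committed; the page containing
// 0x5F40 was already committed when an earlier allocation first touched it.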

namespace {
class NativeModuleWireBytesStorage final : public WireBytesStorage {
 public:
  explicit NativeModuleWireBytesStorage(
      std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes)
      : wire_bytes_(std::move(wire_bytes)) {}

  Vector<const uint8_t> GetCode(WireBytesRef ref) const final {
    return wire_bytes_->as_vector().SubVector(ref.offset(), ref.end_offset());
  }

 private:
  const std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;
};
}  // namespace

void NativeModule::SetWireBytes(OwnedVector<const uint8_t> wire_bytes) {
  auto shared_wire_bytes =
      std::make_shared<OwnedVector<const uint8_t>>(std::move(wire_bytes));
  wire_bytes_ = shared_wire_bytes;
  if (!shared_wire_bytes->is_empty()) {
    compilation_state_->SetWireBytesStorage(
        std::make_shared<NativeModuleWireBytesStorage>(
            std::move(shared_wire_bytes)));
  }
}

WasmCode* NativeModule::Lookup(Address pc) const {
  base::MutexGuard lock(&allocation_mutex_);
  if (owned_code_.empty()) return nullptr;
  // First update the sorted portion counter.
  if (owned_code_sorted_portion_ == 0) ++owned_code_sorted_portion_;
  while (owned_code_sorted_portion_ < owned_code_.size() &&
         owned_code_[owned_code_sorted_portion_ - 1]->instruction_start() <=
             owned_code_[owned_code_sorted_portion_]->instruction_start()) {
    ++owned_code_sorted_portion_;
  }
  // Execute at most two rounds: First check whether the {pc} is within the
  // sorted portion of {owned_code_}. If it's not, then sort the whole vector
  // and retry.
  while (true) {
    auto iter =
        std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
                         [](Address pc, const std::unique_ptr<WasmCode>& code) {
                           DCHECK_NE(kNullAddress, pc);
                           DCHECK_NOT_NULL(code);
                           return pc < code->instruction_start();
                         });
    if (iter != owned_code_.begin()) {
      --iter;
      WasmCode* candidate = iter->get();
      DCHECK_NOT_NULL(candidate);
      if (candidate->contains(pc)) return candidate;
    }
    if (owned_code_sorted_portion_ == owned_code_.size()) return nullptr;
    std::sort(owned_code_.begin(), owned_code_.end(),
              [](const std::unique_ptr<WasmCode>& code1,
                 const std::unique_ptr<WasmCode>& code2) {
                return code1->instruction_start() < code2->instruction_start();
              });
    owned_code_sorted_portion_ = owned_code_.size();
  }
}
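
// Minimal sketch of the containment search above, using plain standard
// library types ({Entry} is a hypothetical stand-in for {WasmCode}):
//
//   struct Entry { uintptr_t start; size_t size; };
//   // {v} is sorted by {start}; find the last entry starting at or before
//   // {pc}, then check that {pc} falls within its extent.
//   auto it = std::upper_bound(v.begin(), v.end(), pc,
//                              [](uintptr_t pc, const Entry& e) {
//                                return pc < e.start;
//                              });
//   if (it == v.begin()) return nullptr;  // {pc} precedes all entries.
//   --it;
//   return pc - it->start < it->size ? &*it : nullptr;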

Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
  // TODO(clemensh): Measure performance win of returning instruction start
  // directly if we have turbofan code. Downside: Redirecting functions (e.g.
  // for debugging) gets much harder.

  // Return the jump table slot for that function index.
  DCHECK_NOT_NULL(jump_table_);
  uint32_t slot_idx = func_index - module_->num_imported_functions;
  uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
  DCHECK_LT(slot_offset, jump_table_->instructions().size());
  return jump_table_->instruction_start() + slot_offset;
}

uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
    Address slot_address) const {
  DCHECK(is_jump_table_slot(slot_address));
  uint32_t slot_offset =
      static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
  uint32_t slot_idx = JumpTableAssembler::SlotOffsetToIndex(slot_offset);
  DCHECK_LT(slot_idx, module_->num_declared_functions);
  return module_->num_imported_functions + slot_idx;
}
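
// The two mappings above are inverses of each other. Example (illustrative):
// with 3 imported functions, declared function #5 has func_index 8 and uses
// jump table slot 5:
//   slot_idx    = 8 - 3 = 5
//   slot_offset = SlotIndexToOffset(5)
//   func_index  = 3 + SlotOffsetToIndex(slot_offset) = 8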

void NativeModule::DisableTrapHandler() {
  // Switch {use_trap_handler_} from true to false.
  DCHECK(use_trap_handler_);
  use_trap_handler_ = kNoTrapHandler;

  // Clear the code table (just to increase the chances to hit an error if we
  // forget to re-add all code).
  uint32_t num_wasm_functions = module_->num_declared_functions;
  memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));

  // TODO(clemensh): Actually free the owned code, such that the memory can be
  // recycled.
}

const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
#define RETURN_NAME(Name)                                               \
  if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
    return #Name;                                                       \
  }
#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
#undef RETURN_NAME_TRAP
#undef RETURN_NAME
  return "<unknown>";
}

NativeModule::~NativeModule() {
  TRACE_HEAP("Deleting native module: %p\n", reinterpret_cast<void*>(this));
  // Cancel all background compilation before resetting any field of the
  // NativeModule or freeing anything.
  compilation_state_->AbortCompilation();
  engine_->FreeNativeModule(this);
}

WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
                                 size_t max_committed)
    : memory_tracker_(memory_tracker),
      remaining_uncommitted_code_space_(max_committed),
      critical_uncommitted_code_space_(max_committed / 2) {
  DCHECK_LE(max_committed, kMaxWasmCodeMemory);
}

bool WasmCodeManager::Commit(Address start, size_t size) {
  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) return true;
  DCHECK(IsAligned(start, AllocatePageSize()));
  DCHECK(IsAligned(size, AllocatePageSize()));
  // Reserve the size. Use a CAS loop to avoid underflow on
  // {remaining_uncommitted_code_space_}. Temporary underflow would allow
  // concurrent threads to over-commit.
  size_t old_value = remaining_uncommitted_code_space_.load();
  while (true) {
    if (old_value < size) return false;
    if (remaining_uncommitted_code_space_.compare_exchange_weak(
            old_value, old_value - size)) {
      break;
    }
  }
  PageAllocator::Permission permission = FLAG_wasm_write_protect_code_memory
                                             ? PageAllocator::kReadWrite
                                             : PageAllocator::kReadWriteExecute;

  bool ret =
      SetPermissions(GetPlatformPageAllocator(), start, size, permission);
  TRACE_HEAP("Setting rw permissions for %p:%p\n",
             reinterpret_cast<void*>(start),
             reinterpret_cast<void*>(start + size));

  if (!ret) {
    // Highly unlikely.
    remaining_uncommitted_code_space_.fetch_add(size);
    return false;
  }
  return ret;
}
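
// The reserve step above in isolation (minimal sketch; {budget} stands in
// for {remaining_uncommitted_code_space_}): compare_exchange_weak reloads
// {old_value} on failure, so each retry sees fresh data. A plain fetch_sub
// could transiently drive the counter below zero, letting a concurrent
// thread over-commit before the value is restored.
//
//   bool TryReserve(std::atomic<size_t>& budget, size_t size) {
//     size_t old_value = budget.load();
//     while (true) {
//       if (old_value < size) return false;
//       if (budget.compare_exchange_weak(old_value, old_value - size)) {
//         return true;
//       }
//     }
//   }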

void WasmCodeManager::AssignRanges(Address start, Address end,
                                   NativeModule* native_module) {
  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(
      std::make_pair(start, std::make_pair(end, native_module)));
}

VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  DCHECK_GT(size, 0);
  size = RoundUp(size, page_allocator->AllocatePageSize());
  if (!memory_tracker_->ReserveAddressSpace(size,
                                            WasmMemoryTracker::kHardLimit)) {
    return {};
  }
  if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();

  VirtualMemory mem(page_allocator, size, hint,
                    page_allocator->AllocatePageSize());
  if (!mem.IsReserved()) {
    memory_tracker_->ReleaseReservation(size);
    return {};
  }
  TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
             reinterpret_cast<void*>(mem.address()),
             reinterpret_cast<void*>(mem.end()), mem.size());

  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) {
    SetPermissions(GetPlatformPageAllocator(), mem.address(), mem.size(),
                   PageAllocator::kReadWriteExecute);
  }
  return mem;
}

void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
  remaining_uncommitted_code_space_.store(limit);
  critical_uncommitted_code_space_.store(limit / 2);
}

// static
size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
  constexpr size_t kCodeSizeMultiplier = 4;
  constexpr size_t kCodeOverhead = 32;     // for prologue, stack check, ...
  constexpr size_t kStaticCodeSize = 512;  // runtime stubs, ...
  constexpr size_t kImportSize = 64 * kSystemPointerSize;

  size_t estimate = kStaticCodeSize;
  for (auto& function : module->functions) {
    estimate += kCodeOverhead + kCodeSizeMultiplier * function.code.length();
  }
  estimate +=
      JumpTableAssembler::SizeForNumberOfSlots(module->num_declared_functions);
  estimate += kImportSize * module->num_imported_functions;

  return estimate;
}
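
// Worked example (illustrative, 64-bit target, so kImportSize = 512 bytes):
// a module with 2 imported functions and 3 declared functions of 100 body
// bytes each gives
//   estimate = 512                      // static code (stubs, ...)
//            + 3 * (32 + 4 * 100)       // = 1296, per-function estimate
//            + SizeForNumberOfSlots(3)  // jump table, architecture-dependent
//            + 2 * 512                  // = 1024, import wrappers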

// static
size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
    const WasmModule* module) {
  size_t wasm_module_estimate = EstimateStoredSize(module);

  uint32_t num_wasm_functions = module->num_declared_functions;

  // TODO(wasm): Include wire bytes size.
  size_t native_module_estimate =
      sizeof(NativeModule) +                     /* NativeModule struct */
      (sizeof(WasmCode*) * num_wasm_functions) + /* code table size */
      (sizeof(WasmCode) * num_wasm_functions);   /* code object size */

  return wasm_module_estimate + native_module_estimate;
}

std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule(
    WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
    size_t code_size_estimate, bool can_request_more,
    std::shared_ptr<const WasmModule> module) {
  DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
  if (remaining_uncommitted_code_space_.load() <
      critical_uncommitted_code_space_.load()) {
    (reinterpret_cast<v8::Isolate*>(isolate))
        ->MemoryPressureNotification(MemoryPressureLevel::kCritical);
    critical_uncommitted_code_space_.store(
        remaining_uncommitted_code_space_.load() / 2);
  }

  // If the code must be contiguous, reserve enough address space up front.
  size_t code_vmem_size =
      kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
  // Try up to two times; getting rid of dead JSArrayBuffer allocations might
  // require two GCs because the first GC may be incremental and may have
  // floating garbage.
  static constexpr int kAllocationRetries = 2;
  VirtualMemory code_space;
  for (int retries = 0;; ++retries) {
    code_space = TryAllocate(code_vmem_size);
    if (code_space.IsReserved()) break;
    if (retries == kAllocationRetries) {
      V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
      UNREACHABLE();
    }
    // Run one GC, then try the allocation again.
    isolate->heap()->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                                true);
  }

  Address start = code_space.address();
  size_t size = code_space.size();
  Address end = code_space.end();
  std::shared_ptr<NativeModule> ret;
  new NativeModule(engine, enabled, can_request_more, std::move(code_space),
                   std::move(module), isolate->async_counters(), &ret);
  // The constructor initialized the shared_ptr.
  DCHECK_NOT_NULL(ret);
  TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
             size);
  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
  return ret;
}

bool NativeModule::SetExecutable(bool executable) {
  if (is_executable_ == executable) return true;
  TRACE_HEAP("Setting module %p as executable: %d.\n", this, executable);

  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();

  if (FLAG_wasm_write_protect_code_memory) {
    PageAllocator::Permission permission =
        executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
#if V8_OS_WIN
    // On Windows, we need to switch permissions per separate virtual memory
    // reservation. This is really just a problem when the NativeModule is
    // growable (meaning can_request_more_memory_). That's 32-bit in
    // production, or unittests.
    // For now, in that case, we commit at reserved memory granularity.
    // Technically, that may be a waste, because we may reserve more than we
    // use. On 32-bit though, the scarce resource is the address space -
    // committed or not.
    if (can_request_more_memory_) {
      for (auto& vmem : owned_code_space_) {
        if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
                            permission)) {
          return false;
        }
        TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
                   executable);
      }
      is_executable_ = executable;
      return true;
    }
#endif
    for (auto& region : allocated_code_space_.regions()) {
      // allocated_code_space_ is fine-grained, so we need to
      // page-align it.
      size_t region_size =
          RoundUp(region.size(), page_allocator->AllocatePageSize());
      if (!SetPermissions(page_allocator, region.begin(), region_size,
                          permission)) {
        return false;
      }
      TRACE_HEAP("Set %p:%p to executable:%d\n",
                 reinterpret_cast<void*>(region.begin()),
                 reinterpret_cast<void*>(region.end()), executable);
    }
  }
  is_executable_ = executable;
  return true;
}

void NativeModule::SampleCodeSize(
    Counters* counters, NativeModule::CodeSamplingTime sampling_time) const {
  size_t code_size = sampling_time == kSampling
                         ? committed_code_space()
                         : generated_code_size_.load(std::memory_order_relaxed);
  int code_size_mb = static_cast<int>(code_size / MB);
  Histogram* histogram = nullptr;
  switch (sampling_time) {
    case kAfterBaseline:
      histogram = counters->wasm_module_code_size_mb_after_baseline();
      break;
    case kAfterTopTier:
      histogram = counters->wasm_module_code_size_mb_after_top_tier();
      break;
    case kSampling:
      histogram = counters->wasm_module_code_size_mb();
      break;
  }
  histogram->AddSample(code_size_mb);
}

namespace {
WasmCode::Tier GetCodeTierForExecutionTier(ExecutionTier tier) {
  switch (tier) {
    case ExecutionTier::kInterpreter:
      return WasmCode::Tier::kOther;
    case ExecutionTier::kBaseline:
      return WasmCode::Tier::kLiftoff;
    case ExecutionTier::kOptimized:
      return WasmCode::Tier::kTurbofan;
  }
}

WasmCode::Kind GetCodeKindForExecutionTier(ExecutionTier tier) {
  switch (tier) {
    case ExecutionTier::kInterpreter:
      return WasmCode::Kind::kInterpreterEntry;
    case ExecutionTier::kBaseline:
    case ExecutionTier::kOptimized:
      return WasmCode::Kind::kFunction;
  }
}
}  // namespace

WasmCode* NativeModule::AddCompiledCode(WasmCompilationResult result) {
  return AddCompiledCode({&result, 1})[0];
}

std::vector<WasmCode*> NativeModule::AddCompiledCode(
    Vector<WasmCompilationResult> results) {
  DCHECK(!results.is_empty());
  // First, allocate code space for all the results.
  size_t total_code_space = 0;
  for (auto& result : results) {
    DCHECK(result.succeeded());
    total_code_space += RoundUp<kCodeAlignment>(result.code_desc.instr_size);
  }
  Vector<byte> code_space = AllocateForCode(total_code_space);

  std::vector<std::unique_ptr<WasmCode>> generated_code;
  generated_code.reserve(results.size());

  // Now copy the generated code into the code space and relocate it.
  for (auto& result : results) {
    DCHECK_EQ(result.code_desc.buffer, result.instr_buffer.get());
    size_t code_size = RoundUp<kCodeAlignment>(result.code_desc.instr_size);
    Vector<byte> this_code_space = code_space.SubVector(0, code_size);
    code_space += code_size;
    generated_code.emplace_back(AddCodeWithCodeSpace(
        result.func_index, result.code_desc, result.frame_slot_count,
        result.tagged_parameter_slots,
        std::move(result.protected_instructions),
        std::move(result.source_positions),
        GetCodeKindForExecutionTier(result.result_tier),
        GetCodeTierForExecutionTier(result.result_tier), this_code_space));
  }
  DCHECK_EQ(0, code_space.size());

  // Under the {allocation_mutex_}, publish the code.
  std::vector<WasmCode*> returned_code;
  returned_code.reserve(results.size());
  {
    base::MutexGuard lock(&allocation_mutex_);
    for (auto& result : generated_code) {
      returned_code.push_back(PublishCodeLocked(std::move(result)));
    }
  }

  return returned_code;
}

void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
  base::MutexGuard lock(&native_modules_mutex_);
  TRACE_HEAP("Freeing NativeModule %p\n", native_module);
  for (auto& code_space : native_module->owned_code_space_) {
    DCHECK(code_space.IsReserved());
    TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n",
               code_space.address(), code_space.end(), code_space.size());
    lookup_map_.erase(code_space.address());
    memory_tracker_->ReleaseReservation(code_space.size());
    code_space.Free();
    DCHECK(!code_space.IsReserved());
  }
  native_module->owned_code_space_.clear();

  size_t code_size = native_module->committed_code_space_.load();
  DCHECK(IsAligned(code_size, AllocatePageSize()));
  remaining_uncommitted_code_space_.fetch_add(code_size);
  // Remaining code space cannot grow bigger than maximum code space size.
  DCHECK_LE(remaining_uncommitted_code_space_.load(), kMaxWasmCodeMemory);
}

NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
  base::MutexGuard lock(&native_modules_mutex_);
  if (lookup_map_.empty()) return nullptr;

  auto iter = lookup_map_.upper_bound(pc);
  if (iter == lookup_map_.begin()) return nullptr;
  --iter;
  Address region_start = iter->first;
  Address region_end = iter->second.first;
  NativeModule* candidate = iter->second.second;

  DCHECK_NOT_NULL(candidate);
  return region_start <= pc && pc < region_end ? candidate : nullptr;
}

WasmCode* WasmCodeManager::LookupCode(Address pc) const {
  NativeModule* candidate = LookupNativeModule(pc);
  return candidate ? candidate->Lookup(pc) : nullptr;
}

size_t WasmCodeManager::remaining_uncommitted_code_space() const {
  return remaining_uncommitted_code_space_.load();
}

// TODO(v8:7424): Code protection scopes are not yet supported with shared code
// enabled and need to be revisited to work with --wasm-shared-code as well.
NativeModuleModificationScope::NativeModuleModificationScope(
    NativeModule* native_module)
    : native_module_(native_module) {
  if (FLAG_wasm_write_protect_code_memory && native_module_ &&
      (native_module_->modification_scope_depth_++) == 0) {
    bool success = native_module_->SetExecutable(false);
    CHECK(success);
  }
}

NativeModuleModificationScope::~NativeModuleModificationScope() {
  if (FLAG_wasm_write_protect_code_memory && native_module_ &&
      (native_module_->modification_scope_depth_--) == 1) {
    bool success = native_module_->SetExecutable(true);
    CHECK(success);
  }
}

}  // namespace wasm
}  // namespace internal
}  // namespace v8
#undef TRACE_HEAP
|