// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/wasm/wasm-code-manager.h"

#include <iomanip>

#include "src/assembler-inl.h"
#include "src/base/adapters.h"
#include "src/base/macros.h"
#include "src/base/platform/platform.h"
#include "src/counters.h"
#include "src/disassembler.h"
#include "src/globals.h"
#include "src/log.h"
#include "src/macro-assembler-inl.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
#include "src/snapshot/embedded-data.h"
#include "src/vector.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/jump-table-assembler.h"
#include "src/wasm/wasm-import-wrapper-cache.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-objects.h"

#if defined(V8_OS_WIN_X64)
#include "src/unwinding-info-win64.h"
#endif

#define TRACE_HEAP(...)                                   \
  do {                                                    \
    if (FLAG_trace_wasm_native_heap) PrintF(__VA_ARGS__); \
  } while (false)

namespace v8 {
namespace internal {
namespace wasm {

using trap_handler::ProtectedInstructionData;

void DisjointAllocationPool::Merge(base::AddressRegion region) {
  auto dest_it = regions_.begin();
  auto dest_end = regions_.end();

  // Skip over dest regions strictly before {region}.
  while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;

  // After last dest region: insert and done.
  if (dest_it == dest_end) {
    regions_.push_back(region);
    return;
  }

  // Adjacent (from below) to dest: merge and done.
  if (dest_it->begin() == region.end()) {
    base::AddressRegion merged_region{region.begin(),
                                      region.size() + dest_it->size()};
    DCHECK_EQ(merged_region.end(), dest_it->end());
    *dest_it = merged_region;
    return;
  }

  // Before dest: insert and done.
  if (dest_it->begin() > region.end()) {
    regions_.insert(dest_it, region);
    return;
  }

  // Src is adjacent from above. Merge and check whether the merged region is
  // now adjacent to the next region.
  DCHECK_EQ(dest_it->end(), region.begin());
  dest_it->set_size(dest_it->size() + region.size());
  DCHECK_EQ(dest_it->end(), region.end());
  auto next_dest = dest_it;
  ++next_dest;
  if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
    dest_it->set_size(dest_it->size() + next_dest->size());
    DCHECK_EQ(dest_it->end(), next_dest->end());
    regions_.erase(next_dest);
  }
}

base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
  for (auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
    if (size > it->size()) continue;
    base::AddressRegion ret{it->begin(), size};
    if (size == it->size()) {
      regions_.erase(it);
    } else {
      *it = base::AddressRegion{it->begin() + size, it->size() - size};
    }
    return ret;
  }
  return {};
}
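
// A minimal usage sketch of {DisjointAllocationPool} (hypothetical addresses
// and construction, not part of this file): the pool keeps a sorted list of
// disjoint regions, {Allocate} is first-fit, and {Merge} coalesces a freed or
// newly reserved region with its neighbors on both sides:
//
//   DisjointAllocationPool pool;
//   pool.Merge({0x10000, 0x1000});   // pool: [0x10000, 0x11000)
//   auto r = pool.Allocate(0x800);   // r:    [0x10000, 0x10800)
//   pool.Merge(r);                   // pool: [0x10000, 0x11000) again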

Address WasmCode::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    if (constant_pool_offset_ < code_comments_offset_) {
      return instruction_start() + constant_pool_offset_;
    }
  }
  return kNullAddress;
}

Address WasmCode::code_comments() const {
  return instruction_start() + code_comments_offset_;
}

uint32_t WasmCode::code_comments_size() const {
  DCHECK_GE(unpadded_binary_size_, code_comments_offset_);
  return static_cast<uint32_t>(unpadded_binary_size_ - code_comments_offset_);
}

size_t WasmCode::trap_handler_index() const {
  CHECK(HasTrapHandlerIndex());
  return static_cast<size_t>(trap_handler_index_);
}

void WasmCode::set_trap_handler_index(size_t value) {
  trap_handler_index_ = value;
}

void WasmCode::RegisterTrapHandlerData() {
  DCHECK(!HasTrapHandlerIndex());
  if (kind() != WasmCode::kFunction) return;
  if (protected_instructions_.empty()) return;

  Address base = instruction_start();

  size_t size = instructions().size();
  const int index =
      RegisterHandlerData(base, size, protected_instructions().size(),
                          protected_instructions().start());

  // TODO(eholk): if index is negative, fail.
  CHECK_LE(0, index);
  set_trap_handler_index(static_cast<size_t>(index));
}

bool WasmCode::HasTrapHandlerIndex() const { return trap_handler_index_ >= 0; }

bool WasmCode::ShouldBeLogged(Isolate* isolate) {
  // The return value is cached in {WasmEngine::IsolateData::log_codes}. Be
  // sure to call {WasmEngine::EnableCodeLogging} if this return value would
  // change for any isolate. Otherwise we might lose code events.
  return isolate->logger()->is_listening_to_code_events() ||
         isolate->is_profiling();
}
155 :
156 11 : void WasmCode::LogCode(Isolate* isolate) const {
157 : DCHECK(ShouldBeLogged(isolate));
158 11 : if (IsAnonymous()) return;
159 :
160 : ModuleWireBytes wire_bytes(native_module()->wire_bytes());
161 : // TODO(herhut): Allow to log code without on-heap round-trip of the name.
162 : WireBytesRef name_ref =
163 11 : native_module()->module()->LookupFunctionName(wire_bytes, index());
164 11 : WasmName name_vec = wire_bytes.GetNameOrNull(name_ref);
165 11 : if (!name_vec.empty()) {
166 : HandleScope scope(isolate);
167 : MaybeHandle<String> maybe_name = isolate->factory()->NewStringFromUtf8(
168 11 : Vector<const char>::cast(name_vec));
169 : Handle<String> name;
170 11 : if (!maybe_name.ToHandle(&name)) {
171 0 : name = isolate->factory()->NewStringFromAsciiChecked("<name too long>");
172 : }
173 : int name_length;
174 : auto cname =
175 : name->ToCString(AllowNullsFlag::DISALLOW_NULLS,
176 11 : RobustnessFlag::ROBUST_STRING_TRAVERSAL, &name_length);
177 22 : PROFILE(isolate,
178 : CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
179 : {cname.get(), static_cast<size_t>(name_length)}));
180 : } else {
181 : EmbeddedVector<char, 32> generated_name;
182 0 : int length = SNPrintF(generated_name, "wasm-function[%d]", index());
183 0 : generated_name.Truncate(length);
184 0 : PROFILE(isolate, CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
185 : generated_name));
186 : }
187 :
188 11 : if (!source_positions().empty()) {
189 11 : LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(instruction_start(),
190 : source_positions()));
191 : }
192 : }

void WasmCode::Validate() const {
#ifdef DEBUG
  // We expect certain relocation info modes to never appear in {WasmCode}
  // objects or to be restricted to a small set of valid values. Hence the
  // iteration below does not use a mask, but visits all relocation data.
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    switch (mode) {
      case RelocInfo::WASM_CALL: {
        Address target = it.rinfo()->wasm_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->jump_table_, code);
        CHECK(code->contains(target));
        break;
      }
      case RelocInfo::WASM_STUB_CALL: {
        Address target = it.rinfo()->wasm_stub_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
#ifdef V8_EMBEDDED_BUILTINS
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->runtime_stub_table_, code);
        CHECK(code->contains(target));
#else
        CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
        CHECK_EQ(target, code->instruction_start());
#endif
        break;
      }
      case RelocInfo::INTERNAL_REFERENCE:
      case RelocInfo::INTERNAL_REFERENCE_ENCODED: {
        Address target = it.rinfo()->target_internal_reference();
        CHECK(contains(target));
        break;
      }
      case RelocInfo::EXTERNAL_REFERENCE:
      case RelocInfo::CONST_POOL:
      case RelocInfo::VENEER_POOL:
        // These are OK to appear.
        break;
      default:
        FATAL("Unexpected mode: %d", mode);
    }
  }
#endif
}

void WasmCode::MaybePrint(const char* name) const {
  // Determines whether flags want this code to be printed.
  if ((FLAG_print_wasm_code && kind() == kFunction) ||
      (FLAG_print_wasm_stub_code && kind() != kFunction) || FLAG_print_code) {
    Print(name);
  }
}

void WasmCode::Print(const char* name) const {
  StdoutStream os;
  os << "--- WebAssembly code ---\n";
  Disassemble(name, os);
  os << "--- End code ---\n";
}

void WasmCode::Disassemble(const char* name, std::ostream& os,
                           Address current_pc) const {
  if (name) os << "name: " << name << "\n";
  if (!IsAnonymous()) os << "index: " << index() << "\n";
  os << "kind: " << GetWasmCodeKindAsString(kind_) << "\n";
  os << "compiler: " << (is_liftoff() ? "Liftoff" : "TurboFan") << "\n";
  size_t padding = instructions().size() - unpadded_binary_size_;
  os << "Body (size = " << instructions().size() << " = "
     << unpadded_binary_size_ << " + " << padding << " padding)\n";

#ifdef ENABLE_DISASSEMBLER
  size_t instruction_size = unpadded_binary_size_;
  if (constant_pool_offset_ < instruction_size) {
    instruction_size = constant_pool_offset_;
  }
  if (safepoint_table_offset_ && safepoint_table_offset_ < instruction_size) {
    instruction_size = safepoint_table_offset_;
  }
  if (handler_table_offset_ && handler_table_offset_ < instruction_size) {
    instruction_size = handler_table_offset_;
  }
  DCHECK_LT(0, instruction_size);
  os << "Instructions (size = " << instruction_size << ")\n";
  Disassembler::Decode(nullptr, &os, instructions().start(),
                       instructions().start() + instruction_size,
                       CodeReference(this), current_pc);
  os << "\n";

  if (handler_table_offset_ > 0) {
    HandlerTable table(instruction_start(), handler_table_offset_);
    os << "Exception Handler Table (size = " << table.NumberOfReturnEntries()
       << "):\n";
    table.HandlerTableReturnPrint(os);
    os << "\n";
  }

  if (!protected_instructions_.empty()) {
    os << "Protected instructions:\n pc offset land pad\n";
    for (auto& data : protected_instructions()) {
      os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
         << std::hex << data.landing_offset << "\n";
    }
    os << "\n";
  }

  if (!source_positions().empty()) {
    os << "Source positions:\n pc offset position\n";
    for (SourcePositionTableIterator it(source_positions()); !it.done();
         it.Advance()) {
      os << std::setw(10) << std::hex << it.code_offset() << std::dec
         << std::setw(10) << it.source_position().ScriptOffset()
         << (it.is_statement() ? " statement" : "") << "\n";
    }
    os << "\n";
  }

  if (safepoint_table_offset_ > 0) {
    SafepointTable table(instruction_start(), safepoint_table_offset_,
                         stack_slots_);
    os << "Safepoints (size = " << table.size() << ")\n";
    for (uint32_t i = 0; i < table.length(); i++) {
      uintptr_t pc_offset = table.GetPcOffset(i);
      os << reinterpret_cast<const void*>(instruction_start() + pc_offset);
      os << std::setw(6) << std::hex << pc_offset << " " << std::dec;
      table.PrintEntry(i, os);
      os << " (sp -> fp)";
      SafepointEntry entry = table.GetEntry(i);
      if (entry.trampoline_pc() != -1) {
        os << " trampoline: " << std::hex << entry.trampoline_pc() << std::dec;
      }
      if (entry.has_deoptimization_index()) {
        os << " deopt: " << std::setw(6) << entry.deoptimization_index();
      }
      os << "\n";
    }
    os << "\n";
  }

  os << "RelocInfo (size = " << reloc_info_.size() << ")\n";
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    it.rinfo()->Print(nullptr, os);
  }
  os << "\n";

  if (code_comments_size() > 0) {
    PrintCodeCommentsSection(os, code_comments(), code_comments_size());
  }
#endif  // ENABLE_DISASSEMBLER
}

const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
  switch (kind) {
    case WasmCode::kFunction:
      return "wasm function";
    case WasmCode::kWasmToJsWrapper:
      return "wasm-to-js";
    case WasmCode::kRuntimeStub:
      return "runtime-stub";
    case WasmCode::kInterpreterEntry:
      return "interpreter entry";
    case WasmCode::kJumpTable:
      return "jump table";
  }
  return "unknown kind";
}

WasmCode::~WasmCode() {
  if (HasTrapHandlerIndex()) {
    CHECK_LT(trap_handler_index(),
             static_cast<size_t>(std::numeric_limits<int>::max()));
    trap_handler::ReleaseHandlerData(static_cast<int>(trap_handler_index()));
  }
}

V8_WARN_UNUSED_RESULT bool WasmCode::DecRefOnPotentiallyDeadCode() {
  if (native_module_->engine()->AddPotentiallyDeadCode(this)) {
    // The code just became potentially dead. The ref count we wanted to
    // decrement is now transferred to the set of potentially dead code, and
    // will be decremented when the next GC is run.
    return false;
  }
  // If we reach here, the code was already potentially dead. Decrement the ref
  // count, and return true if it drops to zero.
  int old_count = ref_count_.load(std::memory_order_relaxed);
  while (true) {
    DCHECK_LE(1, old_count);
    if (ref_count_.compare_exchange_weak(old_count, old_count - 1,
                                         std::memory_order_relaxed)) {
      return old_count == 1;
    }
  }
}
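
// Note on the CAS loop above: {compare_exchange_weak} refreshes {old_count}
// on failure, so each retry observes the latest value. A standalone sketch of
// the same decrement pattern (plain C++, not the V8 API):
//
//   std::atomic<int> ref_count{2};
//   int old_count = ref_count.load(std::memory_order_relaxed);
//   while (!ref_count.compare_exchange_weak(old_count, old_count - 1,
//                                           std::memory_order_relaxed)) {
//     // {old_count} was reloaded by the failed CAS; just retry.
//   }
//   bool dropped_to_zero = (old_count == 1);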

// static
void WasmCode::DecrementRefCount(Vector<WasmCode*> code_vec) {
  // Decrement the ref counter of all given code objects. Keep the ones whose
  // ref count drops to zero.
  std::unordered_map<NativeModule*, std::vector<WasmCode*>> dead_code;
  for (WasmCode* code : code_vec) {
    if (code->DecRef()) dead_code[code->native_module()].push_back(code);
  }

  // For each native module, free all its code objects at once.
  for (auto& dead_code_entry : dead_code) {
    NativeModule* native_module = dead_code_entry.first;
    Vector<WasmCode*> code_vec = VectorOf(dead_code_entry.second);
    native_module->FreeCode(code_vec);
  }
}

NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled,
                           bool can_request_more, VirtualMemory code_space,
                           std::shared_ptr<const WasmModule> module,
                           std::shared_ptr<Counters> async_counters,
                           std::shared_ptr<NativeModule>* shared_this)
    : enabled_features_(enabled),
      module_(std::move(module)),
      import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
          new WasmImportWrapperCache(this))),
      free_code_space_(code_space.region()),
      engine_(engine),
      can_request_more_memory_(can_request_more),
      use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
                                                             : kNoTrapHandler) {
  // We receive a pointer to an empty {std::shared_ptr}, and install ourselves
  // there.
  DCHECK_NOT_NULL(shared_this);
  DCHECK_NULL(*shared_this);
  shared_this->reset(this);
  compilation_state_ =
      CompilationState::New(*shared_this, std::move(async_counters));
  DCHECK_NOT_NULL(module_);
  owned_code_space_.emplace_back(std::move(code_space));
  owned_code_.reserve(num_functions());

#if defined(V8_OS_WIN_X64)
  // On some platforms, specifically Win64, we need to reserve some pages at
  // the beginning of an executable space.
  // See src/heap/spaces.cc, MemoryAllocator::InitializeCodePageAllocator() and
  // https://cs.chromium.org/chromium/src/components/crash/content/app/crashpad_win.cc?rcl=fd680447881449fba2edcf0589320e7253719212&l=204
  // for details.
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      FLAG_win64_unwinding_info) {
    AllocateForCode(Heap::GetCodeRangeReservedAreaSize());
  }
#endif

  uint32_t num_wasm_functions = module_->num_declared_functions;
  if (num_wasm_functions > 0) {
    code_table_.reset(new WasmCode* [num_wasm_functions] {});

    WasmCodeRefScope code_ref_scope;
    jump_table_ = CreateEmptyJumpTable(
        JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
  }
}
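
// Background sketch for the jump table set up above (informal summary, not
// new behavior): every declared function owns one slot in {jump_table_}, and
// wasm-to-wasm calls are routed through that slot. Publishing a new tier of a
// function only patches its slot, so already-compiled callers never need to
// be re-relocated:
//
//   call_target(func_index)
//       == jump_table_start + SlotIndexToOffset(func_index - num_imports)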

void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
  WasmCodeRefScope code_ref_scope;
  DCHECK_LE(num_functions(), max_functions);
  WasmCode** new_table = new WasmCode* [max_functions] {};
  if (module_->num_declared_functions > 0) {
    memcpy(new_table, code_table_.get(),
           module_->num_declared_functions * sizeof(*new_table));
  }
  code_table_.reset(new_table);

  // Re-allocate jump table.
  jump_table_ = CreateEmptyJumpTable(
      JumpTableAssembler::SizeForNumberOfSlots(max_functions));
}

void NativeModule::LogWasmCodes(Isolate* isolate) {
  if (!WasmCode::ShouldBeLogged(isolate)) return;

  // TODO(titzer): we skip the logging of the import wrappers
  // here, but they should be included somehow.
  int start = module()->num_imported_functions;
  int end = start + module()->num_declared_functions;
  WasmCodeRefScope code_ref_scope;
  for (int func_index = start; func_index < end; ++func_index) {
    if (WasmCode* code = GetCode(func_index)) code->LogCode(isolate);
  }
}

CompilationEnv NativeModule::CreateCompilationEnv() const {
  return {module(), use_trap_handler_, kRuntimeExceptionSupport,
          enabled_features_};
}

WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
  return AddAndPublishAnonymousCode(code, WasmCode::kFunction);
}

void NativeModule::UseLazyStubs() {
  uint32_t start = module_->num_imported_functions;
  uint32_t end = start + module_->num_declared_functions;
  for (uint32_t func_index = start; func_index < end; func_index++) {
    UseLazyStub(func_index);
  }
}

void NativeModule::UseLazyStub(uint32_t func_index) {
  DCHECK_LE(module_->num_imported_functions, func_index);
  DCHECK_LT(func_index,
            module_->num_imported_functions + module_->num_declared_functions);

  // Add jump table entry for jump to the lazy compile stub.
  uint32_t slot_index = func_index - module_->num_imported_functions;
  DCHECK_NE(runtime_stub_entry(WasmCode::kWasmCompileLazy), kNullAddress);
  JumpTableAssembler::EmitLazyCompileJumpSlot(
      jump_table_->instruction_start(), slot_index, func_index,
      runtime_stub_entry(WasmCode::kWasmCompileLazy), WasmCode::kFlushICache);
}
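
// Informal sketch of the lazy-compilation flow set up above: the jump table
// slot for {func_index} initially jumps to the WasmCompileLazy runtime stub.
// The first call therefore enters the runtime, which compiles the function
// and publishes the result; {PublishCodeLocked} then patches the same slot to
// point at the real code, so all subsequent calls take the direct path.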

// TODO(mstarzinger): Remove {Isolate} parameter once {V8_EMBEDDED_BUILTINS}
// has been removed and embedded builtins are no longer optional.
void NativeModule::SetRuntimeStubs(Isolate* isolate) {
  DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
#ifdef V8_EMBEDDED_BUILTINS
  WasmCodeRefScope code_ref_scope;
  WasmCode* jump_table =
      CreateEmptyJumpTable(JumpTableAssembler::SizeForNumberOfStubSlots(
          WasmCode::kRuntimeStubCount));
  Address base = jump_table->instruction_start();
  EmbeddedData embedded_data = EmbeddedData::FromBlob();
#define RUNTIME_STUB(Name) {Builtins::k##Name, WasmCode::k##Name},
#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
  std::pair<Builtins::Name, WasmCode::RuntimeStubId> wasm_runtime_stubs[] = {
      WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
#undef RUNTIME_STUB
#undef RUNTIME_STUB_TRAP
  for (auto pair : wasm_runtime_stubs) {
    CHECK(embedded_data.ContainsBuiltin(pair.first));
    Address builtin = embedded_data.InstructionStartOfBuiltin(pair.first);
    JumpTableAssembler::EmitRuntimeStubSlot(base, pair.second, builtin,
                                            WasmCode::kNoFlushICache);
    uint32_t slot_offset =
        JumpTableAssembler::StubSlotIndexToOffset(pair.second);
    runtime_stub_entries_[pair.second] = base + slot_offset;
  }
  FlushInstructionCache(jump_table->instructions().start(),
                        jump_table->instructions().size());
  DCHECK_NULL(runtime_stub_table_);
  runtime_stub_table_ = jump_table;
#else   // V8_EMBEDDED_BUILTINS
  HandleScope scope(isolate);
  WasmCodeRefScope code_ref_scope;
  USE(runtime_stub_table_);  // Actually unused, but avoids ifdef's in header.
#define COPY_BUILTIN(Name)                                        \
  runtime_stub_entries_[WasmCode::k##Name] =                      \
      AddAndPublishAnonymousCode(                                 \
          isolate->builtins()->builtin_handle(Builtins::k##Name), \
          WasmCode::kRuntimeStub, #Name)                          \
          ->instruction_start();
#define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
#undef COPY_BUILTIN_TRAP
#undef COPY_BUILTIN
#endif  // V8_EMBEDDED_BUILTINS
  DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
}

WasmCode* NativeModule::AddAndPublishAnonymousCode(Handle<Code> code,
                                                   WasmCode::Kind kind,
                                                   const char* name) {
  // For off-heap builtins, we create a copy of the off-heap instruction stream
  // instead of the on-heap code object containing the trampoline. Ensure that
  // we do not apply the on-heap reloc info to the off-heap instructions.
  const size_t relocation_size =
      code->is_off_heap_trampoline() ? 0 : code->relocation_size();
  OwnedVector<byte> reloc_info;
  if (relocation_size > 0) {
    reloc_info = OwnedVector<byte>::New(relocation_size);
    memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
  }
  Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
                                     code->GetIsolate());
  OwnedVector<byte> source_pos =
      OwnedVector<byte>::New(source_pos_table->length());
  if (source_pos_table->length() > 0) {
    source_pos_table->copy_out(0, source_pos.start(),
                               source_pos_table->length());
  }
  Vector<const byte> instructions(
      reinterpret_cast<byte*>(code->InstructionStart()),
      static_cast<size_t>(code->InstructionSize()));
  const uint32_t stack_slots = static_cast<uint32_t>(
      code->has_safepoint_info() ? code->stack_slots() : 0);

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // Code objects contain real offsets but WasmCode expects an offset of 0 to
  // mean 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      code->has_safepoint_table() ? code->safepoint_table_offset() : 0);
  const size_t handler_table_offset = static_cast<size_t>(
      code->has_handler_table() ? code->handler_table_offset() : 0);
  const size_t constant_pool_offset =
      static_cast<size_t>(code->constant_pool_offset());
  const size_t code_comments_offset =
      static_cast<size_t>(code->code_comments_offset());

  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = reinterpret_cast<Address>(dst_code_bytes.begin()) -
                   code->InstructionStart();
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  RelocIterator orig_it(*code, mode_mask);
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next(), orig_it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache after relocation.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  DCHECK_NE(kind, WasmCode::Kind::kInterpreterEntry);
  std::unique_ptr<WasmCode> new_code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      dst_code_bytes,                           // instructions
      stack_slots,                              // stack_slots
      0,                                        // tagged_parameter_slots
      safepoint_table_offset,                   // safepoint_table_offset
      handler_table_offset,                     // handler_table_offset
      constant_pool_offset,                     // constant_pool_offset
      code_comments_offset,                     // code_comments_offset
      instructions.size(),                      // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      std::move(reloc_info),                    // reloc_info
      std::move(source_pos),                    // source positions
      kind,                                     // kind
      ExecutionTier::kNone}};                   // tier
  new_code->MaybePrint(name);
  new_code->Validate();

  return PublishCode(std::move(new_code));
}
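
// Sketch of the relocation step above (informal): the copied code still
// encodes addresses relative to its old location, so every applicable
// RelocInfo entry is shifted by the displacement between the two buffers:
//
//   delta          = new_instruction_start - old_instruction_start
//   patched_target = old_target + delta
//
// Stub calls are the exception: they are re-pointed at this module's runtime
// stub entries instead of being shifted.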

std::unique_ptr<WasmCode> NativeModule::AddCode(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier) {
  return AddCodeWithCodeSpace(index, desc, stack_slots, tagged_parameter_slots,
                              std::move(protected_instructions),
                              std::move(source_position_table), kind, tier,
                              AllocateForCode(desc.instr_size));
}

std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier, Vector<uint8_t> dst_code_bytes) {
  OwnedVector<byte> reloc_info;
  if (desc.reloc_size > 0) {
    reloc_info = OwnedVector<byte>::New(desc.reloc_size);
    memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
           desc.reloc_size);
  }

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // CodeDesc contains real offsets but WasmCode expects an offset of 0 to mean
  // 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      desc.safepoint_table_size == 0 ? 0 : desc.safepoint_table_offset);
  const size_t handler_table_offset = static_cast<size_t>(
      desc.handler_table_size == 0 ? 0 : desc.handler_table_offset);
  const size_t constant_pool_offset =
      static_cast<size_t>(desc.constant_pool_offset);
  const size_t code_comments_offset =
      static_cast<size_t>(desc.code_comments_offset);
  const size_t instr_size = static_cast<size_t>(desc.instr_size);

  memcpy(dst_code_bytes.begin(), desc.buffer,
         static_cast<size_t>(desc.instr_size));

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = dst_code_bytes.begin() - desc.buffer;
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmCall(mode)) {
      uint32_t call_tag = it.rinfo()->wasm_call_tag();
      Address target = GetCallTargetForFunction(call_tag);
      it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache after relocation.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, instr_size, std::move(protected_instructions),
      std::move(reloc_info), std::move(source_position_table), kind, tier}};
  code->MaybePrint();
  code->Validate();

  code->RegisterTrapHandlerData();

  return code;
}

WasmCode* NativeModule::PublishCode(std::unique_ptr<WasmCode> code) {
  base::MutexGuard lock(&allocation_mutex_);
  return PublishCodeLocked(std::move(code));
}

namespace {
WasmCode::Kind GetCodeKindForExecutionTier(ExecutionTier tier) {
  switch (tier) {
    case ExecutionTier::kInterpreter:
      return WasmCode::Kind::kInterpreterEntry;
    case ExecutionTier::kLiftoff:
    case ExecutionTier::kTurbofan:
      return WasmCode::Kind::kFunction;
    case ExecutionTier::kNone:
      UNREACHABLE();
  }
}
}  // namespace

WasmCode* NativeModule::PublishCodeLocked(std::unique_ptr<WasmCode> code) {
  // The caller must hold the {allocation_mutex_}, thus the TryLock() below is
  // expected to fail.
  DCHECK(!allocation_mutex_.TryLock());

  if (!code->IsAnonymous()) {
    DCHECK_LT(code->index(), num_functions());
    DCHECK_LE(module_->num_imported_functions, code->index());

    // Assume an order of execution tiers that represents the quality of their
    // generated code.
    static_assert(ExecutionTier::kNone < ExecutionTier::kInterpreter &&
                      ExecutionTier::kInterpreter < ExecutionTier::kLiftoff &&
                      ExecutionTier::kLiftoff < ExecutionTier::kTurbofan,
                  "Assume an order on execution tiers");

    // Update the code table, but avoid falling back to less optimized code.
    // We only use the new code if it was compiled with a higher tier.
    uint32_t slot_idx = code->index() - module_->num_imported_functions;
    WasmCode* prior_code = code_table_[slot_idx];
    bool update_code_table = !prior_code || prior_code->tier() < code->tier();
    if (update_code_table) {
      code_table_[slot_idx] = code.get();
      if (prior_code) {
        WasmCodeRefScope::AddRef(prior_code);
        // The code is added to the current {WasmCodeRefScope}, hence the ref
        // count cannot drop to zero here.
        CHECK(!prior_code->DecRef());
      }
    }

    // Populate optimized code to the jump table unless there is an active
    // redirection to the interpreter that should be preserved.
    bool update_jump_table =
        update_code_table && !has_interpreter_redirection(code->index());

    // Ensure that interpreter entries always populate to the jump table.
    if (code->kind_ == WasmCode::Kind::kInterpreterEntry) {
      SetInterpreterRedirection(code->index());
      update_jump_table = true;
    }

    if (update_jump_table) {
      JumpTableAssembler::PatchJumpTableSlot(
          jump_table_->instruction_start(), slot_idx, code->instruction_start(),
          WasmCode::kFlushICache);
    }
  }
  WasmCodeRefScope::AddRef(code.get());
  WasmCode* result = code.get();
  owned_code_.emplace_back(std::move(code));
  return result;
}
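
// Tier-up sketch for {PublishCodeLocked} (informal): with the order
// kNone < kInterpreter < kLiftoff < kTurbofan, publishing Liftoff code first
// and TurboFan code later patches the jump table twice, while a late Liftoff
// result arriving after TurboFan leaves both the code table and the jump
// table untouched.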

WasmCode* NativeModule::AddDeserializedCode(
    uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
    uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
    size_t handler_table_offset, size_t constant_pool_offset,
    size_t code_comments_offset, size_t unpadded_binary_size,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> reloc_info,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier) {
  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, unpadded_binary_size,
      std::move(protected_instructions), std::move(reloc_info),
      std::move(source_position_table), kind, tier}};

  code->RegisterTrapHandlerData();

  // Note: we do not flush the i-cache here, since the code needs to be
  // relocated anyway. The caller is responsible for flushing the i-cache
  // later.

  return PublishCode(std::move(code));
}

std::vector<WasmCode*> NativeModule::SnapshotCodeTable() const {
  base::MutexGuard lock(&allocation_mutex_);
  WasmCode** start = code_table_.get();
  WasmCode** end = start + module_->num_declared_functions;
  return std::vector<WasmCode*>{start, end};
}

WasmCode* NativeModule::GetCode(uint32_t index) const {
  base::MutexGuard guard(&allocation_mutex_);
  DCHECK_LT(index, num_functions());
  DCHECK_LE(module_->num_imported_functions, index);
  WasmCode* code = code_table_[index - module_->num_imported_functions];
  WasmCodeRefScope::AddRef(code);
  return code;
}

bool NativeModule::HasCode(uint32_t index) const {
  base::MutexGuard guard(&allocation_mutex_);
  DCHECK_LT(index, num_functions());
  DCHECK_LE(module_->num_imported_functions, index);
  return code_table_[index - module_->num_imported_functions] != nullptr;
}

WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t jump_table_size) {
  // Only call this if we really need a jump table.
  DCHECK_LT(0, jump_table_size);
  Vector<uint8_t> code_space = AllocateForCode(jump_table_size);
  ZapCode(reinterpret_cast<Address>(code_space.begin()), code_space.size());
  std::unique_ptr<WasmCode> code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      code_space,                               // instructions
      0,                                        // stack_slots
      0,                                        // tagged_parameter_slots
      0,                                        // safepoint_table_offset
      0,                                        // handler_table_offset
      jump_table_size,                          // constant_pool_offset
      jump_table_size,                          // code_comments_offset
      jump_table_size,                          // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      OwnedVector<const uint8_t>{},             // reloc_info
      OwnedVector<const uint8_t>{},             // source_pos
      WasmCode::kJumpTable,                     // kind
      ExecutionTier::kNone}};                   // tier
  return PublishCode(std::move(code));
}
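
// Note on the constructor arguments above: passing {jump_table_size} for
// constant_pool_offset, code_comments_offset and unpadded_binary_size marks
// those trailing sections as empty, matching the "offset at the end of the
// code means absent" convention used elsewhere in this file.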

Vector<byte> NativeModule::AllocateForCode(size_t size) {
  base::MutexGuard lock(&allocation_mutex_);
  DCHECK_LT(0, size);
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  // All free-list and commit bookkeeping below happens under the
  // {allocation_mutex_} acquired above.
  size = RoundUp<kCodeAlignment>(size);
  base::AddressRegion code_space = free_code_space_.Allocate(size);
  if (code_space.is_empty()) {
    if (!can_request_more_memory_) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }

    Address hint = owned_code_space_.empty() ? kNullAddress
                                             : owned_code_space_.back().end();

    VirtualMemory new_mem = engine_->code_manager()->TryAllocate(
        size, reinterpret_cast<void*>(hint));
    if (!new_mem.IsReserved()) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }
    engine_->code_manager()->AssignRanges(new_mem.address(), new_mem.end(),
                                          this);

    free_code_space_.Merge(new_mem.region());
    owned_code_space_.emplace_back(std::move(new_mem));
    code_space = free_code_space_.Allocate(size);
    DCHECK(!code_space.is_empty());
  }
  const Address page_size = page_allocator->AllocatePageSize();
  Address commit_start = RoundUp(code_space.begin(), page_size);
  Address commit_end = RoundUp(code_space.end(), page_size);
  // {commit_start} will be either code_space.start or the start of the next
  // page. {commit_end} will be the start of the page after the one in which
  // the allocation ends.
  // We start from an aligned start, and we know we allocated vmem in
  // page multiples.
  // We just need to commit what's not committed. The page in which we
  // start is already committed (or we start at the beginning of a page).
  // The end needs to be committed all through the end of the page.
  if (commit_start < commit_end) {
    committed_code_space_.fetch_add(commit_end - commit_start);
    // Committed code cannot grow bigger than maximum code space size.
    DCHECK_LE(committed_code_space_.load(), kMaxWasmCodeMemory);
#if V8_OS_WIN
    // On Windows, we cannot commit a region that straddles different
    // reservations of virtual memory. Because we bump-allocate, and because,
    // if we need more memory, we append that memory at the end of the
    // owned_code_space_ list, we traverse that list in reverse order to find
    // the reservation(s) that guide how to chunk the region to commit.
    for (auto& vmem : base::Reversed(owned_code_space_)) {
      if (commit_end <= vmem.address() || vmem.end() <= commit_start) continue;
      Address start = std::max(commit_start, vmem.address());
      Address end = std::min(commit_end, vmem.end());
      size_t commit_size = static_cast<size_t>(end - start);
      if (!engine_->code_manager()->Commit(start, commit_size)) {
        V8::FatalProcessOutOfMemory(nullptr,
                                    "NativeModule::AllocateForCode commit");
        UNREACHABLE();
      }
      // Opportunistically reduce the commit range. This might terminate the
      // loop early.
      if (commit_start == start) commit_start = end;
      if (commit_end == end) commit_end = start;
      if (commit_start >= commit_end) break;
    }
#else
    if (!engine_->code_manager()->Commit(commit_start,
                                         commit_end - commit_start)) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode commit");
      UNREACHABLE();
    }
#endif
  }
  DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
  allocated_code_space_.Merge(code_space);
  generated_code_size_.fetch_add(code_space.size(), std::memory_order_relaxed);

  TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, code_space.begin(),
             size);
  return {reinterpret_cast<byte*>(code_space.begin()), code_space.size()};
}
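
// Worked example for the commit rounding above (hypothetical numbers,
// assuming a 4 KiB page size): an allocation of [0x10800, 0x11200) yields
//
//   commit_start = RoundUp(0x10800, 0x1000) = 0x11000
//   commit_end   = RoundUp(0x11200, 0x1000) = 0x12000
//
// so only the single untouched page [0x11000, 0x12000) is committed; the page
// containing 0x10800 was already committed by an earlier allocation.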

namespace {
class NativeModuleWireBytesStorage final : public WireBytesStorage {
 public:
  explicit NativeModuleWireBytesStorage(
      std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes)
      : wire_bytes_(std::move(wire_bytes)) {}

  Vector<const uint8_t> GetCode(WireBytesRef ref) const final {
    return wire_bytes_->as_vector().SubVector(ref.offset(), ref.end_offset());
  }

 private:
  const std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;
};
}  // namespace

void NativeModule::SetWireBytes(OwnedVector<const uint8_t> wire_bytes) {
  auto shared_wire_bytes =
      std::make_shared<OwnedVector<const uint8_t>>(std::move(wire_bytes));
  wire_bytes_ = shared_wire_bytes;
  if (!shared_wire_bytes->empty()) {
    compilation_state_->SetWireBytesStorage(
        std::make_shared<NativeModuleWireBytesStorage>(
            std::move(shared_wire_bytes)));
  }
}

WasmCode* NativeModule::Lookup(Address pc) const {
  base::MutexGuard lock(&allocation_mutex_);
  if (owned_code_.empty()) return nullptr;
  // First update the sorted portion counter.
  if (owned_code_sorted_portion_ == 0) ++owned_code_sorted_portion_;
  while (owned_code_sorted_portion_ < owned_code_.size() &&
         owned_code_[owned_code_sorted_portion_ - 1]->instruction_start() <=
             owned_code_[owned_code_sorted_portion_]->instruction_start()) {
    ++owned_code_sorted_portion_;
  }
  // Execute at most two rounds: First check whether the {pc} is within the
  // sorted portion of {owned_code_}. If it's not, then sort the whole vector
  // and retry.
  while (true) {
    auto iter =
        std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
                         [](Address pc, const std::unique_ptr<WasmCode>& code) {
                           DCHECK_NE(kNullAddress, pc);
                           DCHECK_NOT_NULL(code);
                           return pc < code->instruction_start();
                         });
    if (iter != owned_code_.begin()) {
      --iter;
      WasmCode* candidate = iter->get();
      DCHECK_NOT_NULL(candidate);
      if (candidate->contains(pc)) {
        WasmCodeRefScope::AddRef(candidate);
        return candidate;
      }
    }
    if (owned_code_sorted_portion_ == owned_code_.size()) return nullptr;
    std::sort(owned_code_.begin(), owned_code_.end(),
              [](const std::unique_ptr<WasmCode>& code1,
                 const std::unique_ptr<WasmCode>& code2) {
                return code1->instruction_start() < code2->instruction_start();
              });
    owned_code_sorted_portion_ = owned_code_.size();
  }
}
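
// Note on {Lookup} above (informal): {owned_code_} is append-only, and new
// code usually arrives in increasing address order, so a prefix of the vector
// tends to stay sorted and the binary search succeeds on the first round. If
// the first round misses and the vector is not yet fully sorted, the fallback
// std::sort makes the second round authoritative, amortizing the sort cost
// across many lookups.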

Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
  // TODO(clemensh): Measure performance win of returning instruction start
  // directly if we have turbofan code. Downside: Redirecting functions (e.g.
  // for debugging) gets much harder.

  // Return the jump table slot for that function index.
  DCHECK_NOT_NULL(jump_table_);
  uint32_t slot_idx = func_index - module_->num_imported_functions;
  uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
  DCHECK_LT(slot_offset, jump_table_->instructions().size());
  return jump_table_->instruction_start() + slot_offset;
}

uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
    Address slot_address) const {
  DCHECK(is_jump_table_slot(slot_address));
  uint32_t slot_offset =
      static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
  uint32_t slot_idx = JumpTableAssembler::SlotOffsetToIndex(slot_offset);
  DCHECK_LT(slot_idx, module_->num_declared_functions);
  return module_->num_imported_functions + slot_idx;
}
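
// The two functions above are inverses of each other (informal sketch,
// assuming a module with 2 imported functions):
//
//   GetCallTargetForFunction(5)
//       == jump_table_start + SlotIndexToOffset(5 - 2)
//   GetFunctionIndexFromJumpTableSlot(that address)
//       == 2 + SlotOffsetToIndex(SlotIndexToOffset(3)) == 5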

const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
#define RETURN_NAME(Name)                                               \
  if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
    return #Name;                                                       \
  }
#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
#undef RETURN_NAME_TRAP
#undef RETURN_NAME
  return "<unknown>";
}

NativeModule::~NativeModule() {
  TRACE_HEAP("Deleting native module: %p\n", reinterpret_cast<void*>(this));
  // Cancel all background compilation before resetting any field of the
  // NativeModule or freeing anything.
  compilation_state_->AbortCompilation();
  engine_->FreeNativeModule(this);
  // Free the import wrapper cache before releasing the {WasmCode} objects in
  // {owned_code_}. The destructor of {WasmImportWrapperCache} still needs to
  // decrease reference counts on the {WasmCode} objects.
  import_wrapper_cache_.reset();
}

WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
                                 size_t max_committed)
    : memory_tracker_(memory_tracker),
      max_committed_code_space_(max_committed),
      total_committed_code_space_(0),
      critical_committed_code_space_(max_committed / 2) {
  DCHECK_LE(max_committed, kMaxWasmCodeMemory);
}

bool WasmCodeManager::Commit(Address start, size_t size) {
  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) return true;
  DCHECK(IsAligned(start, AllocatePageSize()));
  DCHECK(IsAligned(size, AllocatePageSize()));
  // Reserve the size. Use a CAS loop to avoid overflow on
  // {total_committed_code_space_}.
  size_t old_value = total_committed_code_space_.load();
  while (true) {
    DCHECK_GE(max_committed_code_space_, old_value);
    if (size > max_committed_code_space_ - old_value) return false;
    if (total_committed_code_space_.compare_exchange_weak(old_value,
                                                          old_value + size)) {
      break;
    }
  }
  PageAllocator::Permission permission = FLAG_wasm_write_protect_code_memory
                                             ? PageAllocator::kReadWrite
                                             : PageAllocator::kReadWriteExecute;

  bool ret =
      SetPermissions(GetPlatformPageAllocator(), start, size, permission);
  TRACE_HEAP("Setting rw permissions for %p:%p\n",
             reinterpret_cast<void*>(start),
             reinterpret_cast<void*>(start + size));

  if (!ret) {
    // Highly unlikely.
    total_committed_code_space_.fetch_sub(size);
    return false;
  }
  return true;
}
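
// Note on the budget check above (informal): the comparison is written as
// {size > max - old_value} rather than {old_value + size > max} so that the
// left-hand side cannot overflow; {old_value <= max} is guaranteed by the
// DCHECK, and on contention the failed CAS refreshes {old_value} before the
// check is re-evaluated.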

void WasmCodeManager::AssignRanges(Address start, Address end,
                                   NativeModule* native_module) {
  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
}

VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  DCHECK_GT(size, 0);
  size = RoundUp(size, page_allocator->AllocatePageSize());
  if (!memory_tracker_->ReserveAddressSpace(size)) return {};
  if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();

  VirtualMemory mem(page_allocator, size, hint,
                    page_allocator->AllocatePageSize());
  if (!mem.IsReserved()) {
    memory_tracker_->ReleaseReservation(size);
    return {};
  }
  TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
             reinterpret_cast<void*>(mem.address()),
             reinterpret_cast<void*>(mem.end()), mem.size());

  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) {
    SetPermissions(GetPlatformPageAllocator(), mem.address(), mem.size(),
                   PageAllocator::kReadWriteExecute);
  }
  return mem;
}

void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
  // This has to be set before committing any memory.
  DCHECK_EQ(0, total_committed_code_space_.load());
  max_committed_code_space_ = limit;
  critical_committed_code_space_.store(limit / 2);
}

// static
size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
  constexpr size_t kCodeSizeMultiplier = 4;
  constexpr size_t kCodeOverhead = 32;     // for prologue, stack check, ...
  constexpr size_t kStaticCodeSize = 512;  // runtime stubs, ...
  constexpr size_t kImportSize = 64 * kSystemPointerSize;

  size_t estimate = kStaticCodeSize;
  for (auto& function : module->functions) {
    estimate += kCodeOverhead + kCodeSizeMultiplier * function.code.length();
  }
  estimate +=
      JumpTableAssembler::SizeForNumberOfSlots(module->num_declared_functions);
  estimate += kImportSize * module->num_imported_functions;

  return estimate;
}
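
// Worked example for the estimate above (hypothetical module with two
// functions of 100 and 300 wire bytes, 1 import, and jump table size J):
//
//   estimate = 512                       // kStaticCodeSize
//            + (32 + 4 * 100)            // function 1
//            + (32 + 4 * 300)            // function 2
//            + J                         // jump table slots
//            + 64 * kSystemPointerSize;  // 1 import
//
// i.e. roughly 4x the wire-byte size of the function bodies plus fixed
// overheads.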

// static
size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
    const WasmModule* module) {
  size_t wasm_module_estimate = EstimateStoredSize(module);

  uint32_t num_wasm_functions = module->num_declared_functions;

  // TODO(wasm): Include wire bytes size.
  size_t native_module_estimate =
      sizeof(NativeModule) +                     /* NativeModule struct */
      (sizeof(WasmCode*) * num_wasm_functions) + /* code table size */
      (sizeof(WasmCode) * num_wasm_functions);   /* code object size */

  return wasm_module_estimate + native_module_estimate;
}

std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule(
    WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
    size_t code_size_estimate, bool can_request_more,
    std::shared_ptr<const WasmModule> module) {
  DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
  if (total_committed_code_space_.load() >
      critical_committed_code_space_.load()) {
    (reinterpret_cast<v8::Isolate*>(isolate))
        ->MemoryPressureNotification(MemoryPressureLevel::kCritical);
    size_t committed = total_committed_code_space_.load();
    DCHECK_GE(max_committed_code_space_, committed);
    critical_committed_code_space_.store(
        committed + (max_committed_code_space_ - committed) / 2);
  }

  // If the code must be contiguous, reserve enough address space up front.
  size_t code_vmem_size =
      kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
  // Try up to two times; getting rid of dead JSArrayBuffer allocations might
  // require two GCs because the first GC may be incremental and may have
  // floating garbage.
  static constexpr int kAllocationRetries = 2;
  VirtualMemory code_space;
  for (int retries = 0;; ++retries) {
    code_space = TryAllocate(code_vmem_size);
    if (code_space.IsReserved()) break;
    if (retries == kAllocationRetries) {
      V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
      UNREACHABLE();
    }
    // Run one GC, then try the allocation again.
    isolate->heap()->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                                true);
  }

  Address start = code_space.address();
  size_t size = code_space.size();
  Address end = code_space.end();
  std::shared_ptr<NativeModule> ret;
  new NativeModule(engine, enabled, can_request_more, std::move(code_space),
                   std::move(module), isolate->async_counters(), &ret);
  // The constructor initialized the shared_ptr.
  DCHECK_NOT_NULL(ret);
  TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
             size);

#if defined(V8_OS_WIN_X64)
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      FLAG_win64_unwinding_info) {
    win64_unwindinfo::RegisterNonABICompliantCodeRange(
        reinterpret_cast<void*>(start), size);
  }
#endif

  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
  return ret;
}

bool NativeModule::SetExecutable(bool executable) {
  if (is_executable_ == executable) return true;
  TRACE_HEAP("Setting module %p as executable: %d.\n", this, executable);

  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();

  if (FLAG_wasm_write_protect_code_memory) {
    PageAllocator::Permission permission =
        executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
#if V8_OS_WIN
    // On Windows, we need to switch permissions per separate virtual memory
    // reservation. This is really just a problem when the NativeModule is
    // growable (meaning can_request_more_memory_). That's 32-bit in
    // production, or unittests.
    // For now, in that case, we commit at reserved memory granularity.
    // Technically, that may be a waste, because we may reserve more than we
    // use. On 32-bit though, the scarce resource is the address space -
    // committed or not.
    if (can_request_more_memory_) {
      for (auto& vmem : owned_code_space_) {
        if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
                            permission)) {
          return false;
        }
        TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
                   executable);
      }
      is_executable_ = executable;
      return true;
    }
#endif
    for (auto& region : allocated_code_space_.regions()) {
      // allocated_code_space_ is fine-grained, so we need to
      // page-align it.
      size_t region_size =
          RoundUp(region.size(), page_allocator->AllocatePageSize());
      if (!SetPermissions(page_allocator, region.begin(), region_size,
                          permission)) {
        return false;
      }
      TRACE_HEAP("Set %p:%p to executable:%d\n",
                 reinterpret_cast<void*>(region.begin()),
                 reinterpret_cast<void*>(region.end()), executable);
    }
  }
  is_executable_ = executable;
  return true;
}
1306 :
1307 688891 : void NativeModule::SampleCodeSize(
1308 : Counters* counters, NativeModule::CodeSamplingTime sampling_time) const {
1309 : size_t code_size = sampling_time == kSampling
1310 : ? committed_code_space()
1311 688891 : : generated_code_size_.load(std::memory_order_relaxed);
1312 688891 : int code_size_mb = static_cast<int>(code_size / MB);
1313 : Histogram* histogram = nullptr;
1314 688891 : switch (sampling_time) {
1315 : case kAfterBaseline:
1316 : histogram = counters->wasm_module_code_size_mb_after_baseline();
1317 2199 : break;
1318 : case kAfterTopTier:
1319 : histogram = counters->wasm_module_code_size_mb_after_top_tier();
1320 166 : break;
1321 : case kSampling:
1322 : histogram = counters->wasm_module_code_size_mb();
1323 686526 : break;
1324 : }
1325 688891 : histogram->AddSample(code_size_mb);
1326 688891 : }
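// Usage sketch for the sampler above (the call sites are assumptions): the
// code size is recorded into a different histogram depending on when the
// sample is taken, so baseline, top-tier and periodic samples stay separable.
//
//   native_module->SampleCodeSize(counters, NativeModule::kAfterBaseline);
//   native_module->SampleCodeSize(counters, NativeModule::kAfterTopTier);
//   native_module->SampleCodeSize(counters, NativeModule::kSampling);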
1327 :
1328 739910 : WasmCode* NativeModule::AddCompiledCode(WasmCompilationResult result) {
1329 1479820 : return AddCompiledCode({&result, 1})[0];
1330 : }
1331 :
1332 1007716 : std::vector<WasmCode*> NativeModule::AddCompiledCode(
1333 : Vector<WasmCompilationResult> results) {
1334 : DCHECK(!results.empty());
1335 : // First, allocate code space for all the results.
1336 : size_t total_code_space = 0;
1337 3131462 : for (auto& result : results) {
1338 : DCHECK(result.succeeded());
1339 2123746 : total_code_space += RoundUp<kCodeAlignment>(result.code_desc.instr_size);
1340 : }
1341 1007716 : Vector<byte> code_space = AllocateForCode(total_code_space);
1342 :
1343 1008391 : std::vector<std::unique_ptr<WasmCode>> generated_code;
1344 1008386 : generated_code.reserve(results.size());
1345 :
1346 : // Now copy the generated code into the code space and relocate it.
1347 3133143 : for (auto& result : results) {
1348 : DCHECK_EQ(result.code_desc.buffer, result.instr_buffer.get());
1349 2126422 : size_t code_size = RoundUp<kCodeAlignment>(result.code_desc.instr_size);
1350 1063211 : Vector<byte> this_code_space = code_space.SubVector(0, code_size);
1351 : code_space += code_size;
1352 6376936 : generated_code.emplace_back(AddCodeWithCodeSpace(
1353 1063167 : result.func_index, result.code_desc, result.frame_slot_count,
1354 : result.tagged_parameter_slots, std::move(result.protected_instructions),
1355 : std::move(result.source_positions),
1356 : GetCodeKindForExecutionTier(result.result_tier), result.result_tier,
1357 1062553 : this_code_space));
1358 : }
1359 : DCHECK_EQ(0, code_space.size());
1360 :
1361 : // Under the {allocation_mutex_}, publish the code. The published code is put
1362 : // into the top-most surrounding {WasmCodeRefScope} by {PublishCodeLocked}.
1363 : std::vector<WasmCode*> code_vector;
1364 1007506 : code_vector.reserve(results.size());
1365 : {
1366 1008354 : base::MutexGuard lock(&allocation_mutex_);
1367 2071837 : for (auto& result : generated_code)
1368 2127004 : code_vector.push_back(PublishCodeLocked(std::move(result)));
1369 : }
1370 :
1371 1008371 : return code_vector;
1372 : }
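// A minimal sketch of the two-phase batch allocation above (hypothetical
// types, not the V8 API): first sum the alignment-rounded sizes and make a
// single allocation, then carve per-function sub-buffers off the front so
// every result lands contiguously in the module's code space.

#include <cstddef>
#include <vector>

namespace batch_sketch {

constexpr size_t kAlign = 32;  // stand-in for kCodeAlignment

size_t RoundUpAligned(size_t n) { return (n + kAlign - 1) & ~(kAlign - 1); }

struct Buffer {
  unsigned char* ptr;
  size_t size;
};

std::vector<Buffer> Carve(unsigned char* base,
                          const std::vector<size_t>& sizes) {
  std::vector<Buffer> out;
  out.reserve(sizes.size());
  for (size_t s : sizes) {
    size_t rounded = RoundUpAligned(s);
    out.push_back({base, rounded});
    base += rounded;  // advance past this function's slot
  }
  return out;
}

}  // namespace batch_sketch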
1373 :
1374 0 : void NativeModule::FreeCode(Vector<WasmCode* const> codes) {
1375 : // TODO(clemensh): Implement.
1376 0 : }
1377 :
1378 1242379 : void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
1379 1242379 : base::MutexGuard lock(&native_modules_mutex_);
1380 : TRACE_HEAP("Freeing NativeModule %p\n", native_module);
1381 2484758 : for (auto& code_space : native_module->owned_code_space_) {
1382 : DCHECK(code_space.IsReserved());
1383 : TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n",
1384 : code_space.address(), code_space.end(), code_space.size());
1385 :
1386 : #if defined(V8_OS_WIN_X64)
1387 : if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
1388 : FLAG_win64_unwinding_info) {
1389 : win64_unwindinfo::UnregisterNonABICompliantCodeRange(
1390 : reinterpret_cast<void*>(code_space.address()));
1391 : }
1392 : #endif
1393 :
1394 2484758 : lookup_map_.erase(code_space.address());
1395 1242379 : memory_tracker_->ReleaseReservation(code_space.size());
1396 1242379 : code_space.Free();
1397 : DCHECK(!code_space.IsReserved());
1398 : }
1399 : native_module->owned_code_space_.clear();
1400 :
1401 : size_t code_size = native_module->committed_code_space_.load();
1402 : DCHECK(IsAligned(code_size, AllocatePageSize()));
1403 : size_t old_committed = total_committed_code_space_.fetch_sub(code_size);
1404 : DCHECK_LE(code_size, old_committed);
1405 : USE(old_committed);
1406 1242379 : }
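// A sketch of the committed-space bookkeeping above (names hypothetical):
// {fetch_sub} returns the value before the subtraction, so the underflow
// check must compare against that old value, not the new one.

#include <atomic>
#include <cassert>
#include <cstddef>

namespace accounting_sketch {

std::atomic<size_t> total_committed{0};

void OnModuleFreed(size_t code_size) {
  size_t old_committed = total_committed.fetch_sub(code_size);
  assert(code_size <= old_committed);  // subtracting more would underflow
}

}  // namespace accounting_sketch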
1407 :
1408 55825067 : NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
1409 55825067 : base::MutexGuard lock(&native_modules_mutex_);
1410 55825398 : if (lookup_map_.empty()) return nullptr;
1411 :
1412 : auto iter = lookup_map_.upper_bound(pc);
1413 13819904 : if (iter == lookup_map_.begin()) return nullptr;
1414 : --iter;
1415 13546048 : Address region_start = iter->first;
1416 13546048 : Address region_end = iter->second.first;
1417 13546048 : NativeModule* candidate = iter->second.second;
1418 :
1419 : DCHECK_NOT_NULL(candidate);
1420 13546048 : return region_start <= pc && pc < region_end ? candidate : nullptr;
1421 : }
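// A stand-alone sketch of the ordered-map interval lookup above (hypothetical
// types): {upper_bound(pc)} yields the first region whose start is strictly
// greater than {pc}, so the entry directly before it is the only one that can
// contain {pc}; its end bound is then checked explicitly.

#include <cstdint>
#include <map>
#include <utility>

namespace lookup_sketch {

struct Module;
using Address = uintptr_t;

// Maps region start address to (region end address, owning module).
std::map<Address, std::pair<Address, Module*>> ranges;

Module* Lookup(Address pc) {
  auto it = ranges.upper_bound(pc);  // first entry with start > pc
  if (it == ranges.begin()) return nullptr;
  --it;                              // now it->first <= pc
  return pc < it->second.first ? it->second.second : nullptr;
}

}  // namespace lookup_sketch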
1422 :
1423 55776318 : WasmCode* WasmCodeManager::LookupCode(Address pc) const {
1424 55776318 : NativeModule* candidate = LookupNativeModule(pc);
1425 55776629 : return candidate ? candidate->Lookup(pc) : nullptr;
1426 : }
1427 :
1428 : // TODO(v8:7424): Code protection scopes are not yet supported with shared code
1429 : // enabled and need to be revisited to work with --wasm-shared-code as well.
1430 144362 : NativeModuleModificationScope::NativeModuleModificationScope(
1431 : NativeModule* native_module)
1432 144362 : : native_module_(native_module) {
1433 144362 : if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1434 0 : (native_module_->modification_scope_depth_++) == 0) {
1435 0 : bool success = native_module_->SetExecutable(false);
1436 0 : CHECK(success);
1437 : }
1438 144362 : }
1439 :
1440 288722 : NativeModuleModificationScope::~NativeModuleModificationScope() {
1441 144361 : if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1442 0 : (native_module_->modification_scope_depth_--) == 1) {
1443 0 : bool success = native_module_->SetExecutable(true);
1444 0 : CHECK(success);
1445 : }
1446 144361 : }
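// Usage sketch for the RAII scope above (the patching body is an
// assumption): the constructor of the outermost scope makes the module's
// code writable, and the matching destructor re-protects it; nested scopes
// only bump the depth counter.
//
//   {
//     NativeModuleModificationScope scope(native_module);
//     // ... patch or add code while the pages are read-write ...
//   }  // outermost destructor flips the pages back to executable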
1447 :
1448 : namespace {
1449 : thread_local WasmCodeRefScope* current_code_refs_scope = nullptr;
1450 : } // namespace
1451 :
1452 58547001 : WasmCodeRefScope::WasmCodeRefScope()
1453 61024630 : : previous_scope_(current_code_refs_scope) {
1454 61024630 : current_code_refs_scope = this;
1455 58547001 : }
1456 :
1457 122050643 : WasmCodeRefScope::~WasmCodeRefScope() {
1458 : DCHECK_EQ(this, current_code_refs_scope);
1459 61025313 : current_code_refs_scope = previous_scope_;
1460 : std::vector<WasmCode*> code_ptrs;
1461 61025313 : code_ptrs.reserve(code_ptrs_.size());
1462 : code_ptrs.assign(code_ptrs_.begin(), code_ptrs_.end());
1463 61025243 : WasmCode::DecrementRefCount(VectorOf(code_ptrs));
1464 61025330 : }
1465 :
1466 : // static
1467 19578909 : void WasmCodeRefScope::AddRef(WasmCode* code) {
1468 19578909 : WasmCodeRefScope* current_scope = current_code_refs_scope;
1469 : DCHECK_NOT_NULL(current_scope);
1470 : auto entry = current_scope->code_ptrs_.insert(code);
1471 : // If we added a new entry, increment the ref counter.
1472 19578883 : if (entry.second) code->IncRef();
1473 19578883 : }
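// Usage sketch (the surrounding frame is an assumption; {LookupCode} is
// defined above): a {WasmCodeRefScope} on the stack becomes the current
// scope, collects every {WasmCode} looked up while it is alive, and drops
// all of those references in one batch in its destructor.
//
//   {
//     WasmCodeRefScope code_ref_scope;
//     WasmCode* code = code_manager->LookupCode(pc);  // ends up in the scope
//     // ... {code} cannot be freed while the scope is alive ...
//   }  // destructor decrements the collected ref counts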
1474 :
1475 : } // namespace wasm
1476 : } // namespace internal
1477 121996 : } // namespace v8
1478 : #undef TRACE_HEAP