// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/wasm/wasm-code-manager.h"

#include <iomanip>

#include "src/assembler-inl.h"
#include "src/base/adapters.h"
#include "src/base/macros.h"
#include "src/base/platform/platform.h"
#include "src/counters.h"
#include "src/disassembler.h"
#include "src/globals.h"
#include "src/log.h"
#include "src/macro-assembler-inl.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
#include "src/snapshot/embedded-data.h"
#include "src/vector.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/jump-table-assembler.h"
#include "src/wasm/wasm-import-wrapper-cache.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-objects.h"

#if defined(V8_OS_WIN_X64)
#include "src/unwinding-info-win64.h"
#endif

#define TRACE_HEAP(...)                                   \
  do {                                                    \
    if (FLAG_trace_wasm_native_heap) PrintF(__VA_ARGS__); \
  } while (false)

namespace v8 {
namespace internal {
namespace wasm {

using trap_handler::ProtectedInstructionData;

void DisjointAllocationPool::Merge(base::AddressRegion region) {
  auto dest_it = regions_.begin();
  auto dest_end = regions_.end();

  // Skip over dest regions strictly before {region}.
  while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;

  // After last dest region: insert and done.
  if (dest_it == dest_end) {
    regions_.push_back(region);
    return;
  }

  // Adjacent (from below) to dest: merge and done.
  if (dest_it->begin() == region.end()) {
    base::AddressRegion merged_region{region.begin(),
                                      region.size() + dest_it->size()};
    DCHECK_EQ(merged_region.end(), dest_it->end());
    *dest_it = merged_region;
    return;
  }

  // Before dest: insert and done.
  if (dest_it->begin() > region.end()) {
    regions_.insert(dest_it, region);
    return;
  }

  // Src is adjacent from above. Merge and check whether the merged region is
  // now adjacent to the next region.
  DCHECK_EQ(dest_it->end(), region.begin());
  dest_it->set_size(dest_it->size() + region.size());
  DCHECK_EQ(dest_it->end(), region.end());
  auto next_dest = dest_it;
  ++next_dest;
  if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
    dest_it->set_size(dest_it->size() + next_dest->size());
    DCHECK_EQ(dest_it->end(), next_dest->end());
    regions_.erase(next_dest);
  }
}

base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
  for (auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
    if (size > it->size()) continue;
    base::AddressRegion ret{it->begin(), size};
    if (size == it->size()) {
      regions_.erase(it);
    } else {
      *it = base::AddressRegion{it->begin() + size, it->size() - size};
    }
    return ret;
  }
  return {};
}
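
// Illustrative usage of the pool above (the addresses are made up for the
// example): merging two touching regions coalesces them into one, and a
// subsequent allocation carves bytes off the front of the first region that
// is large enough.
//
//   DisjointAllocationPool pool;
//   pool.Merge({0x10000, 0x100});  // pool: [0x10000, 0x10100)
//   pool.Merge({0x10100, 0x100});  // adjacent from above: [0x10000, 0x10200)
//   base::AddressRegion r = pool.Allocate(0x80);  // r = [0x10000, 0x10080)
//   // remaining pool: [0x10080, 0x10200)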

Address WasmCode::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    if (constant_pool_offset_ < code_comments_offset_) {
      return instruction_start() + constant_pool_offset_;
    }
  }
  return kNullAddress;
}

Address WasmCode::handler_table() const {
  return instruction_start() + handler_table_offset_;
}

uint32_t WasmCode::handler_table_size() const {
  DCHECK_GE(constant_pool_offset_, handler_table_offset_);
  return static_cast<uint32_t>(constant_pool_offset_ - handler_table_offset_);
}

Address WasmCode::code_comments() const {
  return instruction_start() + code_comments_offset_;
}

uint32_t WasmCode::code_comments_size() const {
  DCHECK_GE(unpadded_binary_size_, code_comments_offset_);
  return static_cast<uint32_t>(unpadded_binary_size_ - code_comments_offset_);
}

size_t WasmCode::trap_handler_index() const {
  CHECK(HasTrapHandlerIndex());
  return static_cast<size_t>(trap_handler_index_);
}

void WasmCode::set_trap_handler_index(size_t value) {
  trap_handler_index_ = value;
}

void WasmCode::RegisterTrapHandlerData() {
  DCHECK(!HasTrapHandlerIndex());
  if (kind() != WasmCode::kFunction) return;
  if (protected_instructions_.empty()) return;

  Address base = instruction_start();

  size_t size = instructions().size();
  const int index =
      RegisterHandlerData(base, size, protected_instructions().size(),
                          protected_instructions().start());

  // TODO(eholk): if index is negative, fail.
  CHECK_LE(0, index);
  set_trap_handler_index(static_cast<size_t>(index));
}

bool WasmCode::HasTrapHandlerIndex() const { return trap_handler_index_ >= 0; }

bool WasmCode::ShouldBeLogged(Isolate* isolate) {
  // The return value is cached in {WasmEngine::IsolateData::log_codes}. Ensure
  // to call {WasmEngine::EnableCodeLogging} if this return value would change
  // for any isolate. Otherwise we might lose code events.
  return isolate->logger()->is_listening_to_code_events() ||
         isolate->is_profiling();
}

void WasmCode::LogCode(Isolate* isolate) const {
  DCHECK(ShouldBeLogged(isolate));
  if (IsAnonymous()) return;

  ModuleWireBytes wire_bytes(native_module()->wire_bytes());
  // TODO(herhut): Allow to log code without on-heap round-trip of the name.
  WireBytesRef name_ref =
      native_module()->module()->LookupFunctionName(wire_bytes, index());
  WasmName name_vec = wire_bytes.GetNameOrNull(name_ref);
  if (!name_vec.empty()) {
    HandleScope scope(isolate);
    MaybeHandle<String> maybe_name = isolate->factory()->NewStringFromUtf8(
        Vector<const char>::cast(name_vec));
    Handle<String> name;
    if (!maybe_name.ToHandle(&name)) {
      name = isolate->factory()->NewStringFromAsciiChecked("<name too long>");
    }
    int name_length;
    auto cname =
        name->ToCString(AllowNullsFlag::DISALLOW_NULLS,
                        RobustnessFlag::ROBUST_STRING_TRAVERSAL, &name_length);
    PROFILE(isolate,
            CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                            {cname.get(), static_cast<size_t>(name_length)}));
  } else {
    EmbeddedVector<char, 32> generated_name;
    int length = SNPrintF(generated_name, "wasm-function[%d]", index());
    generated_name.Truncate(length);
    PROFILE(isolate, CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                                     generated_name));
  }

  if (!source_positions().empty()) {
    LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(instruction_start(),
                                                       source_positions()));
  }
}

void WasmCode::Validate() const {
#ifdef DEBUG
  // We expect certain relocation info modes to never appear in {WasmCode}
  // objects or to be restricted to a small set of valid values. Hence the
  // iteration below does not use a mask, but visits all relocation data.
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    switch (mode) {
      case RelocInfo::WASM_CALL: {
        Address target = it.rinfo()->wasm_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->jump_table_, code);
        CHECK(code->contains(target));
        break;
      }
      case RelocInfo::WASM_STUB_CALL: {
        Address target = it.rinfo()->wasm_stub_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
#ifdef V8_EMBEDDED_BUILTINS
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->runtime_stub_table_, code);
        CHECK(code->contains(target));
#else
        CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
        CHECK_EQ(target, code->instruction_start());
#endif
        break;
      }
      case RelocInfo::INTERNAL_REFERENCE:
      case RelocInfo::INTERNAL_REFERENCE_ENCODED: {
        Address target = it.rinfo()->target_internal_reference();
        CHECK(contains(target));
        break;
      }
      case RelocInfo::EXTERNAL_REFERENCE:
      case RelocInfo::CONST_POOL:
      case RelocInfo::VENEER_POOL:
        // These are OK to appear.
        break;
      default:
        FATAL("Unexpected mode: %d", mode);
    }
  }
#endif
}

void WasmCode::MaybePrint(const char* name) const {
  // Determines whether flags want this code to be printed.
  if ((FLAG_print_wasm_code && kind() == kFunction) ||
      (FLAG_print_wasm_stub_code && kind() != kFunction) || FLAG_print_code) {
    Print(name);
  }
}

void WasmCode::Print(const char* name) const {
  StdoutStream os;
  os << "--- WebAssembly code ---\n";
  Disassemble(name, os);
  os << "--- End code ---\n";
}

void WasmCode::Disassemble(const char* name, std::ostream& os,
                           Address current_pc) const {
  if (name) os << "name: " << name << "\n";
  if (!IsAnonymous()) os << "index: " << index() << "\n";
  os << "kind: " << GetWasmCodeKindAsString(kind_) << "\n";
  os << "compiler: " << (is_liftoff() ? "Liftoff" : "TurboFan") << "\n";
  size_t padding = instructions().size() - unpadded_binary_size_;
  os << "Body (size = " << instructions().size() << " = "
     << unpadded_binary_size_ << " + " << padding << " padding)\n";

#ifdef ENABLE_DISASSEMBLER
  size_t instruction_size = unpadded_binary_size_;
  if (constant_pool_offset_ < instruction_size) {
    instruction_size = constant_pool_offset_;
  }
  if (safepoint_table_offset_ && safepoint_table_offset_ < instruction_size) {
    instruction_size = safepoint_table_offset_;
  }
  if (handler_table_offset_ < instruction_size) {
    instruction_size = handler_table_offset_;
  }
  DCHECK_LT(0, instruction_size);
  os << "Instructions (size = " << instruction_size << ")\n";
  Disassembler::Decode(nullptr, &os, instructions().start(),
                       instructions().start() + instruction_size,
                       CodeReference(this), current_pc);
  os << "\n";

  if (handler_table_size() > 0) {
    HandlerTable table(handler_table(), handler_table_size());
    os << "Exception Handler Table (size = " << table.NumberOfReturnEntries()
       << "):\n";
    table.HandlerTableReturnPrint(os);
    os << "\n";
  }

  if (!protected_instructions_.empty()) {
    os << "Protected instructions:\n pc offset land pad\n";
    for (auto& data : protected_instructions()) {
      os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
         << std::hex << data.landing_offset << "\n";
    }
    os << "\n";
  }

  if (!source_positions().empty()) {
    os << "Source positions:\n pc offset position\n";
    for (SourcePositionTableIterator it(source_positions()); !it.done();
         it.Advance()) {
      os << std::setw(10) << std::hex << it.code_offset() << std::dec
         << std::setw(10) << it.source_position().ScriptOffset()
         << (it.is_statement() ? " statement" : "") << "\n";
    }
    os << "\n";
  }

  if (safepoint_table_offset_ > 0) {
    SafepointTable table(instruction_start(), safepoint_table_offset_,
                         stack_slots_);
    os << "Safepoints (size = " << table.size() << ")\n";
    for (uint32_t i = 0; i < table.length(); i++) {
      uintptr_t pc_offset = table.GetPcOffset(i);
      os << reinterpret_cast<const void*>(instruction_start() + pc_offset);
      os << std::setw(6) << std::hex << pc_offset << " " << std::dec;
      table.PrintEntry(i, os);
      os << " (sp -> fp)";
      SafepointEntry entry = table.GetEntry(i);
      if (entry.trampoline_pc() != -1) {
        os << " trampoline: " << std::hex << entry.trampoline_pc() << std::dec;
      }
      if (entry.has_deoptimization_index()) {
        os << " deopt: " << std::setw(6) << entry.deoptimization_index();
      }
      os << "\n";
    }
    os << "\n";
  }

  os << "RelocInfo (size = " << reloc_info_.size() << ")\n";
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    it.rinfo()->Print(nullptr, os);
  }
  os << "\n";

  if (code_comments_size() > 0) {
    PrintCodeCommentsSection(os, code_comments(), code_comments_size());
  }
#endif  // ENABLE_DISASSEMBLER
}

const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
  switch (kind) {
    case WasmCode::kFunction:
      return "wasm function";
    case WasmCode::kWasmToJsWrapper:
      return "wasm-to-js";
    case WasmCode::kRuntimeStub:
      return "runtime-stub";
    case WasmCode::kInterpreterEntry:
      return "interpreter entry";
    case WasmCode::kJumpTable:
      return "jump table";
  }
  return "unknown kind";
}

WasmCode::~WasmCode() {
  if (HasTrapHandlerIndex()) {
    CHECK_LT(trap_handler_index(),
             static_cast<size_t>(std::numeric_limits<int>::max()));
    trap_handler::ReleaseHandlerData(static_cast<int>(trap_handler_index()));
  }
}

V8_WARN_UNUSED_RESULT bool WasmCode::DecRefOnPotentiallyDeadCode() {
  if (native_module_->engine()->AddPotentiallyDeadCode(this)) {
    // The code just became potentially dead. The ref count we wanted to
    // decrement is now transferred to the set of potentially dead code, and
    // will be decremented when the next GC is run.
    return false;
  }
  // If we reach here, the code was already potentially dead. Decrement the ref
  // count, and return true if it drops to zero.
  int old_count = ref_count_.load(std::memory_order_relaxed);
  while (true) {
    DCHECK_LE(1, old_count);
    if (ref_count_.compare_exchange_weak(old_count, old_count - 1,
                                         std::memory_order_relaxed)) {
      return old_count == 1;
    }
  }
}

// static
void WasmCode::DecrementRefCount(Vector<WasmCode*> code_vec) {
  // Decrement the ref counter of all given code objects. Keep the ones whose
  // ref count drops to zero.
  std::unordered_map<NativeModule*, std::vector<WasmCode*>> dead_code;
  for (WasmCode* code : code_vec) {
    if (code->DecRef()) dead_code[code->native_module()].push_back(code);
  }

  // For each native module, free all its code objects at once.
  for (auto& dead_code_entry : dead_code) {
    NativeModule* native_module = dead_code_entry.first;
    Vector<WasmCode*> code_vec = VectorOf(dead_code_entry.second);
    native_module->FreeCode(code_vec);
  }
}
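
// Sketch of the resulting lifecycle (this assumes, as the calls above
// suggest, that WasmCodeRefScope collects AddRef'ed code objects and calls
// DecrementRefCount on them when it dies; the scope class itself is not
// defined in this file):
//
//   {
//     WasmCodeRefScope scope;
//     WasmCode* code = native_module->GetCode(func_index);  // AddRef'ed
//     // ... use {code} ...
//   }  // Scope death decrements the ref counts; code whose count drops to
//      // zero is handed back to its NativeModule via FreeCode.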

NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled,
                           bool can_request_more, VirtualMemory code_space,
                           std::shared_ptr<const WasmModule> module,
                           std::shared_ptr<Counters> async_counters,
                           std::shared_ptr<NativeModule>* shared_this)
    : enabled_features_(enabled),
      module_(std::move(module)),
      import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
          new WasmImportWrapperCache(this))),
      free_code_space_(code_space.region()),
      engine_(engine),
      can_request_more_memory_(can_request_more),
      use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
                                                             : kNoTrapHandler) {
  // We receive a pointer to an empty {std::shared_ptr}, and install ourselves
  // there.
  DCHECK_NOT_NULL(shared_this);
  DCHECK_NULL(*shared_this);
  shared_this->reset(this);
  compilation_state_ =
      CompilationState::New(*shared_this, std::move(async_counters));
  DCHECK_NOT_NULL(module_);
  owned_code_space_.emplace_back(std::move(code_space));
  owned_code_.reserve(num_functions());

#if defined(V8_OS_WIN_X64)
  // On some platforms, specifically Win64, we need to reserve some pages at
  // the beginning of an executable space.
  // See src/heap/spaces.cc, MemoryAllocator::InitializeCodePageAllocator() and
  // https://cs.chromium.org/chromium/src/components/crash/content/app/crashpad_win.cc?rcl=fd680447881449fba2edcf0589320e7253719212&l=204
  // for details.
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      FLAG_win64_unwinding_info) {
    AllocateForCode(Heap::GetCodeRangeReservedAreaSize());
  }
#endif

  uint32_t num_wasm_functions = module_->num_declared_functions;
  if (num_wasm_functions > 0) {
    code_table_.reset(new WasmCode* [num_wasm_functions] {});

    WasmCodeRefScope code_ref_scope;
    jump_table_ = CreateEmptyJumpTable(
        JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
  }
}

void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
  WasmCodeRefScope code_ref_scope;
  DCHECK_LE(num_functions(), max_functions);
  WasmCode** new_table = new WasmCode* [max_functions] {};
  if (module_->num_declared_functions > 0) {
    memcpy(new_table, code_table_.get(),
           module_->num_declared_functions * sizeof(*new_table));
  }
  code_table_.reset(new_table);

  // Re-allocate jump table.
  jump_table_ = CreateEmptyJumpTable(
      JumpTableAssembler::SizeForNumberOfSlots(max_functions));
}

void NativeModule::LogWasmCodes(Isolate* isolate) {
  if (!WasmCode::ShouldBeLogged(isolate)) return;

  // TODO(titzer): we skip the logging of the import wrappers
  // here, but they should be included somehow.
  int start = module()->num_imported_functions;
  int end = start + module()->num_declared_functions;
  WasmCodeRefScope code_ref_scope;
  for (int func_index = start; func_index < end; ++func_index) {
    if (WasmCode* code = GetCode(func_index)) code->LogCode(isolate);
  }
}

CompilationEnv NativeModule::CreateCompilationEnv() const {
  return {module(), use_trap_handler_, kRuntimeExceptionSupport,
          enabled_features_};
}

WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
  return AddAndPublishAnonymousCode(code, WasmCode::kFunction);
}

void NativeModule::UseLazyStubs() {
  uint32_t start = module_->num_imported_functions;
  uint32_t end = start + module_->num_declared_functions;
  for (uint32_t func_index = start; func_index < end; func_index++) {
    UseLazyStub(func_index);
  }
}

void NativeModule::UseLazyStub(uint32_t func_index) {
  DCHECK_LE(module_->num_imported_functions, func_index);
  DCHECK_LT(func_index,
            module_->num_imported_functions + module_->num_declared_functions);

  // Add jump table entry for jump to the lazy compile stub.
  uint32_t slot_index = func_index - module_->num_imported_functions;
  DCHECK_NE(runtime_stub_entry(WasmCode::kWasmCompileLazy), kNullAddress);
  JumpTableAssembler::EmitLazyCompileJumpSlot(
      jump_table_->instruction_start(), slot_index, func_index,
      runtime_stub_entry(WasmCode::kWasmCompileLazy), WasmCode::kFlushICache);
}

// TODO(mstarzinger): Remove {Isolate} parameter once {V8_EMBEDDED_BUILTINS}
// was removed and embedded builtins are no longer optional.
void NativeModule::SetRuntimeStubs(Isolate* isolate) {
  DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
#ifdef V8_EMBEDDED_BUILTINS
  WasmCodeRefScope code_ref_scope;
  WasmCode* jump_table =
      CreateEmptyJumpTable(JumpTableAssembler::SizeForNumberOfStubSlots(
          WasmCode::kRuntimeStubCount));
  Address base = jump_table->instruction_start();
  EmbeddedData embedded_data = EmbeddedData::FromBlob();
#define RUNTIME_STUB(Name) {Builtins::k##Name, WasmCode::k##Name},
#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
  std::pair<Builtins::Name, WasmCode::RuntimeStubId> wasm_runtime_stubs[] = {
      WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
#undef RUNTIME_STUB
#undef RUNTIME_STUB_TRAP
  for (auto pair : wasm_runtime_stubs) {
    CHECK(embedded_data.ContainsBuiltin(pair.first));
    Address builtin = embedded_data.InstructionStartOfBuiltin(pair.first);
    JumpTableAssembler::EmitRuntimeStubSlot(base, pair.second, builtin,
                                            WasmCode::kNoFlushICache);
    uint32_t slot_offset =
        JumpTableAssembler::StubSlotIndexToOffset(pair.second);
    runtime_stub_entries_[pair.second] = base + slot_offset;
  }
  FlushInstructionCache(jump_table->instructions().start(),
                        jump_table->instructions().size());
  DCHECK_NULL(runtime_stub_table_);
  runtime_stub_table_ = jump_table;
#else   // V8_EMBEDDED_BUILTINS
  HandleScope scope(isolate);
  WasmCodeRefScope code_ref_scope;
  USE(runtime_stub_table_);  // Actually unused, but avoids ifdef's in header.
#define COPY_BUILTIN(Name)                                        \
  runtime_stub_entries_[WasmCode::k##Name] =                      \
      AddAndPublishAnonymousCode(                                 \
          isolate->builtins()->builtin_handle(Builtins::k##Name), \
          WasmCode::kRuntimeStub, #Name)                          \
          ->instruction_start();
#define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
#undef COPY_BUILTIN_TRAP
#undef COPY_BUILTIN
#endif  // V8_EMBEDDED_BUILTINS
  DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
}

WasmCode* NativeModule::AddAndPublishAnonymousCode(Handle<Code> code,
                                                   WasmCode::Kind kind,
                                                   const char* name) {
  // For off-heap builtins, we create a copy of the off-heap instruction stream
  // instead of the on-heap code object containing the trampoline. Ensure that
  // we do not apply the on-heap reloc info to the off-heap instructions.
  const size_t relocation_size =
      code->is_off_heap_trampoline() ? 0 : code->relocation_size();
  OwnedVector<byte> reloc_info;
  if (relocation_size > 0) {
    reloc_info = OwnedVector<byte>::New(relocation_size);
    memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
  }
  Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
                                     code->GetIsolate());
  OwnedVector<byte> source_pos =
      OwnedVector<byte>::New(source_pos_table->length());
  if (source_pos_table->length() > 0) {
    source_pos_table->copy_out(0, source_pos.start(),
                               source_pos_table->length());
  }
  Vector<const byte> instructions(
      reinterpret_cast<byte*>(code->InstructionStart()),
      static_cast<size_t>(code->InstructionSize()));
  const uint32_t stack_slots = static_cast<uint32_t>(
      code->has_safepoint_info() ? code->stack_slots() : 0);

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // Code objects contain real offsets but WasmCode expects an offset of 0 to
  // mean 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      code->has_safepoint_table() ? code->safepoint_table_offset() : 0);
  const size_t handler_table_offset =
      static_cast<size_t>(code->handler_table_offset());
  const size_t constant_pool_offset =
      static_cast<size_t>(code->constant_pool_offset());
  const size_t code_comments_offset =
      static_cast<size_t>(code->code_comments_offset());

  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = reinterpret_cast<Address>(dst_code_bytes.begin()) -
                   code->InstructionStart();
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  RelocIterator orig_it(*code, mode_mask);
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next(), orig_it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache after relocation.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  DCHECK_NE(kind, WasmCode::Kind::kInterpreterEntry);
  std::unique_ptr<WasmCode> new_code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      dst_code_bytes,                           // instructions
      stack_slots,                              // stack_slots
      0,                                        // tagged_parameter_slots
      safepoint_table_offset,                   // safepoint_table_offset
      handler_table_offset,                     // handler_table_offset
      constant_pool_offset,                     // constant_pool_offset
      code_comments_offset,                     // code_comments_offset
      instructions.size(),                      // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      std::move(reloc_info),                    // reloc_info
      std::move(source_pos),                    // source positions
      kind,                                     // kind
      ExecutionTier::kNone}};                   // tier
  new_code->MaybePrint(name);
  new_code->Validate();

  return PublishCode(std::move(new_code));
}

std::unique_ptr<WasmCode> NativeModule::AddCode(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier) {
  return AddCodeWithCodeSpace(index, desc, stack_slots, tagged_parameter_slots,
                              std::move(protected_instructions),
                              std::move(source_position_table), kind, tier,
                              AllocateForCode(desc.instr_size));
}

std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier, Vector<uint8_t> dst_code_bytes) {
  OwnedVector<byte> reloc_info;
  if (desc.reloc_size > 0) {
    reloc_info = OwnedVector<byte>::New(desc.reloc_size);
    memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
           desc.reloc_size);
  }

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // CodeDesc contains real offsets but WasmCode expects an offset of 0 to mean
  // 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      desc.safepoint_table_size == 0 ? 0 : desc.safepoint_table_offset);
  const size_t handler_table_offset =
      static_cast<size_t>(desc.handler_table_offset);
  const size_t constant_pool_offset =
      static_cast<size_t>(desc.constant_pool_offset);
  const size_t code_comments_offset =
      static_cast<size_t>(desc.code_comments_offset);
  const size_t instr_size = static_cast<size_t>(desc.instr_size);

  memcpy(dst_code_bytes.begin(), desc.buffer,
         static_cast<size_t>(desc.instr_size));

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = dst_code_bytes.begin() - desc.buffer;
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmCall(mode)) {
      uint32_t call_tag = it.rinfo()->wasm_call_tag();
      Address target = GetCallTargetForFunction(call_tag);
      it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, instr_size, std::move(protected_instructions),
      std::move(reloc_info), std::move(source_position_table), kind, tier}};
  code->MaybePrint();
  code->Validate();

  code->RegisterTrapHandlerData();

  // Flush the i-cache for the region holding the relocated code.
  // Do this last, as this seems to trigger an LTO bug that clobbers a register
  // on arm, see https://crbug.com/952759#c6.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  return code;
}

WasmCode* NativeModule::PublishCode(std::unique_ptr<WasmCode> code) {
  base::MutexGuard lock(&allocation_mutex_);
  return PublishCodeLocked(std::move(code));
}

namespace {
WasmCode::Kind GetCodeKindForExecutionTier(ExecutionTier tier) {
  switch (tier) {
    case ExecutionTier::kInterpreter:
      return WasmCode::Kind::kInterpreterEntry;
    case ExecutionTier::kLiftoff:
    case ExecutionTier::kTurbofan:
      return WasmCode::Kind::kFunction;
    case ExecutionTier::kNone:
      UNREACHABLE();
  }
}
}  // namespace

WasmCode* NativeModule::PublishCodeLocked(std::unique_ptr<WasmCode> code) {
  // The caller must hold the {allocation_mutex_}, thus we fail to lock it here.
  DCHECK(!allocation_mutex_.TryLock());

  if (!code->IsAnonymous()) {
    DCHECK_LT(code->index(), num_functions());
    DCHECK_LE(module_->num_imported_functions, code->index());

    // Assume an order of execution tiers that represents the quality of their
    // generated code.
    static_assert(ExecutionTier::kNone < ExecutionTier::kInterpreter &&
                      ExecutionTier::kInterpreter < ExecutionTier::kLiftoff &&
                      ExecutionTier::kLiftoff < ExecutionTier::kTurbofan,
                  "Assume an order on execution tiers");

    // Update the code table, but avoid falling back to less optimized code:
    // only use the new code if it was compiled with a higher tier.
    uint32_t slot_idx = code->index() - module_->num_imported_functions;
    WasmCode* prior_code = code_table_[slot_idx];
    bool update_code_table = !prior_code || prior_code->tier() < code->tier();
    if (update_code_table) {
      code_table_[slot_idx] = code.get();
      if (prior_code) {
        WasmCodeRefScope::AddRef(prior_code);
        // The code is added to the current {WasmCodeRefScope}, hence the ref
        // count cannot drop to zero here.
        CHECK(!prior_code->DecRef());
      }
    }

    // Populate optimized code to the jump table unless there is an active
    // redirection to the interpreter that should be preserved.
    bool update_jump_table =
        update_code_table && !has_interpreter_redirection(code->index());

    // Ensure that interpreter entries always populate to the jump table.
    if (code->kind_ == WasmCode::Kind::kInterpreterEntry) {
      SetInterpreterRedirection(code->index());
      update_jump_table = true;
    }

    if (update_jump_table) {
      JumpTableAssembler::PatchJumpTableSlot(
          jump_table_->instruction_start(), slot_idx, code->instruction_start(),
          WasmCode::kFlushICache);
    }
  }
  WasmCodeRefScope::AddRef(code.get());
  WasmCode* result = code.get();
  owned_code_.emplace_back(std::move(code));
  return result;
}
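
// Example of the tiering rule above: if a function's slot currently holds
// Liftoff code and TurboFan code for the same function is published, both the
// code table and the jump table are updated (kLiftoff < kTurbofan). In the
// reverse order the TurboFan entry is kept, since replacing it would be a
// downgrade; the Liftoff code is still owned by the module, just never
// installed in the tables.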

WasmCode* NativeModule::AddDeserializedCode(
    uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
    uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
    size_t handler_table_offset, size_t constant_pool_offset,
    size_t code_comments_offset, size_t unpadded_binary_size,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> reloc_info,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier) {
  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, unpadded_binary_size,
      std::move(protected_instructions), std::move(reloc_info),
      std::move(source_position_table), kind, tier}};

  code->RegisterTrapHandlerData();

  // Note: we do not flush the i-cache here, since the code needs to be
  // relocated anyway. The caller is responsible for flushing the i-cache later.

  return PublishCode(std::move(code));
}

std::vector<WasmCode*> NativeModule::SnapshotCodeTable() const {
  base::MutexGuard lock(&allocation_mutex_);
  WasmCode** start = code_table_.get();
  WasmCode** end = start + module_->num_declared_functions;
  return std::vector<WasmCode*>{start, end};
}

WasmCode* NativeModule::GetCode(uint32_t index) const {
  base::MutexGuard guard(&allocation_mutex_);
  DCHECK_LT(index, num_functions());
  DCHECK_LE(module_->num_imported_functions, index);
  WasmCode* code = code_table_[index - module_->num_imported_functions];
  WasmCodeRefScope::AddRef(code);
  return code;
}

bool NativeModule::HasCode(uint32_t index) const {
  base::MutexGuard guard(&allocation_mutex_);
  DCHECK_LT(index, num_functions());
  DCHECK_LE(module_->num_imported_functions, index);
  return code_table_[index - module_->num_imported_functions] != nullptr;
}

WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t jump_table_size) {
  // Only call this if we really need a jump table.
  DCHECK_LT(0, jump_table_size);
  Vector<uint8_t> code_space = AllocateForCode(jump_table_size);
  ZapCode(reinterpret_cast<Address>(code_space.begin()), code_space.size());
  std::unique_ptr<WasmCode> code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      code_space,                               // instructions
      0,                                        // stack_slots
      0,                                        // tagged_parameter_slots
      0,                                        // safepoint_table_offset
      jump_table_size,                          // handler_table_offset
      jump_table_size,                          // constant_pool_offset
      jump_table_size,                          // code_comments_offset
      jump_table_size,                          // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      OwnedVector<const uint8_t>{},             // reloc_info
      OwnedVector<const uint8_t>{},             // source_pos
      WasmCode::kJumpTable,                     // kind
      ExecutionTier::kNone}};                   // tier
  return PublishCode(std::move(code));
}

Vector<byte> NativeModule::AllocateForCode(size_t size) {
  base::MutexGuard lock(&allocation_mutex_);
  DCHECK_LT(0, size);
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  // This happens under a lock assumed by the caller.
  size = RoundUp<kCodeAlignment>(size);
  base::AddressRegion code_space = free_code_space_.Allocate(size);
  if (code_space.is_empty()) {
    if (!can_request_more_memory_) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }

    Address hint = owned_code_space_.empty() ? kNullAddress
                                             : owned_code_space_.back().end();

    VirtualMemory new_mem = engine_->code_manager()->TryAllocate(
        size, reinterpret_cast<void*>(hint));
    if (!new_mem.IsReserved()) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }
    engine_->code_manager()->AssignRanges(new_mem.address(), new_mem.end(),
                                          this);

    free_code_space_.Merge(new_mem.region());
    owned_code_space_.emplace_back(std::move(new_mem));
    code_space = free_code_space_.Allocate(size);
    DCHECK(!code_space.is_empty());
  }
  const Address page_size = page_allocator->AllocatePageSize();
  Address commit_start = RoundUp(code_space.begin(), page_size);
  Address commit_end = RoundUp(code_space.end(), page_size);
  // {commit_start} will be either code_space.start or the start of the next
  // page. {commit_end} will be the start of the page after the one in which
  // the allocation ends.
  // We start from an aligned start, and we know we allocated vmem in
  // page multiples.
  // We just need to commit what's not committed. The page in which we
  // start is already committed (or we start at the beginning of a page).
  // The end needs to be committed all through the end of the page.
  if (commit_start < commit_end) {
    committed_code_space_.fetch_add(commit_end - commit_start);
    // Committed code cannot grow bigger than maximum code space size.
    DCHECK_LE(committed_code_space_.load(), kMaxWasmCodeMemory);
#if V8_OS_WIN
    // On Windows, we cannot commit a region that straddles different
    // reservations of virtual memory. Because we bump-allocate, and because,
    // if we need more memory, we append that memory at the end of the
    // owned_code_space_ list, we traverse that list in reverse order to find
    // the reservation(s) that guide how to chunk the region to commit.
    for (auto& vmem : base::Reversed(owned_code_space_)) {
      if (commit_end <= vmem.address() || vmem.end() <= commit_start) continue;
      Address start = std::max(commit_start, vmem.address());
      Address end = std::min(commit_end, vmem.end());
      size_t commit_size = static_cast<size_t>(end - start);
      if (!engine_->code_manager()->Commit(start, commit_size)) {
        V8::FatalProcessOutOfMemory(nullptr,
                                    "NativeModule::AllocateForCode commit");
        UNREACHABLE();
      }
      // Opportunistically reduce the commit range. This might terminate the
      // loop early.
      if (commit_start == start) commit_start = end;
      if (commit_end == end) commit_end = start;
      if (commit_start >= commit_end) break;
    }
#else
    if (!engine_->code_manager()->Commit(commit_start,
                                         commit_end - commit_start)) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode commit");
      UNREACHABLE();
    }
#endif
  }
  DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
  allocated_code_space_.Merge(code_space);
  generated_code_size_.fetch_add(code_space.size(), std::memory_order_relaxed);

  TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, code_space.begin(),
             size);
  return {reinterpret_cast<byte*>(code_space.begin()), code_space.size()};
}
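
// Worked example for the commit arithmetic above (illustrative numbers,
// assuming a 4 KiB page size): if {code_space} is [0x5400, 0x7200), then
//   commit_start = RoundUp(0x5400, 0x1000) = 0x6000
//   commit_end   = RoundUp(0x7200, 0x1000) = 0x8000
// and only [0x6000, 0x8000) is newly committed. The page holding 0x5400 was
// already committed by the earlier allocation that ended inside it, which is
// why {commit_start} rounds up rather than down.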

namespace {
class NativeModuleWireBytesStorage final : public WireBytesStorage {
 public:
  explicit NativeModuleWireBytesStorage(
      std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes)
      : wire_bytes_(std::move(wire_bytes)) {}

  Vector<const uint8_t> GetCode(WireBytesRef ref) const final {
    return wire_bytes_->as_vector().SubVector(ref.offset(), ref.end_offset());
  }

 private:
  const std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;
};
}  // namespace

void NativeModule::SetWireBytes(OwnedVector<const uint8_t> wire_bytes) {
  auto shared_wire_bytes =
      std::make_shared<OwnedVector<const uint8_t>>(std::move(wire_bytes));
  wire_bytes_ = shared_wire_bytes;
  if (!shared_wire_bytes->empty()) {
    compilation_state_->SetWireBytesStorage(
        std::make_shared<NativeModuleWireBytesStorage>(
            std::move(shared_wire_bytes)));
  }
}

WasmCode* NativeModule::Lookup(Address pc) const {
  base::MutexGuard lock(&allocation_mutex_);
  if (owned_code_.empty()) return nullptr;
  // First update the sorted portion counter.
  if (owned_code_sorted_portion_ == 0) ++owned_code_sorted_portion_;
  while (owned_code_sorted_portion_ < owned_code_.size() &&
         owned_code_[owned_code_sorted_portion_ - 1]->instruction_start() <=
             owned_code_[owned_code_sorted_portion_]->instruction_start()) {
    ++owned_code_sorted_portion_;
  }
  // Execute at most two rounds: First check whether the {pc} is within the
  // sorted portion of {owned_code_}. If it's not, then sort the whole vector
  // and retry.
  while (true) {
    auto iter =
        std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
                         [](Address pc, const std::unique_ptr<WasmCode>& code) {
                           DCHECK_NE(kNullAddress, pc);
                           DCHECK_NOT_NULL(code);
                           return pc < code->instruction_start();
                         });
    if (iter != owned_code_.begin()) {
      --iter;
      WasmCode* candidate = iter->get();
      DCHECK_NOT_NULL(candidate);
      if (candidate->contains(pc)) {
        WasmCodeRefScope::AddRef(candidate);
        return candidate;
      }
    }
    if (owned_code_sorted_portion_ == owned_code_.size()) return nullptr;
    std::sort(owned_code_.begin(), owned_code_.end(),
              [](const std::unique_ptr<WasmCode>& code1,
                 const std::unique_ptr<WasmCode>& code2) {
                return code1->instruction_start() < code2->instruction_start();
              });
    owned_code_sorted_portion_ = owned_code_.size();
  }
}
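
// Note on the two-round strategy above: code objects are appended to
// {owned_code_} as they are published, which typically happens in increasing
// address order, so the sorted portion usually spans the whole vector and the
// binary search succeeds on the first round. The full std::sort only runs
// when out-of-order publishing left an unsorted tail, after which
// {owned_code_sorted_portion_} covers the entire vector again and later
// lookups stay O(log n).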

Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
  // TODO(clemensh): Measure performance win of returning instruction start
  // directly if we have turbofan code. Downside: Redirecting functions (e.g.
  // for debugging) gets much harder.

  // Return the jump table slot for that function index.
  DCHECK_NOT_NULL(jump_table_);
  uint32_t slot_idx = func_index - module_->num_imported_functions;
  uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
  DCHECK_LT(slot_offset, jump_table_->instructions().size());
  return jump_table_->instruction_start() + slot_offset;
}

uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
    Address slot_address) const {
  DCHECK(is_jump_table_slot(slot_address));
  uint32_t slot_offset =
      static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
  uint32_t slot_idx = JumpTableAssembler::SlotOffsetToIndex(slot_offset);
  DCHECK_LT(slot_idx, module_->num_declared_functions);
  return module_->num_imported_functions + slot_idx;
}
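
// Round-trip sketch of the two functions above, assuming a module with 3
// imported functions: for func_index 7, GetCallTargetForFunction computes
// slot_idx 7 - 3 == 4 and returns the jump table start plus
// SlotIndexToOffset(4); passing that address to
// GetFunctionIndexFromJumpTableSlot inverts the mapping and recovers
// 3 + 4 == 7.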

const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
#define RETURN_NAME(Name)                                               \
  if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
    return #Name;                                                       \
  }
#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
#undef RETURN_NAME_TRAP
#undef RETURN_NAME
  return "<unknown>";
}

NativeModule::~NativeModule() {
  TRACE_HEAP("Deleting native module: %p\n", reinterpret_cast<void*>(this));
  // Cancel all background compilation before resetting any field of the
  // NativeModule or freeing anything.
  compilation_state_->AbortCompilation();
  engine_->FreeNativeModule(this);
  // Free the import wrapper cache before releasing the {WasmCode} objects in
  // {owned_code_}. The destructor of {WasmImportWrapperCache} still needs to
  // decrease reference counts on the {WasmCode} objects.
  import_wrapper_cache_.reset();
}

WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
                                 size_t max_committed)
    : memory_tracker_(memory_tracker),
      max_committed_code_space_(max_committed),
      total_committed_code_space_(0),
      critical_committed_code_space_(max_committed / 2) {
  DCHECK_LE(max_committed, kMaxWasmCodeMemory);
}

bool WasmCodeManager::Commit(Address start, size_t size) {
  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) return true;
  DCHECK(IsAligned(start, AllocatePageSize()));
  DCHECK(IsAligned(size, AllocatePageSize()));
  // Reserve the size. Use CAS loop to avoid overflow on
  // {total_committed_code_space_}.
  size_t old_value = total_committed_code_space_.load();
  while (true) {
    DCHECK_GE(max_committed_code_space_, old_value);
    if (size > max_committed_code_space_ - old_value) return false;
    if (total_committed_code_space_.compare_exchange_weak(old_value,
                                                          old_value + size)) {
      break;
    }
  }
  PageAllocator::Permission permission = FLAG_wasm_write_protect_code_memory
                                             ? PageAllocator::kReadWrite
                                             : PageAllocator::kReadWriteExecute;

  bool ret =
      SetPermissions(GetPlatformPageAllocator(), start, size, permission);
  TRACE_HEAP("Setting rw permissions for %p:%p\n",
             reinterpret_cast<void*>(start),
             reinterpret_cast<void*>(start + size));

  if (!ret) {
    // Highly unlikely.
    total_committed_code_space_.fetch_sub(size);
    return false;
  }
  return true;
}
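
// Sketch of the reservation logic above (illustrative numbers): with
// {max_committed_code_space_} == 1 GiB and a current total of 1 GiB - 64 KiB,
// a 128 KiB commit fails the "size > max - old_value" check and returns false
// before touching any page permissions. Two racing 64 KiB commits are
// serialized by the compare_exchange_weak loop, so the total can never
// overshoot the limit.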

void WasmCodeManager::AssignRanges(Address start, Address end,
                                   NativeModule* native_module) {
  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
}

VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  DCHECK_GT(size, 0);
  size = RoundUp(size, page_allocator->AllocatePageSize());
  if (!memory_tracker_->ReserveAddressSpace(size)) return {};
  if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();

  VirtualMemory mem(page_allocator, size, hint,
                    page_allocator->AllocatePageSize());
  if (!mem.IsReserved()) {
    memory_tracker_->ReleaseReservation(size);
    return {};
  }
  TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
             reinterpret_cast<void*>(mem.address()),
             reinterpret_cast<void*>(mem.end()), mem.size());

  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) {
    SetPermissions(GetPlatformPageAllocator(), mem.address(), mem.size(),
                   PageAllocator::kReadWriteExecute);
  }
  return mem;
}

void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
  // This has to be set before committing any memory.
  DCHECK_EQ(0, total_committed_code_space_.load());
  max_committed_code_space_ = limit;
  critical_committed_code_space_.store(limit / 2);
}

// static
size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
  constexpr size_t kCodeSizeMultiplier = 4;
  constexpr size_t kCodeOverhead = 32;     // for prologue, stack check, ...
  constexpr size_t kStaticCodeSize = 512;  // runtime stubs, ...
  constexpr size_t kImportSize = 64 * kSystemPointerSize;

  size_t estimate = kStaticCodeSize;
  for (auto& function : module->functions) {
    estimate += kCodeOverhead + kCodeSizeMultiplier * function.code.length();
  }
  estimate +=
      JumpTableAssembler::SizeForNumberOfSlots(module->num_declared_functions);
  estimate += kImportSize * module->num_imported_functions;

  return estimate;
}
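
// Worked example for the estimate above (hypothetical module, 64-bit build):
// 2 declared functions with body sizes 100 and 200 bytes and 3 imports give
//   512                                // kStaticCodeSize
//   + (32 + 4 * 100) + (32 + 4 * 200)  // kCodeOverhead + kCodeSizeMultiplier
//   + SizeForNumberOfSlots(2)          // jump table
//   + 3 * 64 * 8                       // imports * kImportSize
// = 3312 bytes plus the jump table size.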

// static
size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
    const WasmModule* module) {
  size_t wasm_module_estimate = EstimateStoredSize(module);

  uint32_t num_wasm_functions = module->num_declared_functions;

  // TODO(wasm): Include wire bytes size.
  size_t native_module_estimate =
      sizeof(NativeModule) +                     /* NativeModule struct */
      (sizeof(WasmCode*) * num_wasm_functions) + /* code table size */
      (sizeof(WasmCode) * num_wasm_functions);   /* code object size */

  return wasm_module_estimate + native_module_estimate;
}

std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule(
    WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
    size_t code_size_estimate, bool can_request_more,
    std::shared_ptr<const WasmModule> module) {
  DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
  if (total_committed_code_space_.load() >
      critical_committed_code_space_.load()) {
    (reinterpret_cast<v8::Isolate*>(isolate))
        ->MemoryPressureNotification(MemoryPressureLevel::kCritical);
    size_t committed = total_committed_code_space_.load();
    DCHECK_GE(max_committed_code_space_, committed);
    critical_committed_code_space_.store(
        committed + (max_committed_code_space_ - committed) / 2);
  }

  // If the code must be contiguous, reserve enough address space up front.
  size_t code_vmem_size =
      kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
  // Try up to two times; getting rid of dead JSArrayBuffer allocations might
  // require two GCs because the first GC may be incremental and may have
  // floating garbage.
  static constexpr int kAllocationRetries = 2;
  VirtualMemory code_space;
  for (int retries = 0;; ++retries) {
    code_space = TryAllocate(code_vmem_size);
    if (code_space.IsReserved()) break;
    if (retries == kAllocationRetries) {
      V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
      UNREACHABLE();
    }
    // Run one GC, then try the allocation again.
    isolate->heap()->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                                true);
  }

  Address start = code_space.address();
  size_t size = code_space.size();
  Address end = code_space.end();
  std::shared_ptr<NativeModule> ret;
  new NativeModule(engine, enabled, can_request_more, std::move(code_space),
                   std::move(module), isolate->async_counters(), &ret);
  // The constructor initialized the shared_ptr.
  DCHECK_NOT_NULL(ret);
  TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
             size);

#if defined(V8_OS_WIN_X64)
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      FLAG_win64_unwinding_info) {
    win64_unwindinfo::RegisterNonABICompliantCodeRange(
        reinterpret_cast<void*>(start), size);
  }
#endif

  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
  return ret;
}
1268 :
1269 9628500 : bool NativeModule::SetExecutable(bool executable) {
1270 9628500 : if (is_executable_ == executable) return true;
1271 : TRACE_HEAP("Setting module %p as executable: %d.\n", this, executable);
1272 :
1273 729328 : v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
1274 :
1275 729328 : if (FLAG_wasm_write_protect_code_memory) {
1276 : PageAllocator::Permission permission =
1277 0 : executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
1278 : #if V8_OS_WIN
1279 : // On windows, we need to switch permissions per separate virtual memory
1280 : // reservation. This is really just a problem when the NativeModule is
1281 : // growable (meaning can_request_more_memory_). That's 32-bit in production,
1282 : // or unittests.
1283 : // For now, in that case, we commit at reserved memory granularity.
1284 : // Technically, that may be a waste, because we may reserve more than we
1285 : // use. On 32-bit though, the scarce resource is the address space -
1286 : // committed or not.
1287 : if (can_request_more_memory_) {
1288 : for (auto& vmem : owned_code_space_) {
1289 : if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
1290 : permission)) {
1291 : return false;
1292 : }
1293 : TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
1294 : executable);
1295 : }
1296 : is_executable_ = executable;
1297 : return true;
1298 : }
1299 : #endif
1300 0 : for (auto& region : allocated_code_space_.regions()) {
1301 : // allocated_code_space_ is fine-grained, so we need to
1302 : // page-align it.
1303 : size_t region_size =
1304 0 : RoundUp(region.size(), page_allocator->AllocatePageSize());
1305 0 : if (!SetPermissions(page_allocator, region.begin(), region_size,
1306 : permission)) {
1307 : return false;
1308 : }
1309 : TRACE_HEAP("Set %p:%p to executable:%d\n",
1310 : reinterpret_cast<void*>(region.begin()),
1311 : reinterpret_cast<void*>(region.end()), executable);
1312 : }
1313 : }
1314 729328 : is_executable_ = executable;
1315 729328 : return true;
1316 : }
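// Editor's note: SetExecutable() implements W^X switching: code pages are
// either writable or executable, never both, and fine-grained regions are
// first rounded up to whole pages. A minimal POSIX sketch of the same idea
// using mprotect() (V8 goes through its PageAllocator abstraction instead);
// {start} is assumed page-aligned, as the code regions above are:
//
//   #include <cstddef>
//   #include <sys/mman.h>
//   #include <unistd.h>
//
//   static bool SetRegionExecutableSketch(void* start, size_t size,
//                                         bool executable) {
//     size_t page_size = static_cast<size_t>(sysconf(_SC_PAGESIZE));
//     size_t rounded = (size + page_size - 1) & ~(page_size - 1);  // align up
//     int prot = executable ? (PROT_READ | PROT_EXEC)    // execute, no write
//                           : (PROT_READ | PROT_WRITE);  // write, no execute
//     return mprotect(start, rounded, prot) == 0;
//   }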
1317 :
1318 695943 : void NativeModule::SampleCodeSize(
1319 : Counters* counters, NativeModule::CodeSamplingTime sampling_time) const {
1320 : size_t code_size = sampling_time == kSampling
1321 : ? committed_code_space()
1322 695943 : : generated_code_size_.load(std::memory_order_relaxed);
1323 695943 : int code_size_mb = static_cast<int>(code_size / MB);
1324 : Histogram* histogram = nullptr;
1325 695943 : switch (sampling_time) {
1326 : case kAfterBaseline:
1327 : histogram = counters->wasm_module_code_size_mb_after_baseline();
1328 2218 : break;
1329 : case kAfterTopTier:
1330 : histogram = counters->wasm_module_code_size_mb_after_top_tier();
1331 209 : break;
1332 : case kSampling:
1333 : histogram = counters->wasm_module_code_size_mb();
1334 693516 : break;
1335 : }
1336 695943 : histogram->AddSample(code_size_mb);
1337 695943 : }
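// Editor's note: samples are recorded in whole megabytes, so the integer
// division truncates; any module with less than 1 MB of code records 0.
// Worked example of the conversion above:
//
//   size_t code_size = 5 * MB + 123;                      // 5242880 + 123 bytes
//   int code_size_mb = static_cast<int>(code_size / MB);  // == 5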
1338 :
1339 739901 : WasmCode* NativeModule::AddCompiledCode(WasmCompilationResult result) {
1340 1479802 : return AddCompiledCode({&result, 1})[0];
1341 : }
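// Editor's note: the single-result overload above funnels through the batch
// API by wrapping the stack-allocated result in a length-1 vector view
// ({&result, 1}), so both paths share one allocation and publication
// sequence. The same idiom, with a hypothetical pointer+length view standing
// in for V8's Vector<T>:
//
//   #include <cstddef>
//
//   template <typename T>
//   struct ViewSketch {
//     T* data;
//     size_t size;
//   };
//
//   static int FirstElementSketch(ViewSketch<const int> xs) {
//     return xs.data[0];  // batch code path, applied to a batch of one
//   }
//
//   // Usage: int result = 42;
//   //        int first = FirstElementSketch({&result, 1});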
1342 :
1343 1008149 : std::vector<WasmCode*> NativeModule::AddCompiledCode(
1344 : Vector<WasmCompilationResult> results) {
1345 : DCHECK(!results.empty());
1346 : // First, allocate code space for all the results.
1347 : size_t total_code_space = 0;
1348 3132693 : for (auto& result : results) {
1349 : DCHECK(result.succeeded());
1350 2124544 : total_code_space += RoundUp<kCodeAlignment>(result.code_desc.instr_size);
1351 : }
1352 1008149 : Vector<byte> code_space = AllocateForCode(total_code_space);
1353 :
1354 1008694 : std::vector<std::unique_ptr<WasmCode>> generated_code;
1355 1008679 : generated_code.reserve(results.size());
1356 :
1357 : // Now copy the generated code into the code space and relocate it.
1358 3132760 : for (auto& result : results) {
1359 : DCHECK_EQ(result.code_desc.buffer, result.instr_buffer.get());
1360 2126030 : size_t code_size = RoundUp<kCodeAlignment>(result.code_desc.instr_size);
1361 1063015 : Vector<byte> this_code_space = code_space.SubVector(0, code_size);
1362 : code_space += code_size;
1363 6375423 : generated_code.emplace_back(AddCodeWithCodeSpace(
1364 1063004 : result.func_index, result.code_desc, result.frame_slot_count,
1365 : result.tagged_parameter_slots, std::move(result.protected_instructions),
1366 : std::move(result.source_positions),
1367 : GetCodeKindForExecutionTier(result.result_tier), result.result_tier,
1368 1062219 : this_code_space));
1369 : }
1370 : DCHECK_EQ(0, code_space.size());
1371 :
1372 : // Under the {allocation_mutex_}, publish the code. The published code is put
1373 : // into the top-most surrounding {WasmCodeRefScope} by {PublishCodeLocked}.
1374 : std::vector<WasmCode*> code_vector;
1375 1007622 : code_vector.reserve(results.size());
1376 : {
1377 1008663 : base::MutexGuard lock(&allocation_mutex_);
1378 2072341 : for (auto& result : generated_code)
1379 2127393 : code_vector.push_back(PublishCodeLocked(std::move(result)));
1380 : }
1381 :
1382 1008665 : return code_vector;
1383 : }
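// Editor's note: AddCompiledCode() is a two-pass bump allocation: pass one
// sums the alignment-rounded sizes and reserves one contiguous block, pass
// two carves that block into per-result slices. A minimal sketch of the
// carving step; names are illustrative, with 32 standing in for
// kCodeAlignment:
//
//   #include <cstddef>
//   #include <cstdint>
//   #include <vector>
//
//   static constexpr size_t kAlignSketch = 32;
//
//   static size_t RoundUpAlignedSketch(size_t n) {
//     return (n + kAlignSketch - 1) & ~(kAlignSketch - 1);
//   }
//
//   static std::vector<uint8_t*> CarveSketch(uint8_t* base,
//                                            const std::vector<size_t>& sizes) {
//     std::vector<uint8_t*> slices;
//     slices.reserve(sizes.size());
//     for (size_t size : sizes) {
//       slices.push_back(base);              // slice starts at the cursor
//       base += RoundUpAlignedSketch(size);  // bump past the aligned size
//     }
//     return slices;  // base now points one past the last slice
//   }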
1384 :
1385 0 : void NativeModule::FreeCode(Vector<WasmCode* const> codes) {
1386 : // TODO(clemensh): Implement.
1387 0 : }
1388 :
1389 1242407 : void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
1390 1242407 : base::MutexGuard lock(&native_modules_mutex_);
1391 : TRACE_HEAP("Freeing NativeModule %p\n", native_module);
1392 2484814 : for (auto& code_space : native_module->owned_code_space_) {
1393 : DCHECK(code_space.IsReserved());
1394 : TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n",
1395 : code_space.address(), code_space.end(), code_space.size());
1396 :
1397 : #if defined(V8_OS_WIN_X64)
1398 : if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
1399 : FLAG_win64_unwinding_info) {
1400 : win64_unwindinfo::UnregisterNonABICompliantCodeRange(
1401 : reinterpret_cast<void*>(code_space.address()));
1402 : }
1403 : #endif
1404 :
1405 2484814 : lookup_map_.erase(code_space.address());
1406 1242407 : memory_tracker_->ReleaseReservation(code_space.size());
1407 1242407 : code_space.Free();
1408 : DCHECK(!code_space.IsReserved());
1409 : }
1410 : native_module->owned_code_space_.clear();
1411 :
1412 : size_t code_size = native_module->committed_code_space_.load();
1413 : DCHECK(IsAligned(code_size, AllocatePageSize()));
1414 : size_t old_committed = total_committed_code_space_.fetch_sub(code_size);
1415 : DCHECK_LE(code_size, old_committed);
1416 : USE(old_committed);
1417 1242407 : }
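// Editor's note: freeing a module walks its reservations in reverse of
// NewNativeModule(): unregister unwind info, drop the lookup-map entry,
// release the tracked reservation, then un-reserve. The final accounting uses
// fetch_sub, which returns the counter's *previous* value, so checking
// code_size <= old_committed catches unbalanced bookkeeping. Minimal sketch:
//
//   #include <atomic>
//   #include <cassert>
//   #include <cstddef>
//
//   static std::atomic<size_t> total_committed_sketch{0};
//
//   static void ReleaseCommittedSketch(size_t code_size) {
//     size_t old_committed = total_committed_sketch.fetch_sub(code_size);
//     assert(code_size <= old_committed && "unbalanced accounting");
//     (void)old_committed;  // only used by the assert
//   }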
1418 :
1419 55801605 : NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
1420 55801605 : base::MutexGuard lock(&native_modules_mutex_);
1421 55801877 : if (lookup_map_.empty()) return nullptr;
1422 :
1423 : auto iter = lookup_map_.upper_bound(pc);
1424 13810384 : if (iter == lookup_map_.begin()) return nullptr;
1425 : --iter;
1426 13777454 : Address region_start = iter->first;
1427 13777454 : Address region_end = iter->second.first;
1428 13777454 : NativeModule* candidate = iter->second.second;
1429 :
1430 : DCHECK_NOT_NULL(candidate);
1431 13777454 : return region_start <= pc && pc < region_end ? candidate : nullptr;
1432 : }
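// Editor's note: the lookup map is keyed by region start, so upper_bound(pc)
// finds the first region starting strictly after pc; stepping back one entry
// yields the only region that could contain pc, which is then checked against
// its end address. The same O(log n) idiom over std::map, with an int payload
// standing in for NativeModule*:
//
//   #include <cstdint>
//   #include <map>
//   #include <utility>
//
//   // region start -> (region end, payload)
//   using RegionMapSketch = std::map<uintptr_t, std::pair<uintptr_t, int>>;
//
//   static const int* LookupRegionSketch(const RegionMapSketch& map,
//                                        uintptr_t pc) {
//     auto it = map.upper_bound(pc);
//     if (it == map.begin()) return nullptr;  // pc lies below every region
//     --it;                                   // now it->first <= pc
//     return pc < it->second.first ? &it->second.second : nullptr;
//   }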
1433 :
1434 55752858 : WasmCode* WasmCodeManager::LookupCode(Address pc) const {
1435 55752858 : NativeModule* candidate = LookupNativeModule(pc);
1436 55753115 : return candidate ? candidate->Lookup(pc) : nullptr;
1437 : }
1438 :
1439 : // TODO(v8:7424): Code protection scopes are not yet supported with shared code
1440 : // enabled and need to be revisited to work with --wasm-shared-code as well.
1441 144403 : NativeModuleModificationScope::NativeModuleModificationScope(
1442 : NativeModule* native_module)
1443 144403 : : native_module_(native_module) {
1444 144403 : if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1445 0 : (native_module_->modification_scope_depth_++) == 0) {
1446 0 : bool success = native_module_->SetExecutable(false);
1447 0 : CHECK(success);
1448 : }
1449 144403 : }
1450 :
1451 288802 : NativeModuleModificationScope::~NativeModuleModificationScope() {
1452 144401 : if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1453 0 : (native_module_->modification_scope_depth_--) == 1) {
1454 0 : bool success = native_module_->SetExecutable(true);
1455 0 : CHECK(success);
1456 : }
1457 144401 : }
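// Editor's note: the modification scope is a depth-counted RAII guard: only
// the 0 -> 1 transition in the constructor unprotects the module, and only
// the matching 1 -> 0 transition in the destructor re-protects it, so nested
// scopes on the same module are no-ops. Minimal sketch of the counting:
//
//   struct WriteScopeSketch {
//     explicit WriteScopeSketch(int* depth) : depth_(depth) {
//       if ((*depth_)++ == 0) { /* outermost scope: make code writable */ }
//     }
//     ~WriteScopeSketch() {
//       if (--(*depth_) == 0) { /* outermost scope: re-protect code */ }
//     }
//     int* depth_;
//   };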
1458 :
1459 : namespace {
1460 : thread_local WasmCodeRefScope* current_code_refs_scope = nullptr;
1461 : } // namespace
1462 :
1463 58490608 : WasmCodeRefScope::WasmCodeRefScope()
1464 60968293 : : previous_scope_(current_code_refs_scope) {
1465 60968293 : current_code_refs_scope = this;
1466 58490608 : }
1467 :
1468 121937748 : WasmCodeRefScope::~WasmCodeRefScope() {
1469 : DCHECK_EQ(this, current_code_refs_scope);
1470 60968864 : current_code_refs_scope = previous_scope_;
1471 : std::vector<WasmCode*> code_ptrs;
1472 60968864 : code_ptrs.reserve(code_ptrs_.size());
1473 : code_ptrs.assign(code_ptrs_.begin(), code_ptrs_.end());
1474 60968832 : WasmCode::DecrementRefCount(VectorOf(code_ptrs));
1475 60968884 : }
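// Editor's note: WasmCodeRefScope instances form a per-thread intrusive
// stack: the constructor saves the current top in {previous_scope_} and
// installs itself, and the destructor (required to run in LIFO order, as the
// DCHECK above enforces) pops itself before releasing the refs it collected.
// Minimal sketch of the linking discipline:
//
//   struct RefScopeSketch {
//     static thread_local RefScopeSketch* current;
//     RefScopeSketch* previous_;
//     RefScopeSketch() : previous_(current) { current = this; }  // push
//     ~RefScopeSketch() { current = previous_; }                 // pop, LIFO
//   };
//   thread_local RefScopeSketch* RefScopeSketch::current = nullptr;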
1476 :
1477 : // static
1478 19581032 : void WasmCodeRefScope::AddRef(WasmCode* code) {
1479 : DCHECK_NOT_NULL(code);
1480 19581032 : WasmCodeRefScope* current_scope = current_code_refs_scope;
1481 : DCHECK_NOT_NULL(current_scope);
1482 : auto entry = current_scope->code_ptrs_.insert(code);
1483 : // If we added a new entry, increment the ref counter.
1484 19580980 : if (entry.second) code->IncRef();
1485 19580980 : }
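// Editor's note: AddRef() deduplicates through the scope's set: IncRef() runs
// only when insert() reports a new element, so registering the same WasmCode
// repeatedly within one scope costs a single reference. Typical usage, with
// the scope keeping looked-up code alive (illustrative, not verbatim V8):
//
//   {
//     WasmCodeRefScope scope;  // becomes the innermost scope on this thread
//     WasmCode* code = code_manager->LookupCode(pc);  // registered via AddRef
//     // ... code cannot be freed while the scope is alive ...
//   }  // destructor decrements every ref the scope collected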
1486 :
1487 : } // namespace wasm
1488 : } // namespace internal
1489 122036 : } // namespace v8
1490 : #undef TRACE_HEAP