// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/wasm/wasm-code-manager.h"

#include <iomanip>

#include "src/assembler-inl.h"
#include "src/base/adapters.h"
#include "src/base/macros.h"
#include "src/base/platform/platform.h"
#include "src/counters.h"
#include "src/disassembler.h"
#include "src/globals.h"
#include "src/log.h"
#include "src/macro-assembler-inl.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
#include "src/snapshot/embedded-data.h"
#include "src/vector.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/jump-table-assembler.h"
#include "src/wasm/wasm-import-wrapper-cache.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-objects.h"

#if defined(V8_OS_WIN_X64)
#include "src/unwinding-info-win64.h"
#endif

#define TRACE_HEAP(...)                                   \
  do {                                                    \
    if (FLAG_trace_wasm_native_heap) PrintF(__VA_ARGS__); \
  } while (false)

namespace v8 {
namespace internal {
namespace wasm {

using trap_handler::ProtectedInstructionData;

void DisjointAllocationPool::Merge(base::AddressRegion region) {
  auto dest_it = regions_.begin();
  auto dest_end = regions_.end();

  // Skip over dest regions strictly before {region}.
  while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;

  // After last dest region: insert and done.
  if (dest_it == dest_end) {
    regions_.push_back(region);
    return;
  }

  // Adjacent (from below) to dest: merge and done.
  if (dest_it->begin() == region.end()) {
    base::AddressRegion merged_region{region.begin(),
                                      region.size() + dest_it->size()};
    DCHECK_EQ(merged_region.end(), dest_it->end());
    *dest_it = merged_region;
    return;
  }

  // Before dest: insert and done.
  if (dest_it->begin() > region.end()) {
    regions_.insert(dest_it, region);
    return;
  }

  // {region} is adjacent from above, i.e. it starts exactly where {dest_it}
  // ends. Merge it in, then check whether the merged region has become
  // adjacent to the next region.
  DCHECK_EQ(dest_it->end(), region.begin());
  dest_it->set_size(dest_it->size() + region.size());
  DCHECK_EQ(dest_it->end(), region.end());
  auto next_dest = dest_it;
  ++next_dest;
  if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
    dest_it->set_size(dest_it->size() + next_dest->size());
    DCHECK_EQ(dest_it->end(), next_dest->end());
    regions_.erase(next_dest);
  }
}

base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
  for (auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
    if (size > it->size()) continue;
    base::AddressRegion ret{it->begin(), size};
    if (size == it->size()) {
      regions_.erase(it);
    } else {
      *it = base::AddressRegion{it->begin() + size, it->size() - size};
    }
    return ret;
  }
  return {};
}
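
// A rough usage sketch of {DisjointAllocationPool} (hypothetical addresses,
// not from this file): {Allocate} hands out regions first-fit, and {Merge}
// returns a region to the pool, coalescing it with adjacent free space.
//
//   DisjointAllocationPool pool;
//   pool.Merge({0x1000, 0x100});  // free list: [0x1000, 0x1100)
//   base::AddressRegion a = pool.Allocate(0x40);  // a = [0x1000, 0x1040)
//   base::AddressRegion b = pool.Allocate(0x40);  // b = [0x1040, 0x1080)
//   pool.Merge(a);  // free list: [0x1000, 0x1040), [0x1080, 0x1100)
//   pool.Merge(b);  // coalesced back into one region: [0x1000, 0x1100)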

Address WasmCode::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    if (constant_pool_offset_ < code_comments_offset_) {
      return instruction_start() + constant_pool_offset_;
    }
  }
  return kNullAddress;
}

Address WasmCode::handler_table() const {
  return instruction_start() + handler_table_offset_;
}

uint32_t WasmCode::handler_table_size() const {
  DCHECK_GE(constant_pool_offset_, handler_table_offset_);
  return static_cast<uint32_t>(constant_pool_offset_ - handler_table_offset_);
}

Address WasmCode::code_comments() const {
  return instruction_start() + code_comments_offset_;
}

uint32_t WasmCode::code_comments_size() const {
  DCHECK_GE(unpadded_binary_size_, code_comments_offset_);
  return static_cast<uint32_t>(unpadded_binary_size_ - code_comments_offset_);
}

size_t WasmCode::trap_handler_index() const {
  CHECK(HasTrapHandlerIndex());
  return static_cast<size_t>(trap_handler_index_);
}

void WasmCode::set_trap_handler_index(size_t value) {
  trap_handler_index_ = value;
}

void WasmCode::RegisterTrapHandlerData() {
  DCHECK(!HasTrapHandlerIndex());
  if (kind() != WasmCode::kFunction) return;
  if (protected_instructions_.empty()) return;

  Address base = instruction_start();

  size_t size = instructions().size();
  const int index =
      RegisterHandlerData(base, size, protected_instructions().size(),
                          protected_instructions().start());

  // TODO(eholk): if index is negative, fail.
  CHECK_LE(0, index);
  set_trap_handler_index(static_cast<size_t>(index));
}

bool WasmCode::HasTrapHandlerIndex() const { return trap_handler_index_ >= 0; }

bool WasmCode::ShouldBeLogged(Isolate* isolate) {
  // The return value is cached in {WasmEngine::IsolateData::log_codes}. Be
  // sure to call {WasmEngine::EnableCodeLogging} if this return value would
  // change for any isolate. Otherwise we might lose code events.
  return isolate->logger()->is_listening_to_code_events() ||
         isolate->is_profiling();
}

void WasmCode::LogCode(Isolate* isolate) const {
  DCHECK(ShouldBeLogged(isolate));
  if (IsAnonymous()) return;

  ModuleWireBytes wire_bytes(native_module()->wire_bytes());
  // TODO(herhut): Allow logging code without an on-heap round-trip of the
  // name.
  WireBytesRef name_ref =
      native_module()->module()->LookupFunctionName(wire_bytes, index());
  WasmName name_vec = wire_bytes.GetNameOrNull(name_ref);
  if (!name_vec.empty()) {
    HandleScope scope(isolate);
    MaybeHandle<String> maybe_name = isolate->factory()->NewStringFromUtf8(
        Vector<const char>::cast(name_vec));
    Handle<String> name;
    if (!maybe_name.ToHandle(&name)) {
      name = isolate->factory()->NewStringFromAsciiChecked("<name too long>");
    }
    int name_length;
    auto cname =
        name->ToCString(AllowNullsFlag::DISALLOW_NULLS,
                        RobustnessFlag::ROBUST_STRING_TRAVERSAL, &name_length);
    PROFILE(isolate,
            CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                            {cname.get(), static_cast<size_t>(name_length)}));
  } else {
    EmbeddedVector<char, 32> generated_name;
    int length = SNPrintF(generated_name, "wasm-function[%d]", index());
    generated_name.Truncate(length);
    PROFILE(isolate, CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                                     generated_name));
  }

  if (!source_positions().empty()) {
    LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(instruction_start(),
                                                       source_positions()));
  }
}

void WasmCode::Validate() const {
#ifdef DEBUG
  // We expect certain relocation info modes to never appear in {WasmCode}
  // objects or to be restricted to a small set of valid values. Hence the
  // iteration below does not use a mask, but visits all relocation data.
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    switch (mode) {
      case RelocInfo::WASM_CALL: {
        Address target = it.rinfo()->wasm_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->jump_table_, code);
        CHECK(code->contains(target));
        break;
      }
      case RelocInfo::WASM_STUB_CALL: {
        Address target = it.rinfo()->wasm_stub_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
#ifdef V8_EMBEDDED_BUILTINS
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->runtime_stub_table_, code);
        CHECK(code->contains(target));
#else
        CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
        CHECK_EQ(target, code->instruction_start());
#endif
        break;
      }
      case RelocInfo::INTERNAL_REFERENCE:
      case RelocInfo::INTERNAL_REFERENCE_ENCODED: {
        Address target = it.rinfo()->target_internal_reference();
        CHECK(contains(target));
        break;
      }
      case RelocInfo::EXTERNAL_REFERENCE:
      case RelocInfo::CONST_POOL:
      case RelocInfo::VENEER_POOL:
        // These are OK to appear.
        break;
      default:
        FATAL("Unexpected mode: %d", mode);
    }
  }
#endif
}

void WasmCode::MaybePrint(const char* name) const {
  // Determines whether flags want this code to be printed.
  if ((FLAG_print_wasm_code && kind() == kFunction) ||
      (FLAG_print_wasm_stub_code && kind() != kFunction) || FLAG_print_code) {
    Print(name);
  }
}

void WasmCode::Print(const char* name) const {
  StdoutStream os;
  os << "--- WebAssembly code ---\n";
  Disassemble(name, os);
  os << "--- End code ---\n";
}

void WasmCode::Disassemble(const char* name, std::ostream& os,
                           Address current_pc) const {
  if (name) os << "name: " << name << "\n";
  if (!IsAnonymous()) os << "index: " << index() << "\n";
  os << "kind: " << GetWasmCodeKindAsString(kind_) << "\n";
  os << "compiler: " << (is_liftoff() ? "Liftoff" : "TurboFan") << "\n";
  size_t padding = instructions().size() - unpadded_binary_size_;
  os << "Body (size = " << instructions().size() << " = "
     << unpadded_binary_size_ << " + " << padding << " padding)\n";

#ifdef ENABLE_DISASSEMBLER
  size_t instruction_size = unpadded_binary_size_;
  if (constant_pool_offset_ < instruction_size) {
    instruction_size = constant_pool_offset_;
  }
  if (safepoint_table_offset_ && safepoint_table_offset_ < instruction_size) {
    instruction_size = safepoint_table_offset_;
  }
  if (handler_table_offset_ < instruction_size) {
    instruction_size = handler_table_offset_;
  }
  DCHECK_LT(0, instruction_size);
  os << "Instructions (size = " << instruction_size << ")\n";
  Disassembler::Decode(nullptr, &os, instructions().start(),
                       instructions().start() + instruction_size,
                       CodeReference(this), current_pc);
  os << "\n";

  if (handler_table_size() > 0) {
    HandlerTable table(handler_table(), handler_table_size());
    os << "Exception Handler Table (size = " << table.NumberOfReturnEntries()
       << "):\n";
    table.HandlerTableReturnPrint(os);
    os << "\n";
  }

  if (!protected_instructions_.empty()) {
    os << "Protected instructions:\n pc offset land pad\n";
    for (auto& data : protected_instructions()) {
      os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
         << std::hex << data.landing_offset << "\n";
    }
    os << "\n";
  }

  if (!source_positions().empty()) {
    os << "Source positions:\n pc offset position\n";
    for (SourcePositionTableIterator it(source_positions()); !it.done();
         it.Advance()) {
      os << std::setw(10) << std::hex << it.code_offset() << std::dec
         << std::setw(10) << it.source_position().ScriptOffset()
         << (it.is_statement() ? " statement" : "") << "\n";
    }
    os << "\n";
  }

  if (safepoint_table_offset_ > 0) {
    SafepointTable table(instruction_start(), safepoint_table_offset_,
                         stack_slots_);
    os << "Safepoints (size = " << table.size() << ")\n";
    for (uint32_t i = 0; i < table.length(); i++) {
      uintptr_t pc_offset = table.GetPcOffset(i);
      os << reinterpret_cast<const void*>(instruction_start() + pc_offset);
      os << std::setw(6) << std::hex << pc_offset << " " << std::dec;
      table.PrintEntry(i, os);
      os << " (sp -> fp)";
      SafepointEntry entry = table.GetEntry(i);
      if (entry.trampoline_pc() != -1) {
        os << " trampoline: " << std::hex << entry.trampoline_pc() << std::dec;
      }
      if (entry.has_deoptimization_index()) {
        os << " deopt: " << std::setw(6) << entry.deoptimization_index();
      }
      os << "\n";
    }
    os << "\n";
  }

  os << "RelocInfo (size = " << reloc_info_.size() << ")\n";
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    it.rinfo()->Print(nullptr, os);
  }
  os << "\n";

  if (code_comments_size() > 0) {
    PrintCodeCommentsSection(os, code_comments(), code_comments_size());
  }
#endif  // ENABLE_DISASSEMBLER
}

const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
  switch (kind) {
    case WasmCode::kFunction:
      return "wasm function";
    case WasmCode::kWasmToJsWrapper:
      return "wasm-to-js";
    case WasmCode::kRuntimeStub:
      return "runtime-stub";
    case WasmCode::kInterpreterEntry:
      return "interpreter entry";
    case WasmCode::kJumpTable:
      return "jump table";
  }
  return "unknown kind";
}

WasmCode::~WasmCode() {
  if (HasTrapHandlerIndex()) {
    CHECK_LT(trap_handler_index(),
             static_cast<size_t>(std::numeric_limits<int>::max()));
    trap_handler::ReleaseHandlerData(static_cast<int>(trap_handler_index()));
  }
}

V8_WARN_UNUSED_RESULT bool WasmCode::DecRefOnPotentiallyDeadCode() {
  if (native_module_->engine()->AddPotentiallyDeadCode(this)) {
    // The code just became potentially dead. The ref count we wanted to
    // decrement is now transferred to the set of potentially dead code, and
    // will be decremented when the next GC is run.
    return false;
  }
  // If we reach here, the code was already potentially dead. Decrement the ref
  // count, and return true if it drops to zero.
  int old_count = ref_count_.load(std::memory_order_relaxed);
  while (true) {
    DCHECK_LE(1, old_count);
    if (ref_count_.compare_exchange_weak(old_count, old_count - 1,
                                         std::memory_order_relaxed)) {
      return old_count == 1;
    }
  }
}

// static
void WasmCode::DecrementRefCount(Vector<WasmCode*> code_vec) {
  // Decrement the ref counter of all given code objects. Keep the ones whose
  // ref count drops to zero.
  std::unordered_map<NativeModule*, std::vector<WasmCode*>> dead_code;
  for (WasmCode* code : code_vec) {
    if (code->DecRef()) dead_code[code->native_module()].push_back(code);
  }

  // For each native module, free all its code objects at once.
  for (auto& dead_code_entry : dead_code) {
    NativeModule* native_module = dead_code_entry.first;
    Vector<WasmCode*> code_vec = VectorOf(dead_code_entry.second);
    native_module->FreeCode(code_vec);
  }
}

NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled,
                           bool can_request_more, VirtualMemory code_space,
                           std::shared_ptr<const WasmModule> module,
                           std::shared_ptr<Counters> async_counters,
                           std::shared_ptr<NativeModule>* shared_this)
    : enabled_features_(enabled),
      module_(std::move(module)),
      import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
          new WasmImportWrapperCache(this))),
      free_code_space_(code_space.region()),
      engine_(engine),
      can_request_more_memory_(can_request_more),
      use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
                                                             : kNoTrapHandler) {
  // We receive a pointer to an empty {std::shared_ptr}, and install ourselves
  // there.
  DCHECK_NOT_NULL(shared_this);
  DCHECK_NULL(*shared_this);
  shared_this->reset(this);
  compilation_state_ =
      CompilationState::New(*shared_this, std::move(async_counters));
  DCHECK_NOT_NULL(module_);
  owned_code_space_.emplace_back(std::move(code_space));
  owned_code_.reserve(num_functions());

#if defined(V8_OS_WIN_X64)
  // On some platforms, specifically Win64, we need to reserve some pages at
  // the beginning of an executable space.
  // See src/heap/spaces.cc, MemoryAllocator::InitializeCodePageAllocator() and
  // https://cs.chromium.org/chromium/src/components/crash/content/app/crashpad_win.cc?rcl=fd680447881449fba2edcf0589320e7253719212&l=204
  // for details.
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      FLAG_win64_unwinding_info) {
    AllocateForCode(Heap::GetCodeRangeReservedAreaSize());
  }
#endif

  uint32_t num_wasm_functions = module_->num_declared_functions;
  if (num_wasm_functions > 0) {
    code_table_.reset(new WasmCode* [num_wasm_functions] {});

    WasmCodeRefScope code_ref_scope;
    jump_table_ = CreateEmptyJumpTable(
        JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
  }
}

void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
  WasmCodeRefScope code_ref_scope;
  DCHECK_LE(num_functions(), max_functions);
  WasmCode** new_table = new WasmCode* [max_functions] {};
  if (module_->num_declared_functions > 0) {
    memcpy(new_table, code_table_.get(),
           module_->num_declared_functions * sizeof(*new_table));
  }
  code_table_.reset(new_table);

  // Re-allocate jump table.
  jump_table_ = CreateEmptyJumpTable(
      JumpTableAssembler::SizeForNumberOfSlots(max_functions));
}

void NativeModule::LogWasmCodes(Isolate* isolate) {
  if (!WasmCode::ShouldBeLogged(isolate)) return;

  // TODO(titzer): we skip the logging of the import wrappers
  // here, but they should be included somehow.
  int start = module()->num_imported_functions;
  int end = start + module()->num_declared_functions;
  WasmCodeRefScope code_ref_scope;
  for (int func_index = start; func_index < end; ++func_index) {
    if (WasmCode* code = GetCode(func_index)) code->LogCode(isolate);
  }
}

CompilationEnv NativeModule::CreateCompilationEnv() const {
  return {module(), use_trap_handler_, kRuntimeExceptionSupport,
          enabled_features_};
}
499 753 : WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
500 753 : return AddAndPublishAnonymousCode(code, WasmCode::kFunction);
501 : }
502 :
503 2639 : void NativeModule::UseLazyStubs() {
504 2639 : uint32_t start = module_->num_imported_functions;
505 2639 : uint32_t end = start + module_->num_declared_functions;
506 32165 : for (uint32_t func_index = start; func_index < end; func_index++) {
507 14763 : UseLazyStub(func_index);
508 : }
509 2639 : }
510 :
511 15118 : void NativeModule::UseLazyStub(uint32_t func_index) {
512 : DCHECK_LE(module_->num_imported_functions, func_index);
513 : DCHECK_LT(func_index,
514 : module_->num_imported_functions + module_->num_declared_functions);
515 :
516 : // Add jump table entry for jump to the lazy compile stub.
517 15118 : uint32_t slot_index = func_index - module_->num_imported_functions;
518 : DCHECK_NE(runtime_stub_entry(WasmCode::kWasmCompileLazy), kNullAddress);
519 : JumpTableAssembler::EmitLazyCompileJumpSlot(
520 15118 : jump_table_->instruction_start(), slot_index, func_index,
521 15118 : runtime_stub_entry(WasmCode::kWasmCompileLazy), WasmCode::kFlushICache);
522 15118 : }
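
// How a lazily compiled call resolves, as an informal sketch (the exact stub
// mechanics live in {JumpTableAssembler} and the WasmCompileLazy builtin):
//
//   call to function {func_index}
//     -> its jump table slot, patched by {UseLazyStub} to enter the
//        WasmCompileLazy stub with {func_index} as argument
//     -> the stub triggers compilation of that function
//     -> publishing the new code patches the slot to point at it
//     -> subsequent calls jump straight to the compiled code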

// TODO(mstarzinger): Remove the {Isolate} parameter once {V8_EMBEDDED_BUILTINS}
// is removed and embedded builtins are no longer optional.
void NativeModule::SetRuntimeStubs(Isolate* isolate) {
  DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
#ifdef V8_EMBEDDED_BUILTINS
  WasmCodeRefScope code_ref_scope;
  WasmCode* jump_table =
      CreateEmptyJumpTable(JumpTableAssembler::SizeForNumberOfStubSlots(
          WasmCode::kRuntimeStubCount));
  Address base = jump_table->instruction_start();
  EmbeddedData embedded_data = EmbeddedData::FromBlob();
#define RUNTIME_STUB(Name) {Builtins::k##Name, WasmCode::k##Name},
#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
  std::pair<Builtins::Name, WasmCode::RuntimeStubId> wasm_runtime_stubs[] = {
      WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
#undef RUNTIME_STUB
#undef RUNTIME_STUB_TRAP
  for (auto pair : wasm_runtime_stubs) {
    CHECK(embedded_data.ContainsBuiltin(pair.first));
    Address builtin = embedded_data.InstructionStartOfBuiltin(pair.first);
    JumpTableAssembler::EmitRuntimeStubSlot(base, pair.second, builtin,
                                            WasmCode::kNoFlushICache);
    uint32_t slot_offset =
        JumpTableAssembler::StubSlotIndexToOffset(pair.second);
    runtime_stub_entries_[pair.second] = base + slot_offset;
  }
  FlushInstructionCache(jump_table->instructions().start(),
                        jump_table->instructions().size());
  DCHECK_NULL(runtime_stub_table_);
  runtime_stub_table_ = jump_table;
#else   // V8_EMBEDDED_BUILTINS
  HandleScope scope(isolate);
  WasmCodeRefScope code_ref_scope;
  USE(runtime_stub_table_);  // Actually unused, but avoids ifdefs in header.
#define COPY_BUILTIN(Name)                                        \
  runtime_stub_entries_[WasmCode::k##Name] =                      \
      AddAndPublishAnonymousCode(                                 \
          isolate->builtins()->builtin_handle(Builtins::k##Name), \
          WasmCode::kRuntimeStub, #Name)                          \
          ->instruction_start();
#define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
#undef COPY_BUILTIN_TRAP
#undef COPY_BUILTIN
#endif  // V8_EMBEDDED_BUILTINS
  DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
}

WasmCode* NativeModule::AddAndPublishAnonymousCode(Handle<Code> code,
                                                   WasmCode::Kind kind,
                                                   const char* name) {
  // For off-heap builtins, we create a copy of the off-heap instruction stream
  // instead of the on-heap code object containing the trampoline. Ensure that
  // we do not apply the on-heap reloc info to the off-heap instructions.
  const size_t relocation_size =
      code->is_off_heap_trampoline() ? 0 : code->relocation_size();
  OwnedVector<byte> reloc_info;
  if (relocation_size > 0) {
    reloc_info = OwnedVector<byte>::New(relocation_size);
    memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
  }
  Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
                                     code->GetIsolate());
  OwnedVector<byte> source_pos =
      OwnedVector<byte>::New(source_pos_table->length());
  if (source_pos_table->length() > 0) {
    source_pos_table->copy_out(0, source_pos.start(),
                               source_pos_table->length());
  }
  Vector<const byte> instructions(
      reinterpret_cast<byte*>(code->InstructionStart()),
      static_cast<size_t>(code->InstructionSize()));
  const uint32_t stack_slots = static_cast<uint32_t>(
      code->has_safepoint_info() ? code->stack_slots() : 0);

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // Code objects contain real offsets but WasmCode expects an offset of 0 to
  // mean 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      code->has_safepoint_table() ? code->safepoint_table_offset() : 0);
  const size_t handler_table_offset =
      static_cast<size_t>(code->handler_table_offset());
  const size_t constant_pool_offset =
      static_cast<size_t>(code->constant_pool_offset());
  const size_t code_comments_offset =
      static_cast<size_t>(code->code_comments_offset());

  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = reinterpret_cast<Address>(dst_code_bytes.begin()) -
                   code->InstructionStart();
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  RelocIterator orig_it(*code, mode_mask);
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next(), orig_it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache after relocation.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  DCHECK_NE(kind, WasmCode::Kind::kInterpreterEntry);
  std::unique_ptr<WasmCode> new_code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      dst_code_bytes,                           // instructions
      stack_slots,                              // stack_slots
      0,                                        // tagged_parameter_slots
      safepoint_table_offset,                   // safepoint_table_offset
      handler_table_offset,                     // handler_table_offset
      constant_pool_offset,                     // constant_pool_offset
      code_comments_offset,                     // code_comments_offset
      instructions.size(),                      // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      std::move(reloc_info),                    // reloc_info
      std::move(source_pos),                    // source positions
      kind,                                     // kind
      ExecutionTier::kNone}};                   // tier
  new_code->MaybePrint(name);
  new_code->Validate();

  return PublishCode(std::move(new_code));
}

std::unique_ptr<WasmCode> NativeModule::AddCode(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier) {
  return AddCodeWithCodeSpace(index, desc, stack_slots, tagged_parameter_slots,
                              std::move(protected_instructions),
                              std::move(source_position_table), kind, tier,
                              AllocateForCode(desc.instr_size));
}

std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier, Vector<uint8_t> dst_code_bytes) {
  OwnedVector<byte> reloc_info;
  if (desc.reloc_size > 0) {
    reloc_info = OwnedVector<byte>::New(desc.reloc_size);
    memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
           desc.reloc_size);
  }

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // CodeDesc contains real offsets but WasmCode expects an offset of 0 to mean
  // 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      desc.safepoint_table_size == 0 ? 0 : desc.safepoint_table_offset);
  const size_t handler_table_offset =
      static_cast<size_t>(desc.handler_table_offset);
  const size_t constant_pool_offset =
      static_cast<size_t>(desc.constant_pool_offset);
  const size_t code_comments_offset =
      static_cast<size_t>(desc.code_comments_offset);
  const size_t instr_size = static_cast<size_t>(desc.instr_size);

  memcpy(dst_code_bytes.begin(), desc.buffer,
         static_cast<size_t>(desc.instr_size));

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = dst_code_bytes.begin() - desc.buffer;
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmCall(mode)) {
      uint32_t call_tag = it.rinfo()->wasm_call_tag();
      Address target = GetCallTargetForFunction(call_tag);
      it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsWasmStubCall(mode)) {
      uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache after relocation.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, instr_size, std::move(protected_instructions),
      std::move(reloc_info), std::move(source_position_table), kind, tier}};
  code->MaybePrint();
  code->Validate();

  code->RegisterTrapHandlerData();

  return code;
}
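
// The {apply(delta)} path above is plain position adjustment; an informal
// example with made-up addresses: code assembled at buffer address 0x1000
// holding a pc-relative reference to 0x1040 is copied to 0x7000. With
// delta = 0x7000 - 0x1000, applying the delta rewrites the reference to
// 0x7040. Wasm calls and stub calls are instead resolved symbolically via
// their call tags, since their real targets live in this module's jump
// tables rather than in the original assembly buffer.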

WasmCode* NativeModule::PublishCode(std::unique_ptr<WasmCode> code) {
  base::MutexGuard lock(&allocation_mutex_);
  return PublishCodeLocked(std::move(code));
}

namespace {
WasmCode::Kind GetCodeKindForExecutionTier(ExecutionTier tier) {
  switch (tier) {
    case ExecutionTier::kInterpreter:
      return WasmCode::Kind::kInterpreterEntry;
    case ExecutionTier::kLiftoff:
    case ExecutionTier::kTurbofan:
      return WasmCode::Kind::kFunction;
    case ExecutionTier::kNone:
      UNREACHABLE();
  }
}
}  // namespace

WasmCode* NativeModule::PublishCodeLocked(std::unique_ptr<WasmCode> code) {
  // The caller must hold the {allocation_mutex_}, hence trying to lock it here
  // must fail.
  DCHECK(!allocation_mutex_.TryLock());

  if (!code->IsAnonymous()) {
    DCHECK_LT(code->index(), num_functions());
    DCHECK_LE(module_->num_imported_functions, code->index());

    // Assume an order of execution tiers that represents the quality of their
    // generated code.
    static_assert(ExecutionTier::kNone < ExecutionTier::kInterpreter &&
                      ExecutionTier::kInterpreter < ExecutionTier::kLiftoff &&
                      ExecutionTier::kLiftoff < ExecutionTier::kTurbofan,
                  "Assume an order on execution tiers");

    // Update the code table, but avoid falling back to less optimized code. We
    // use the new code only if it was compiled with a higher tier.
    uint32_t slot_idx = code->index() - module_->num_imported_functions;
    WasmCode* prior_code = code_table_[slot_idx];
    bool update_code_table = !prior_code || prior_code->tier() < code->tier();
    if (update_code_table) {
      code_table_[slot_idx] = code.get();
      if (prior_code) {
        WasmCodeRefScope::AddRef(prior_code);
        // The code is added to the current {WasmCodeRefScope}, hence the ref
        // count cannot drop to zero here.
        CHECK(!prior_code->DecRef());
      }
    }

    // Populate the jump table with optimized code, unless there is an active
    // redirection to the interpreter that should be preserved.
    bool update_jump_table =
        update_code_table && !has_interpreter_redirection(code->index());

    // Ensure that interpreter entries always populate the jump table.
    if (code->kind_ == WasmCode::Kind::kInterpreterEntry) {
      SetInterpreterRedirection(code->index());
      update_jump_table = true;
    }

    if (update_jump_table) {
      JumpTableAssembler::PatchJumpTableSlot(
          jump_table_->instruction_start(), slot_idx, code->instruction_start(),
          WasmCode::kFlushICache);
    }
  }
  WasmCodeRefScope::AddRef(code.get());
  WasmCode* result = code.get();
  owned_code_.emplace_back(std::move(code));
  return result;
}
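
// A concrete tiering sequence for {PublishCodeLocked} (hypothetical, for one
// declared function):
//   1. Liftoff code is published first: both its code table slot and its
//      jump table slot point at the Liftoff code.
//   2. TurboFan code arrives: it has a higher tier, so both slots are
//      updated and the Liftoff code drops its code-table reference.
//   3. Stale Liftoff code published after the TurboFan code (a lower tier)
//      is ignored, and an active interpreter redirection keeps the jump
//      table pointing at the interpreter entry regardless.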

WasmCode* NativeModule::AddDeserializedCode(
    uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
    uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
    size_t handler_table_offset, size_t constant_pool_offset,
    size_t code_comments_offset, size_t unpadded_binary_size,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> reloc_info,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier) {
  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, unpadded_binary_size,
      std::move(protected_instructions), std::move(reloc_info),
      std::move(source_position_table), kind, tier}};

  code->RegisterTrapHandlerData();

  // Note: we do not flush the i-cache here, since the code needs to be
  // relocated anyway. The caller is responsible for flushing the i-cache
  // later.

  return PublishCode(std::move(code));
}

std::vector<WasmCode*> NativeModule::SnapshotCodeTable() const {
  base::MutexGuard lock(&allocation_mutex_);
  WasmCode** start = code_table_.get();
  WasmCode** end = start + module_->num_declared_functions;
  return std::vector<WasmCode*>{start, end};
}

WasmCode* NativeModule::GetCode(uint32_t index) const {
  base::MutexGuard guard(&allocation_mutex_);
  DCHECK_LT(index, num_functions());
  DCHECK_LE(module_->num_imported_functions, index);
  WasmCode* code = code_table_[index - module_->num_imported_functions];
  WasmCodeRefScope::AddRef(code);
  return code;
}

bool NativeModule::HasCode(uint32_t index) const {
  base::MutexGuard guard(&allocation_mutex_);
  DCHECK_LT(index, num_functions());
  DCHECK_LE(module_->num_imported_functions, index);
  return code_table_[index - module_->num_imported_functions] != nullptr;
}

WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t jump_table_size) {
  // Only call this if we really need a jump table.
  DCHECK_LT(0, jump_table_size);
  Vector<uint8_t> code_space = AllocateForCode(jump_table_size);
  ZapCode(reinterpret_cast<Address>(code_space.begin()), code_space.size());
  std::unique_ptr<WasmCode> code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      code_space,                               // instructions
      0,                                        // stack_slots
      0,                                        // tagged_parameter_slots
      0,                                        // safepoint_table_offset
      jump_table_size,                          // handler_table_offset
      jump_table_size,                          // constant_pool_offset
      jump_table_size,                          // code_comments_offset
      jump_table_size,                          // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      OwnedVector<const uint8_t>{},             // reloc_info
      OwnedVector<const uint8_t>{},             // source_pos
      WasmCode::kJumpTable,                     // kind
      ExecutionTier::kNone}};                   // tier
  return PublishCode(std::move(code));
}

Vector<byte> NativeModule::AllocateForCode(size_t size) {
  base::MutexGuard lock(&allocation_mutex_);
  DCHECK_LT(0, size);
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  // This happens under a lock assumed by the caller.
  size = RoundUp<kCodeAlignment>(size);
  base::AddressRegion code_space = free_code_space_.Allocate(size);
  if (code_space.is_empty()) {
    if (!can_request_more_memory_) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }

    Address hint = owned_code_space_.empty() ? kNullAddress
                                             : owned_code_space_.back().end();

    VirtualMemory new_mem = engine_->code_manager()->TryAllocate(
        size, reinterpret_cast<void*>(hint));
    if (!new_mem.IsReserved()) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }
    engine_->code_manager()->AssignRanges(new_mem.address(), new_mem.end(),
                                          this);

    free_code_space_.Merge(new_mem.region());
    owned_code_space_.emplace_back(std::move(new_mem));
    code_space = free_code_space_.Allocate(size);
    DCHECK(!code_space.is_empty());
  }
  const Address page_size = page_allocator->AllocatePageSize();
  Address commit_start = RoundUp(code_space.begin(), page_size);
  Address commit_end = RoundUp(code_space.end(), page_size);
  // {commit_start} is either {code_space}.begin() or the start of the next
  // page; {commit_end} is the start of the page after the one in which the
  // allocation ends. Since we bump-allocate from page-aligned reservations,
  // every page before {commit_start} is already committed; everything from
  // {commit_start} up to {commit_end} still needs to be committed.
  if (commit_start < commit_end) {
    committed_code_space_.fetch_add(commit_end - commit_start);
    // Committed code cannot grow bigger than maximum code space size.
    DCHECK_LE(committed_code_space_.load(), kMaxWasmCodeMemory);
#if V8_OS_WIN
    // On Windows, we cannot commit a region that straddles different
    // reservations of virtual memory. Because we bump-allocate, and because,
    // if we need more memory, we append that memory at the end of the
    // owned_code_space_ list, we traverse that list in reverse order to find
    // the reservation(s) that guide how to chunk the region to commit.
    for (auto& vmem : base::Reversed(owned_code_space_)) {
      if (commit_end <= vmem.address() || vmem.end() <= commit_start) continue;
      Address start = std::max(commit_start, vmem.address());
      Address end = std::min(commit_end, vmem.end());
      size_t commit_size = static_cast<size_t>(end - start);
      if (!engine_->code_manager()->Commit(start, commit_size)) {
        V8::FatalProcessOutOfMemory(nullptr,
                                    "NativeModule::AllocateForCode commit");
        UNREACHABLE();
      }
      // Opportunistically reduce the commit range. This might terminate the
      // loop early.
      if (commit_start == start) commit_start = end;
      if (commit_end == end) commit_end = start;
      if (commit_start >= commit_end) break;
    }
#else
    if (!engine_->code_manager()->Commit(commit_start,
                                         commit_end - commit_start)) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode commit");
      UNREACHABLE();
    }
#endif
  }
  DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
  allocated_code_space_.Merge(code_space);
  generated_code_size_.fetch_add(code_space.size(), std::memory_order_relaxed);

  TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, code_space.begin(),
             size);
  return {reinterpret_cast<byte*>(code_space.begin()), code_space.size()};
}
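
// A worked example of the commit arithmetic above (made-up numbers, 4 KiB
// pages): if the pool returns code_space = [0x20800, 0x21900), then
// commit_start = RoundUp(0x20800, 0x1000) = 0x21000 and
// commit_end = RoundUp(0x21900, 0x1000) = 0x22000, so exactly one new page
// [0x21000, 0x22000) is committed; the partially used page below 0x21000 was
// already committed by an earlier allocation. If the allocation fits entirely
// within committed pages, commit_start == commit_end and the commit is
// skipped.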

namespace {
class NativeModuleWireBytesStorage final : public WireBytesStorage {
 public:
  explicit NativeModuleWireBytesStorage(
      std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes)
      : wire_bytes_(std::move(wire_bytes)) {}

  Vector<const uint8_t> GetCode(WireBytesRef ref) const final {
    return wire_bytes_->as_vector().SubVector(ref.offset(), ref.end_offset());
  }

 private:
  const std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;
};
}  // namespace

void NativeModule::SetWireBytes(OwnedVector<const uint8_t> wire_bytes) {
  auto shared_wire_bytes =
      std::make_shared<OwnedVector<const uint8_t>>(std::move(wire_bytes));
  wire_bytes_ = shared_wire_bytes;
  if (!shared_wire_bytes->empty()) {
    compilation_state_->SetWireBytesStorage(
        std::make_shared<NativeModuleWireBytesStorage>(
            std::move(shared_wire_bytes)));
  }
}

WasmCode* NativeModule::Lookup(Address pc) const {
  base::MutexGuard lock(&allocation_mutex_);
  if (owned_code_.empty()) return nullptr;
  // First update the sorted portion counter.
  if (owned_code_sorted_portion_ == 0) ++owned_code_sorted_portion_;
  while (owned_code_sorted_portion_ < owned_code_.size() &&
         owned_code_[owned_code_sorted_portion_ - 1]->instruction_start() <=
             owned_code_[owned_code_sorted_portion_]->instruction_start()) {
    ++owned_code_sorted_portion_;
  }
  // Execute at most two rounds: First check whether the {pc} is within the
  // sorted portion of {owned_code_}. If it's not, then sort the whole vector
  // and retry.
  while (true) {
    auto iter =
        std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
                         [](Address pc, const std::unique_ptr<WasmCode>& code) {
                           DCHECK_NE(kNullAddress, pc);
                           DCHECK_NOT_NULL(code);
                           return pc < code->instruction_start();
                         });
    if (iter != owned_code_.begin()) {
      --iter;
      WasmCode* candidate = iter->get();
      DCHECK_NOT_NULL(candidate);
      if (candidate->contains(pc)) {
        WasmCodeRefScope::AddRef(candidate);
        return candidate;
      }
    }
    if (owned_code_sorted_portion_ == owned_code_.size()) return nullptr;
    std::sort(owned_code_.begin(), owned_code_.end(),
              [](const std::unique_ptr<WasmCode>& code1,
                 const std::unique_ptr<WasmCode>& code2) {
                return code1->instruction_start() < code2->instruction_start();
              });
    owned_code_sorted_portion_ = owned_code_.size();
  }
}
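
// Informal note on the lookup strategy above: {owned_code_} is mostly
// appended in increasing address order, so {owned_code_sorted_portion_}
// usually covers the whole vector and the first binary-search round already
// finds the code containing {pc}. The full std::sort is the rare slow path,
// taken at most once per batch of out-of-order insertions; afterwards the
// vector stays sorted until new code is appended out of order again.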

Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
  // TODO(clemensh): Measure performance win of returning instruction start
  // directly if we have turbofan code. Downside: Redirecting functions (e.g.
  // for debugging) gets much harder.

  // Return the jump table slot for that function index.
  DCHECK_NOT_NULL(jump_table_);
  uint32_t slot_idx = func_index - module_->num_imported_functions;
  uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
  DCHECK_LT(slot_offset, jump_table_->instructions().size());
  return jump_table_->instruction_start() + slot_offset;
}

uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
    Address slot_address) const {
  DCHECK(is_jump_table_slot(slot_address));
  uint32_t slot_offset =
      static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
  uint32_t slot_idx = JumpTableAssembler::SlotOffsetToIndex(slot_offset);
  DCHECK_LT(slot_idx, module_->num_declared_functions);
  return module_->num_imported_functions + slot_idx;
}
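
// The two functions above are inverses of each other. A hypothetical round
// trip, for a module with 3 imported functions and an assumed slot size of
// 8 bytes (the real slot size is platform dependent):
//
//   GetCallTargetForFunction(5)
//     -> slot_idx = 5 - 3 = 2, slot_offset = 2 * 8 = 16
//     -> returns jump_table_start + 16
//   GetFunctionIndexFromJumpTableSlot(jump_table_start + 16)
//     -> slot_offset = 16, slot_idx = 2
//     -> returns 3 + 2 = 5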

const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
#define RETURN_NAME(Name)                                               \
  if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
    return #Name;                                                       \
  }
#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
#undef RETURN_NAME_TRAP
#undef RETURN_NAME
  return "<unknown>";
}

NativeModule::~NativeModule() {
  TRACE_HEAP("Deleting native module: %p\n", reinterpret_cast<void*>(this));
  // Cancel all background compilation before resetting any field of the
  // NativeModule or freeing anything.
  compilation_state_->AbortCompilation();
  engine_->FreeNativeModule(this);
  // Free the import wrapper cache before releasing the {WasmCode} objects in
  // {owned_code_}. The destructor of {WasmImportWrapperCache} still needs to
  // decrease reference counts on the {WasmCode} objects.
  import_wrapper_cache_.reset();
}

WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
                                 size_t max_committed)
    : memory_tracker_(memory_tracker),
      max_committed_code_space_(max_committed),
      total_committed_code_space_(0),
      critical_committed_code_space_(max_committed / 2) {
  DCHECK_LE(max_committed, kMaxWasmCodeMemory);
}

bool WasmCodeManager::Commit(Address start, size_t size) {
  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) return true;
  DCHECK(IsAligned(start, AllocatePageSize()));
  DCHECK(IsAligned(size, AllocatePageSize()));
  // Reserve the size. Use a CAS loop to avoid overflow on
  // {total_committed_code_space_}.
  size_t old_value = total_committed_code_space_.load();
  while (true) {
    DCHECK_GE(max_committed_code_space_, old_value);
    if (size > max_committed_code_space_ - old_value) return false;
    if (total_committed_code_space_.compare_exchange_weak(old_value,
                                                          old_value + size)) {
      break;
    }
  }
  PageAllocator::Permission permission = FLAG_wasm_write_protect_code_memory
                                             ? PageAllocator::kReadWrite
                                             : PageAllocator::kReadWriteExecute;

  bool ret =
      SetPermissions(GetPlatformPageAllocator(), start, size, permission);
  TRACE_HEAP("Setting rw permissions for %p:%p\n",
             reinterpret_cast<void*>(start),
             reinterpret_cast<void*>(start + size));

  if (!ret) {
    // Highly unlikely.
    total_committed_code_space_.fetch_sub(size);
    return false;
  }
  return true;
}
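
// Note on the budget check above: it is written as {size > max - old} rather
// than {old + size > max} because the latter can wrap around for a huge
// {size}. For example, with max = 1 GiB and old = 900 MiB, a bogus size near
// SIZE_MAX would make {old + size} overflow to a small value and pass, while
// {size > max - old} (i.e. size > 124 MiB) correctly rejects it.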

void WasmCodeManager::AssignRanges(Address start, Address end,
                                   NativeModule* native_module) {
  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
}

VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  DCHECK_GT(size, 0);
  size = RoundUp(size, page_allocator->AllocatePageSize());
  if (!memory_tracker_->ReserveAddressSpace(size)) return {};
  if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();

  VirtualMemory mem(page_allocator, size, hint,
                    page_allocator->AllocatePageSize());
  if (!mem.IsReserved()) {
    memory_tracker_->ReleaseReservation(size);
    return {};
  }
  TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
             reinterpret_cast<void*>(mem.address()),
             reinterpret_cast<void*>(mem.end()), mem.size());

  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) {
    SetPermissions(GetPlatformPageAllocator(), mem.address(), mem.size(),
                   PageAllocator::kReadWriteExecute);
  }
  return mem;
}

void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
  // This has to be set before committing any memory.
  DCHECK_EQ(0, total_committed_code_space_.load());
  max_committed_code_space_ = limit;
  critical_committed_code_space_.store(limit / 2);
}

// static
size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
  constexpr size_t kCodeSizeMultiplier = 4;
  constexpr size_t kCodeOverhead = 32;     // for prologue, stack check, ...
  constexpr size_t kStaticCodeSize = 512;  // runtime stubs, ...
  constexpr size_t kImportSize = 64 * kSystemPointerSize;

  size_t estimate = kStaticCodeSize;
  for (auto& function : module->functions) {
    estimate += kCodeOverhead + kCodeSizeMultiplier * function.code.length();
  }
  estimate +=
      JumpTableAssembler::SizeForNumberOfSlots(module->num_declared_functions);
  estimate += kImportSize * module->num_imported_functions;

  return estimate;
}
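
// A quick worked instance of the estimate above (made-up module): two
// declared functions with 100 and 200 bytes of wire-format body, one import,
// 64-bit target (kSystemPointerSize == 8):
//
//   estimate = 512                      // kStaticCodeSize
//            + (32 + 4 * 100)           // first function
//            + (32 + 4 * 200)           // second function
//            + SizeForNumberOfSlots(2)  // jump table, platform dependent
//            + 64 * 8 * 1               // one import
//            = 2288 + jump table size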

// static
size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
    const WasmModule* module) {
  size_t wasm_module_estimate = EstimateStoredSize(module);

  uint32_t num_wasm_functions = module->num_declared_functions;

  // TODO(wasm): Include wire bytes size.
  size_t native_module_estimate =
      sizeof(NativeModule) +                     /* NativeModule struct */
      (sizeof(WasmCode*) * num_wasm_functions) + /* code table size */
      (sizeof(WasmCode) * num_wasm_functions);   /* code object size */

  return wasm_module_estimate + native_module_estimate;
}

std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule(
    WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
    size_t code_size_estimate, bool can_request_more,
    std::shared_ptr<const WasmModule> module) {
  DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
  if (total_committed_code_space_.load() >
      critical_committed_code_space_.load()) {
    (reinterpret_cast<v8::Isolate*>(isolate))
        ->MemoryPressureNotification(MemoryPressureLevel::kCritical);
    size_t committed = total_committed_code_space_.load();
    DCHECK_GE(max_committed_code_space_, committed);
    critical_committed_code_space_.store(
        committed + (max_committed_code_space_ - committed) / 2);
  }

  // If the code must be contiguous, reserve enough address space up front.
  size_t code_vmem_size =
      kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
  // Try up to two times; getting rid of dead JSArrayBuffer allocations might
  // require two GCs because the first GC may be incremental and may have
  // floating garbage.
  static constexpr int kAllocationRetries = 2;
  VirtualMemory code_space;
  for (int retries = 0;; ++retries) {
    code_space = TryAllocate(code_vmem_size);
    if (code_space.IsReserved()) break;
    if (retries == kAllocationRetries) {
      V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
      UNREACHABLE();
    }
    // Run one GC, then try the allocation again.
    isolate->heap()->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                                true);
  }

  Address start = code_space.address();
  size_t size = code_space.size();
  Address end = code_space.end();
  std::shared_ptr<NativeModule> ret;
  new NativeModule(engine, enabled, can_request_more, std::move(code_space),
                   std::move(module), isolate->async_counters(), &ret);
  // The constructor initialized the shared_ptr.
  DCHECK_NOT_NULL(ret);
  TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
             size);

#if defined(V8_OS_WIN_X64)
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      FLAG_win64_unwinding_info) {
    win64_unwindinfo::RegisterNonABICompliantCodeRange(
        reinterpret_cast<void*>(start), size);
  }
#endif

  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
  return ret;
}

bool NativeModule::SetExecutable(bool executable) {
  if (is_executable_ == executable) return true;
  TRACE_HEAP("Setting module %p as executable: %d.\n", this, executable);

  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();

  if (FLAG_wasm_write_protect_code_memory) {
    PageAllocator::Permission permission =
        executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
#if V8_OS_WIN
    // On Windows, we need to switch permissions per separate virtual memory
    // reservation. This is really just a problem when the NativeModule is
    // growable (meaning can_request_more_memory_). That's 32-bit in
    // production, or unittests.
    // For now, in that case, we commit at reserved memory granularity.
    // Technically, that may be a waste, because we may reserve more than we
    // use. On 32-bit though, the scarce resource is the address space -
    // committed or not.
    if (can_request_more_memory_) {
      for (auto& vmem : owned_code_space_) {
        if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
                            permission)) {
          return false;
        }
        TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
                   executable);
      }
      is_executable_ = executable;
      return true;
    }
#endif
    for (auto& region : allocated_code_space_.regions()) {
      // {allocated_code_space_} is fine-grained, so we need to page-align it.
      size_t region_size =
          RoundUp(region.size(), page_allocator->AllocatePageSize());
      if (!SetPermissions(page_allocator, region.begin(), region_size,
                          permission)) {
        return false;
      }
      TRACE_HEAP("Set %p:%p to executable:%d\n",
                 reinterpret_cast<void*>(region.begin()),
                 reinterpret_cast<void*>(region.end()), executable);
    }
  }
  is_executable_ = executable;
  return true;
}
1315 :
1316 691251 : void NativeModule::SampleCodeSize(
1317 : Counters* counters, NativeModule::CodeSamplingTime sampling_time) const {
1318 : size_t code_size = sampling_time == kSampling
1319 : ? committed_code_space()
1320 691251 : : generated_code_size_.load(std::memory_order_relaxed);
1321 691251 : int code_size_mb = static_cast<int>(code_size / MB);
1322 : Histogram* histogram = nullptr;
1323 691251 : switch (sampling_time) {
1324 : case kAfterBaseline:
1325 : histogram = counters->wasm_module_code_size_mb_after_baseline();
1326 2196 : break;
1327 : case kAfterTopTier:
1328 : histogram = counters->wasm_module_code_size_mb_after_top_tier();
1329 144 : break;
1330 : case kSampling:
1331 : histogram = counters->wasm_module_code_size_mb();
1332 688911 : break;
1333 : }
1334 691251 : histogram->AddSample(code_size_mb);
1335 691251 : }
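// A minimal sketch of the unit conversion in SampleCodeSize above: byte
// counts are truncated to whole megabytes before being recorded, keeping
// histogram values small. The Histogram type here is a hypothetical stand-in.
#include <cstddef>
#include <cstdio>

struct Histogram {  // Stand-in for a real counters sink.
  void AddSample(int value) { std::printf("code size: %d MB\n", value); }
};

void SampleCodeSizeMB(Histogram* histogram, size_t code_size_bytes) {
  constexpr size_t MB = 1024 * 1024;
  histogram->AddSample(static_cast<int>(code_size_bytes / MB));  // Truncates.
}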
1336 :
1337 739920 : WasmCode* NativeModule::AddCompiledCode(WasmCompilationResult result) {
1338 1479840 : return AddCompiledCode({&result, 1})[0];
1339 : }
1340 :
1341 1008180 : std::vector<WasmCode*> NativeModule::AddCompiledCode(
1342 : Vector<WasmCompilationResult> results) {
1343 : DCHECK(!results.empty());
1344 : // First, allocate code space for all the results.
1345 : size_t total_code_space = 0;
1346 3131412 : for (auto& result : results) {
1347 : DCHECK(result.succeeded());
1348 2123232 : total_code_space += RoundUp<kCodeAlignment>(result.code_desc.instr_size);
1349 : }
1350 1008180 : Vector<byte> code_space = AllocateForCode(total_code_space);
1351 :
1352 1008891 : std::vector<std::unique_ptr<WasmCode>> generated_code;
1353 1008892 : generated_code.reserve(results.size());
1354 :
1355 : // Now copy the generated code into the code space and relocate it.
1356 3131736 : for (auto& result : results) {
1357 : DCHECK_EQ(result.code_desc.buffer, result.instr_buffer.get());
1358 2125704 : size_t code_size = RoundUp<kCodeAlignment>(result.code_desc.instr_size);
1359 1062852 : Vector<byte> this_code_space = code_space.SubVector(0, code_size);
1360 : code_space += code_size;
1361 6373381 : generated_code.emplace_back(AddCodeWithCodeSpace(
1362 1062848 : result.func_index, result.code_desc, result.frame_slot_count,
1363 : result.tagged_parameter_slots, std::move(result.protected_instructions),
1364 : std::move(result.source_positions),
1365 : GetCodeKindForExecutionTier(result.result_tier), result.result_tier,
1366 1061989 : this_code_space));
1367 : }
1368 : DCHECK_EQ(0, code_space.size());
1369 :
1370 : // Under the {allocation_mutex_}, publish the code. The published code is put
1371 : // into the top-most surrounding {WasmCodeRefScope} by {PublishCodeLocked}.
1372 : std::vector<WasmCode*> code_vector;
1373 1007295 : code_vector.reserve(results.size());
1374 : {
1375 1008134 : base::MutexGuard lock(&allocation_mutex_);
1376 2072546 : for (auto& result : generated_code)
1377 2127354 : code_vector.push_back(PublishCodeLocked(std::move(result)));
1378 : }
1379 :
1380 1008890 : return code_vector;
1381 : }
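// A standalone sketch of the batching strategy in AddCompiledCode above:
// sum the aligned sizes of all compilation results, allocate the code space
// once, then carve per-function slices off the front until nothing remains.
// The alignment value and all names are illustrative assumptions.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

constexpr size_t kCodeAlignment = 32;  // Illustrative.

constexpr size_t RoundUpAligned(size_t size) {
  return (size + kCodeAlignment - 1) & ~(kCodeAlignment - 1);
}

// Assumes {base} points at enough freshly reserved code space for all sizes.
std::vector<std::pair<uint8_t*, size_t>> CarveCodeSpace(
    uint8_t* base, const std::vector<size_t>& instr_sizes) {
  size_t total = 0;
  for (size_t size : instr_sizes) total += RoundUpAligned(size);

  std::vector<std::pair<uint8_t*, size_t>> slices;
  slices.reserve(instr_sizes.size());
  uint8_t* cursor = base;
  for (size_t size : instr_sizes) {
    size_t aligned = RoundUpAligned(size);
    slices.emplace_back(cursor, aligned);  // This function's slice.
    cursor += aligned;                     // Advance past it.
  }
  // Everything was handed out, mirroring DCHECK_EQ(0, code_space.size()).
  assert(cursor == base + total);
  return slices;
}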
1382 :
1383 0 : void NativeModule::FreeCode(Vector<WasmCode* const> codes) {
1384 : // TODO(clemensh): Implement.
1385 0 : }
1386 :
1387 1242392 : void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
1388 1242392 : base::MutexGuard lock(&native_modules_mutex_);
1389 : TRACE_HEAP("Freeing NativeModule %p\n", native_module);
1390 2484784 : for (auto& code_space : native_module->owned_code_space_) {
1391 : DCHECK(code_space.IsReserved());
1392 : TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n",
1393 : code_space.address(), code_space.end(), code_space.size());
1394 :
1395 : #if defined(V8_OS_WIN_X64)
1396 : if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
1397 : FLAG_win64_unwinding_info) {
1398 : win64_unwindinfo::UnregisterNonABICompliantCodeRange(
1399 : reinterpret_cast<void*>(code_space.address()));
1400 : }
1401 : #endif
1402 :
1403 2484784 : lookup_map_.erase(code_space.address());
1404 1242392 : memory_tracker_->ReleaseReservation(code_space.size());
1405 1242392 : code_space.Free();
1406 : DCHECK(!code_space.IsReserved());
1407 : }
1408 : native_module->owned_code_space_.clear();
1409 :
1410 : size_t code_size = native_module->committed_code_space_.load();
1411 : DCHECK(IsAligned(code_size, AllocatePageSize()));
1412 : size_t old_committed = total_committed_code_space_.fetch_sub(code_size);
1413 : DCHECK_LE(code_size, old_committed);
1414 : USE(old_committed);
1415 1242392 : }
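// A small sketch of the committed-space bookkeeping in FreeNativeModule
// above: a global atomic tracks the total committed code space, and freeing
// a module subtracts its share with fetch_sub while checking for underflow.
// The names are illustrative, not the V8 members.
#include <atomic>
#include <cassert>
#include <cstddef>

std::atomic<size_t> total_committed_code_space{0};

void OnNativeModuleFreed(size_t module_code_size) {
  size_t old_committed =
      total_committed_code_space.fetch_sub(module_code_size);
  assert(module_code_size <= old_committed);  // No underflow expected.
  (void)old_committed;  // Only needed for the check, like USE() above.
}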
1416 :
1417 55531331 : NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
1418 55531331 : base::MutexGuard lock(&native_modules_mutex_);
1419 55531731 : if (lookup_map_.empty()) return nullptr;
1420 :
1421 : auto iter = lookup_map_.upper_bound(pc);
1422 13806908 : if (iter == lookup_map_.begin()) return nullptr;
1423 : --iter;
1424 13799250 : Address region_start = iter->first;
1425 13799250 : Address region_end = iter->second.first;
1426 13799250 : NativeModule* candidate = iter->second.second;
1427 :
1428 : DCHECK_NOT_NULL(candidate);
1429 13799250 : return region_start <= pc && pc < region_end ? candidate : nullptr;
1430 : }
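// A standalone sketch of the range lookup in LookupNativeModule above: a
// std::map keyed by region start, whose value holds the region end and a
// payload. upper_bound(pc) finds the first region starting after pc, so
// stepping back one entry yields the only possible containing region.
#include <cstdint>
#include <map>
#include <utility>

using Address = uintptr_t;

template <typename T>
T* LookupByAddress(const std::map<Address, std::pair<Address, T*>>& map,
                   Address pc) {
  if (map.empty()) return nullptr;
  auto iter = map.upper_bound(pc);          // First region starting after pc.
  if (iter == map.begin()) return nullptr;  // pc precedes every region.
  --iter;                                   // Last region starting at/below pc.
  Address region_start = iter->first;
  Address region_end = iter->second.first;
  return region_start <= pc && pc < region_end ? iter->second.second : nullptr;
}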
1431 :
1432 55482592 : WasmCode* WasmCodeManager::LookupCode(Address pc) const {
1433 55482592 : NativeModule* candidate = LookupNativeModule(pc);
1434 55482962 : return candidate ? candidate->Lookup(pc) : nullptr;
1435 : }
1436 :
1437 : // TODO(v8:7424): Code protection scopes are not yet supported with shared code
1438 : // enabled and need to be revisited to work with --wasm-shared-code as well.
1439 144376 : NativeModuleModificationScope::NativeModuleModificationScope(
1440 : NativeModule* native_module)
1441 144376 : : native_module_(native_module) {
1442 144376 : if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1443 0 : (native_module_->modification_scope_depth_++) == 0) {
1444 0 : bool success = native_module_->SetExecutable(false);
1445 0 : CHECK(success);
1446 : }
1447 144376 : }
1448 :
1449 288752 : NativeModuleModificationScope::~NativeModuleModificationScope() {
1450 144376 : if (FLAG_wasm_write_protect_code_memory && native_module_ &&
1451 0 : (native_module_->modification_scope_depth_--) == 1) {
1452 0 : bool success = native_module_->SetExecutable(true);
1453 0 : CHECK(success);
1454 : }
1455 144376 : }
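// A sketch of the RAII idiom behind NativeModuleModificationScope above: a
// depth counter makes nesting cheap, so only the outermost scope flips the
// code space to writable and only the last scope to unwind flips it back.
// The Module type and its permission switch are stand-ins.
struct Module {
  int modification_scope_depth = 0;
  bool SetExecutable(bool) { return true; }  // Stand-in permission switch.
};

class ModificationScope {
 public:
  explicit ModificationScope(Module* module) : module_(module) {
    // Only the first (outermost) scope disables execution.
    if (module_ && module_->modification_scope_depth++ == 0) {
      bool success = module_->SetExecutable(false);
      (void)success;  // A real implementation would CHECK this.
    }
  }
  ~ModificationScope() {
    // Only the last scope to unwind re-enables execution.
    if (module_ && --module_->modification_scope_depth == 0) {
      bool success = module_->SetExecutable(true);
      (void)success;
    }
  }

 private:
  Module* module_;
};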
1456 :
1457 : namespace {
1458 : thread_local WasmCodeRefScope* current_code_refs_scope = nullptr;
1459 : } // namespace
1460 :
1461 58379082 : WasmCodeRefScope::WasmCodeRefScope()
1462 60856737 : : previous_scope_(current_code_refs_scope) {
1463 60856737 : current_code_refs_scope = this;
1464 58379082 : }
1465 :
1466 121715054 : WasmCodeRefScope::~WasmCodeRefScope() {
1467 : DCHECK_EQ(this, current_code_refs_scope);
1468 60857527 : current_code_refs_scope = previous_scope_;
1469 : std::vector<WasmCode*> code_ptrs;
1470 60857527 : code_ptrs.reserve(code_ptrs_.size());
1471 : code_ptrs.assign(code_ptrs_.begin(), code_ptrs_.end());
1472 60857499 : WasmCode::DecrementRefCount(VectorOf(code_ptrs));
1473 60857527 : }
1474 :
1475 : // static
1476 19579213 : void WasmCodeRefScope::AddRef(WasmCode* code) {
1477 : DCHECK_NOT_NULL(code);
1478 19579213 : WasmCodeRefScope* current_scope = current_code_refs_scope;
1479 : DCHECK_NOT_NULL(current_scope);
1480 : auto entry = current_scope->code_ptrs_.insert(code);
1481 : // If we added a new entry, increment the ref counter.
1482 19579165 : if (entry.second) code->IncRef();
1483 19579165 : }
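// A standalone sketch of the WasmCodeRefScope mechanism above: scopes form
// a per-thread stack through a thread_local head pointer; AddRef inserts
// into the innermost scope's set so each object is referenced at most once
// per scope, and the destructor drops all acquired references. RefCounted
// is a stand-in for WasmCode.
#include <unordered_set>

struct RefCounted {
  int ref_count = 1;
  void IncRef() { ++ref_count; }
  void DecRef() { --ref_count; }
};

class CodeRefScope {
 public:
  CodeRefScope() : previous_(current_) { current_ = this; }
  ~CodeRefScope() {
    current_ = previous_;  // Pop this scope off the thread's stack.
    for (RefCounted* ptr : ptrs_) ptr->DecRef();
  }

  // Requires an active scope on this thread, like DCHECK_NOT_NULL above.
  static void AddRef(RefCounted* ptr) {
    if (current_->ptrs_.insert(ptr).second) ptr->IncRef();  // First insert.
  }

 private:
  static thread_local CodeRefScope* current_;
  CodeRefScope* const previous_;
  std::unordered_set<RefCounted*> ptrs_;
};

thread_local CodeRefScope* CodeRefScope::current_ = nullptr;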
1484 :
1485 : } // namespace wasm
1486 : } // namespace internal
1487 122028 : } // namespace v8
1488 : #undef TRACE_HEAP