LCOV - code coverage report
Current view: top level - src/wasm - wasm-code-manager.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 410 476 86.1 %
Date: 2019-04-18 Functions: 61 72 84.7 %

          Line data    Source code
       1             : // Copyright 2017 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/wasm/wasm-code-manager.h"
       6             : 
       7             : #include <iomanip>
       8             : 
       9             : #include "src/assembler-inl.h"
      10             : #include "src/base/adapters.h"
      11             : #include "src/base/macros.h"
      12             : #include "src/base/platform/platform.h"
      13             : #include "src/counters.h"
      14             : #include "src/disassembler.h"
      15             : #include "src/globals.h"
      16             : #include "src/log.h"
      17             : #include "src/macro-assembler-inl.h"
      18             : #include "src/macro-assembler.h"
      19             : #include "src/objects-inl.h"
      20             : #include "src/ostreams.h"
      21             : #include "src/snapshot/embedded-data.h"
      22             : #include "src/vector.h"
      23             : #include "src/wasm/compilation-environment.h"
      24             : #include "src/wasm/function-compiler.h"
      25             : #include "src/wasm/jump-table-assembler.h"
      26             : #include "src/wasm/wasm-import-wrapper-cache.h"
      27             : #include "src/wasm/wasm-module.h"
      28             : #include "src/wasm/wasm-objects-inl.h"
      29             : #include "src/wasm/wasm-objects.h"
      30             : 
      31             : #if defined(V8_OS_WIN_X64)
      32             : #include "src/unwinding-info-win64.h"
      33             : #endif
      34             : 
      35             : #define TRACE_HEAP(...)                                   \
      36             :   do {                                                    \
      37             :     if (FLAG_trace_wasm_native_heap) PrintF(__VA_ARGS__); \
      38             :   } while (false)
      39             : 
      40             : namespace v8 {
      41             : namespace internal {
      42             : namespace wasm {
      43             : 
      44             : using trap_handler::ProtectedInstructionData;
      45             : 
// Returns {region} to the pool, keeping {regions_} sorted by address and
// maximally coalesced: the new region is merged with any region it is
// directly adjacent to (from below and/or above).
void DisjointAllocationPool::Merge(base::AddressRegion region) {
  auto dest_it = regions_.begin();
  auto dest_end = regions_.end();

  // Skip over dest regions strictly before {region}.
  while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;

  // After last dest region: insert and done.
  if (dest_it == dest_end) {
    regions_.push_back(region);
    return;
  }

  // Adjacent (from below) to dest: merge and done.
  if (dest_it->begin() == region.end()) {
    base::AddressRegion merged_region{region.begin(),
                                      region.size() + dest_it->size()};
    DCHECK_EQ(merged_region.end(), dest_it->end());
    *dest_it = merged_region;
    return;
  }

  // Before dest: insert and done.
  if (dest_it->begin() > region.end()) {
    regions_.insert(dest_it, region);
    return;
  }

  // Src is adjacent from above. Merge and check whether the merged region is
  // now adjacent to the next region (i.e. {region} filled a gap exactly).
  DCHECK_EQ(dest_it->end(), region.begin());
  dest_it->set_size(dest_it->size() + region.size());
  DCHECK_EQ(dest_it->end(), region.end());
  auto next_dest = dest_it;
  ++next_dest;
  if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
    dest_it->set_size(dest_it->size() + next_dest->size());
    DCHECK_EQ(dest_it->end(), next_dest->end());
    regions_.erase(next_dest);
  }
}
      87             : 
      88     3857277 : base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
      89     3857281 :   for (auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
      90     3857279 :     if (size > it->size()) continue;
      91             :     base::AddressRegion ret{it->begin(), size};
      92     3857275 :     if (size == it->size()) {
      93             :       regions_.erase(it);
      94             :     } else {
      95     3857273 :       *it = base::AddressRegion{it->begin() + size, it->size() - size};
      96             :     }
      97     3857271 :     return ret;
      98             :   }
      99           2 :   return {};
     100             : }
     101             : 
     102        1157 : Address WasmCode::constant_pool() const {
     103             :   if (FLAG_enable_embedded_constant_pool) {
     104             :     if (constant_pool_offset_ < code_comments_offset_) {
     105             :       return instruction_start() + constant_pool_offset_;
     106             :     }
     107             :   }
     108             :   return kNullAddress;
     109             : }
     110             : 
     111         792 : Address WasmCode::handler_table() const {
     112         792 :   return instruction_start() + handler_table_offset_;
     113             : }
     114             : 
     115     2616036 : uint32_t WasmCode::handler_table_size() const {
     116             :   DCHECK_GE(constant_pool_offset_, handler_table_offset_);
     117     2616036 :   return static_cast<uint32_t>(constant_pool_offset_ - handler_table_offset_);
     118             : }
     119             : 
     120           0 : Address WasmCode::code_comments() const {
     121           0 :   return instruction_start() + code_comments_offset_;
     122             : }
     123             : 
     124           0 : uint32_t WasmCode::code_comments_size() const {
     125             :   DCHECK_GE(unpadded_binary_size_, code_comments_offset_);
     126           0 :   return static_cast<uint32_t>(unpadded_binary_size_ - code_comments_offset_);
     127             : }
     128             : 
     129           0 : size_t WasmCode::trap_handler_index() const {
     130      109686 :   CHECK(HasTrapHandlerIndex());
     131       54843 :   return static_cast<size_t>(trap_handler_index_);
     132             : }
     133             : 
// Records the index handed out by the trap handler registration; callers
// (see {RegisterTrapHandlerData}) only set this once per code object.
void WasmCode::set_trap_handler_index(size_t value) {
  trap_handler_index_ = value;
}
     137             : 
// Registers this code's protected instructions with the trap handler, so
// faults on those instructions can be recognized and recovered from. No-op
// for non-function code and for code without protected instructions.
void WasmCode::RegisterTrapHandlerData() {
  DCHECK(!HasTrapHandlerIndex());
  if (kind() != WasmCode::kFunction) return;
  if (protected_instructions_.empty()) return;

  Address base = instruction_start();

  size_t size = instructions().size();
  const int index =
      RegisterHandlerData(base, size, protected_instructions().size(),
                          protected_instructions().start());

  // TODO(eholk): if index is negative, fail.
  CHECK_LE(0, index);
  set_trap_handler_index(static_cast<size_t>(index));
}
     154             : 
     155     3907757 : bool WasmCode::HasTrapHandlerIndex() const { return trap_handler_index_ >= 0; }
     156             : 
     157      802325 : bool WasmCode::ShouldBeLogged(Isolate* isolate) {
     158             :   // The return value is cached in {WasmEngine::IsolateData::log_codes}. Ensure
     159             :   // to call {WasmEngine::EnableCodeLogging} if this return value would change
     160             :   // for any isolate. Otherwise we might lose code events.
     161      939699 :   return isolate->logger()->is_listening_to_code_events() ||
     162      802325 :          isolate->is_profiling();
     163             : }
     164             : 
// Emits a code-creation event (and, if available, line-position info) for
// this code object to {isolate}'s profiler/logger. Must only be called when
// {ShouldBeLogged} returns true.
void WasmCode::LogCode(Isolate* isolate) const {
  DCHECK(ShouldBeLogged(isolate));
  // Anonymous code has no function index, hence nothing useful to log.
  if (IsAnonymous()) return;

  ModuleWireBytes wire_bytes(native_module()->wire_bytes());
  // TODO(herhut): Allow to log code without on-heap round-trip of the name.
  WireBytesRef name_ref =
      native_module()->module()->LookupFunctionName(wire_bytes, index());
  WasmName name_vec = wire_bytes.GetNameOrNull(name_ref);
  if (!name_vec.empty()) {
    HandleScope scope(isolate);
    // Materialize the wire-bytes name as a heap string to get a C string.
    MaybeHandle<String> maybe_name = isolate->factory()->NewStringFromUtf8(
        Vector<const char>::cast(name_vec));
    Handle<String> name;
    if (!maybe_name.ToHandle(&name)) {
      name = isolate->factory()->NewStringFromAsciiChecked("<name too long>");
    }
    int name_length;
    auto cname =
        name->ToCString(AllowNullsFlag::DISALLOW_NULLS,
                        RobustnessFlag::ROBUST_STRING_TRAVERSAL, &name_length);
    PROFILE(isolate,
            CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                            {cname.get(), static_cast<size_t>(name_length)}));
  } else {
    // No entry in the name section: synthesize "wasm-function[<index>]".
    EmbeddedVector<char, 32> generated_name;
    int length = SNPrintF(generated_name, "wasm-function[%d]", index());
    generated_name.Truncate(length);
    PROFILE(isolate, CodeCreateEvent(CodeEventListener::FUNCTION_TAG, this,
                                     generated_name));
  }

  if (!source_positions().empty()) {
    LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(instruction_start(),
                                                       source_positions()));
  }
}
     202             : 
// Debug-only consistency check: verifies that every relocation entry in this
// code object has an allowed mode and points at a valid target. No-op in
// release builds.
void WasmCode::Validate() const {
#ifdef DEBUG
  // We expect certain relocation info modes to never appear in {WasmCode}
  // objects or to be restricted to a small set of valid values. Hence the
  // iteration below does not use a mask, but visits all relocation data.
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    switch (mode) {
      case RelocInfo::WASM_CALL: {
        // Direct wasm calls must target this module's jump table.
        Address target = it.rinfo()->wasm_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->jump_table_, code);
        CHECK(code->contains(target));
        break;
      }
      case RelocInfo::WASM_STUB_CALL: {
        // Stub calls target the runtime-stub jump table (embedded builtins)
        // or a copied runtime stub, depending on the build configuration.
        Address target = it.rinfo()->wasm_stub_call_address();
        WasmCode* code = native_module_->Lookup(target);
        CHECK_NOT_NULL(code);
#ifdef V8_EMBEDDED_BUILTINS
        CHECK_EQ(WasmCode::kJumpTable, code->kind());
        CHECK_EQ(native_module()->runtime_stub_table_, code);
        CHECK(code->contains(target));
#else
        CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
        CHECK_EQ(target, code->instruction_start());
#endif
        break;
      }
      case RelocInfo::INTERNAL_REFERENCE:
      case RelocInfo::INTERNAL_REFERENCE_ENCODED: {
        // Internal references must stay within this code object.
        Address target = it.rinfo()->target_internal_reference();
        CHECK(contains(target));
        break;
      }
      case RelocInfo::EXTERNAL_REFERENCE:
      case RelocInfo::CONST_POOL:
      case RelocInfo::VENEER_POOL:
        // These are OK to appear.
        break;
      default:
        FATAL("Unexpected mode: %d", mode);
    }
  }
#endif
}
     252             : 
     253     1428185 : void WasmCode::MaybePrint(const char* name) const {
     254             :   // Determines whether flags want this code to be printed.
     255     1428245 :   if ((FLAG_print_wasm_code && kind() == kFunction) ||
     256     2855996 :       (FLAG_print_wasm_stub_code && kind() != kFunction) || FLAG_print_code) {
     257          60 :     Print(name);
     258             :   }
     259     1428186 : }
     260             : 
     261          59 : void WasmCode::Print(const char* name) const {
     262         120 :   StdoutStream os;
     263          61 :   os << "--- WebAssembly code ---\n";
     264          61 :   Disassemble(name, os);
     265          61 :   os << "--- End code ---\n";
     266          61 : }
     267             : 
// Writes a human-readable dump of this code object to {os}: header info
// (index, kind, compiler, sizes) always, and — when the disassembler is
// compiled in — the decoded instructions plus all metadata tables.
// {current_pc} is forwarded to the disassembler.
void WasmCode::Disassemble(const char* name, std::ostream& os,
                           Address current_pc) const {
  if (name) os << "name: " << name << "\n";
  if (!IsAnonymous()) os << "index: " << index() << "\n";
  os << "kind: " << GetWasmCodeKindAsString(kind_) << "\n";
  os << "compiler: " << (is_liftoff() ? "Liftoff" : "TurboFan") << "\n";
  size_t padding = instructions().size() - unpadded_binary_size_;
  os << "Body (size = " << instructions().size() << " = "
     << unpadded_binary_size_ << " + " << padding << " padding)\n";

#ifdef ENABLE_DISASSEMBLER
  // The instruction stream ends at the first metadata section present
  // (constant pool, safepoint table, or handler table), whichever is lowest.
  size_t instruction_size = unpadded_binary_size_;
  if (constant_pool_offset_ < instruction_size) {
    instruction_size = constant_pool_offset_;
  }
  if (safepoint_table_offset_ && safepoint_table_offset_ < instruction_size) {
    instruction_size = safepoint_table_offset_;
  }
  if (handler_table_offset_ < instruction_size) {
    instruction_size = handler_table_offset_;
  }
  DCHECK_LT(0, instruction_size);
  os << "Instructions (size = " << instruction_size << ")\n";
  Disassembler::Decode(nullptr, &os, instructions().start(),
                       instructions().start() + instruction_size,
                       CodeReference(this), current_pc);
  os << "\n";

  // Exception handler table, if present.
  if (handler_table_size() > 0) {
    HandlerTable table(handler_table(), handler_table_size());
    os << "Exception Handler Table (size = " << table.NumberOfReturnEntries()
       << "):\n";
    table.HandlerTableReturnPrint(os);
    os << "\n";
  }

  // Protected (trap-handler) instructions, if present.
  if (!protected_instructions_.empty()) {
    os << "Protected instructions:\n pc offset  land pad\n";
    for (auto& data : protected_instructions()) {
      os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
         << std::hex << data.landing_offset << "\n";
    }
    os << "\n";
  }

  // Source position table, if present.
  if (!source_positions().empty()) {
    os << "Source positions:\n pc offset  position\n";
    for (SourcePositionTableIterator it(source_positions()); !it.done();
         it.Advance()) {
      os << std::setw(10) << std::hex << it.code_offset() << std::dec
         << std::setw(10) << it.source_position().ScriptOffset()
         << (it.is_statement() ? "  statement" : "") << "\n";
    }
    os << "\n";
  }

  // Safepoint table, if present.
  if (safepoint_table_offset_ > 0) {
    SafepointTable table(instruction_start(), safepoint_table_offset_,
                         stack_slots_);
    os << "Safepoints (size = " << table.size() << ")\n";
    for (uint32_t i = 0; i < table.length(); i++) {
      uintptr_t pc_offset = table.GetPcOffset(i);
      os << reinterpret_cast<const void*>(instruction_start() + pc_offset);
      os << std::setw(6) << std::hex << pc_offset << "  " << std::dec;
      table.PrintEntry(i, os);
      os << " (sp -> fp)";
      SafepointEntry entry = table.GetEntry(i);
      if (entry.trampoline_pc() != -1) {
        os << " trampoline: " << std::hex << entry.trampoline_pc() << std::dec;
      }
      if (entry.has_deoptimization_index()) {
        os << " deopt: " << std::setw(6) << entry.deoptimization_index();
      }
      os << "\n";
    }
    os << "\n";
  }

  // Relocation info and, finally, embedded code comments.
  os << "RelocInfo (size = " << reloc_info_.size() << ")\n";
  for (RelocIterator it(instructions(), reloc_info(), constant_pool());
       !it.done(); it.next()) {
    it.rinfo()->Print(nullptr, os);
  }
  os << "\n";

  if (code_comments_size() > 0) {
    PrintCodeCommentsSection(os, code_comments(), code_comments_size());
  }
#endif  // ENABLE_DISASSEMBLER
}
     358             : 
// Returns a human-readable name for a code kind (used in code dumps).
const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
  switch (kind) {
    case WasmCode::kFunction:
      return "wasm function";
    case WasmCode::kWasmToJsWrapper:
      return "wasm-to-js";
    case WasmCode::kRuntimeStub:
      return "runtime-stub";
    case WasmCode::kInterpreterEntry:
      return "interpreter entry";
    case WasmCode::kJumpTable:
      return "jump table";
  }
  // Unreachable for valid enum values; placates compilers that cannot prove
  // the switch above is exhaustive.
  return "unknown kind";
}
     374             : 
WasmCode::~WasmCode() {
  // Release the trap handler data registered for this code, if any.
  if (HasTrapHandlerIndex()) {
    // The release API takes an int; guard the narrowing cast.
    CHECK_LT(trap_handler_index(),
             static_cast<size_t>(std::numeric_limits<int>::max()));
    trap_handler::ReleaseHandlerData(static_cast<int>(trap_handler_index()));
  }
}
     382             : 
// Decrements the ref count on code that might already be considered dead.
// Returns true iff the ref count dropped to zero (the caller must then free
// the code).
V8_WARN_UNUSED_RESULT bool WasmCode::DecRefOnPotentiallyDeadCode() {
  if (native_module_->engine()->AddPotentiallyDeadCode(this)) {
    // The code just became potentially dead. The ref count we wanted to
    // decrement is now transferred to the set of potentially dead code, and
    // will be decremented when the next GC is run.
    return false;
  }
  // If we reach here, the code was already potentially dead. Decrement the ref
  // count, and return true if it drops to zero.
  int old_count = ref_count_.load(std::memory_order_relaxed);
  while (true) {
    DCHECK_LE(1, old_count);
    // CAS loop: on failure {old_count} is reloaded with the current value, so
    // the loop retries with fresh data until the decrement succeeds.
    if (ref_count_.compare_exchange_weak(old_count, old_count - 1,
                                         std::memory_order_relaxed)) {
      return old_count == 1;
    }
  }
}
     401             : 
     402             : // static
     403    62299577 : void WasmCode::DecrementRefCount(Vector<WasmCode*> code_vec) {
     404             :   // Decrement the ref counter of all given code objects. Keep the ones whose
     405             :   // ref count drops to zero.
     406             :   std::unordered_map<NativeModule*, std::vector<WasmCode*>> dead_code;
     407    82116763 :   for (WasmCode* code : code_vec) {
     408     9908535 :     if (code->DecRef()) dead_code[code->native_module()].push_back(code);
     409             :   }
     410             : 
     411             :   // For each native module, free all its code objects at once.
     412    62299635 :   for (auto& dead_code_entry : dead_code) {
     413             :     NativeModule* native_module = dead_code_entry.first;
     414             :     Vector<WasmCode*> code_vec = VectorOf(dead_code_entry.second);
     415             :     native_module->FreeCode(code_vec);
     416             :   }
     417    62299635 : }
     418             : 
// Constructs a NativeModule that manages {code_space}. Installs itself into
// {*shared_this} (which must be an empty shared_ptr) before creating the
// compilation state, so the latter can reference the module via shared_ptr.
NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled,
                           bool can_request_more, VirtualMemory code_space,
                           std::shared_ptr<const WasmModule> module,
                           std::shared_ptr<Counters> async_counters,
                           std::shared_ptr<NativeModule>* shared_this)
    : enabled_features_(enabled),
      module_(std::move(module)),
      import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
          new WasmImportWrapperCache(this))),
      free_code_space_(code_space.region()),
      engine_(engine),
      can_request_more_memory_(can_request_more),
      use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
                                                             : kNoTrapHandler) {
  // We receive a pointer to an empty {std::shared_ptr}, and install ourselves
  // there.
  DCHECK_NOT_NULL(shared_this);
  DCHECK_NULL(*shared_this);
  shared_this->reset(this);
  compilation_state_ =
      CompilationState::New(*shared_this, std::move(async_counters));
  DCHECK_NOT_NULL(module_);
  owned_code_space_.emplace_back(std::move(code_space));
  owned_code_.reserve(num_functions());

#if defined(V8_OS_WIN_X64)
  // On some platforms, specifically Win64, we need to reserve some pages at
  // the beginning of an executable space.
  // See src/heap/spaces.cc, MemoryAllocator::InitializeCodePageAllocator() and
  // https://cs.chromium.org/chromium/src/components/crash/content/app/crashpad_win.cc?rcl=fd680447881449fba2edcf0589320e7253719212&l=204
  // for details.
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      FLAG_win64_unwinding_info) {
    AllocateForCode(Heap::GetCodeRangeReservedAreaSize());
  }
#endif

  // Allocate the (zero-initialized) code table and a jump table with one slot
  // per declared function.
  uint32_t num_wasm_functions = module_->num_declared_functions;
  if (num_wasm_functions > 0) {
    code_table_.reset(new WasmCode* [num_wasm_functions] {});

    WasmCodeRefScope code_ref_scope;
    jump_table_ = CreateEmptyJumpTable(
        JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
  }
}
     465             : 
     466     1094224 : void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
     467     1094224 :   WasmCodeRefScope code_ref_scope;
     468             :   DCHECK_LE(num_functions(), max_functions);
     469     1094224 :   WasmCode** new_table = new WasmCode* [max_functions] {};
     470     1094224 :   if (module_->num_declared_functions > 0) {
     471           0 :     memcpy(new_table, code_table_.get(),
     472             :            module_->num_declared_functions * sizeof(*new_table));
     473             :   }
     474             :   code_table_.reset(new_table);
     475             : 
     476             :   // Re-allocate jump table.
     477     1094224 :   jump_table_ = CreateEmptyJumpTable(
     478     1094224 :       JumpTableAssembler::SizeForNumberOfSlots(max_functions));
     479     1094224 : }
     480             : 
     481      137374 : void NativeModule::LogWasmCodes(Isolate* isolate) {
     482      274744 :   if (!WasmCode::ShouldBeLogged(isolate)) return;
     483             : 
     484             :   // TODO(titzer): we skip the logging of the import wrappers
     485             :   // here, but they should be included somehow.
     486           4 :   int start = module()->num_imported_functions;
     487           4 :   int end = start + module()->num_declared_functions;
     488           4 :   WasmCodeRefScope code_ref_scope;
     489          12 :   for (int func_index = start; func_index < end; ++func_index) {
     490           4 :     if (WasmCode* code = GetCode(func_index)) code->LogCode(isolate);
     491             :   }
     492             : }
     493             : 
     494      482526 : CompilationEnv NativeModule::CreateCompilationEnv() const {
     495      482526 :   return {module(), use_trap_handler_, kRuntimeExceptionSupport,
     496      482526 :           enabled_features_};
     497             : }
     498             : 
     499         753 : WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
     500         753 :   return AddAndPublishAnonymousCode(code, WasmCode::kFunction);
     501             : }
     502             : 
     503        2628 : void NativeModule::UseLazyStubs() {
     504        2628 :   uint32_t start = module_->num_imported_functions;
     505        2628 :   uint32_t end = start + module_->num_declared_functions;
     506       32078 :   for (uint32_t func_index = start; func_index < end; func_index++) {
     507       14725 :     UseLazyStub(func_index);
     508             :   }
     509        2628 : }
     510             : 
// Patches the jump-table slot of {func_index} (which must be a declared, not
// imported, function) to jump to the lazy compile stub, so the function is
// compiled on first call.
void NativeModule::UseLazyStub(uint32_t func_index) {
  DCHECK_LE(module_->num_imported_functions, func_index);
  DCHECK_LT(func_index,
            module_->num_imported_functions + module_->num_declared_functions);

  // Add jump table entry for jump to the lazy compile stub.
  uint32_t slot_index = func_index - module_->num_imported_functions;
  DCHECK_NE(runtime_stub_entry(WasmCode::kWasmCompileLazy), kNullAddress);
  JumpTableAssembler::EmitLazyCompileJumpSlot(
      jump_table_->instruction_start(), slot_index, func_index,
      runtime_stub_entry(WasmCode::kWasmCompileLazy), WasmCode::kFlushICache);
}
     523             : 
// TODO(mstarzinger): Remove {Isolate} parameter once {V8_EMBEDDED_BUILTINS}
// was removed and embedded builtins are no longer optional.
// Populates {runtime_stub_entries_} with one callable address per
// {WasmCode::RuntimeStubId}. With embedded builtins, a small jump table is
// emitted whose slots tail-call the off-heap builtins; otherwise each builtin
// is copied into the module's code space.
void NativeModule::SetRuntimeStubs(Isolate* isolate) {
  DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
#ifdef V8_EMBEDDED_BUILTINS
  WasmCodeRefScope code_ref_scope;
  // Allocate one jump-table slot per runtime stub.
  WasmCode* jump_table =
      CreateEmptyJumpTable(JumpTableAssembler::SizeForNumberOfStubSlots(
          WasmCode::kRuntimeStubCount));
  Address base = jump_table->instruction_start();
  EmbeddedData embedded_data = EmbeddedData::FromBlob();
// Build a (builtin id, stub id) pair for every entry of
// {WASM_RUNTIME_STUB_LIST}; traps map to their ThrowWasm* builtin.
#define RUNTIME_STUB(Name) {Builtins::k##Name, WasmCode::k##Name},
#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
  std::pair<Builtins::Name, WasmCode::RuntimeStubId> wasm_runtime_stubs[] = {
      WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
#undef RUNTIME_STUB
#undef RUNTIME_STUB_TRAP
  for (auto pair : wasm_runtime_stubs) {
    CHECK(embedded_data.ContainsBuiltin(pair.first));
    Address builtin = embedded_data.InstructionStartOfBuiltin(pair.first);
    // Emit without flushing; one flush for the whole table happens below.
    JumpTableAssembler::EmitRuntimeStubSlot(base, pair.second, builtin,
                                            WasmCode::kNoFlushICache);
    uint32_t slot_offset =
        JumpTableAssembler::StubSlotIndexToOffset(pair.second);
    runtime_stub_entries_[pair.second] = base + slot_offset;
  }
  // Flush the i-cache once for the fully emitted jump table.
  FlushInstructionCache(jump_table->instructions().start(),
                        jump_table->instructions().size());
  DCHECK_NULL(runtime_stub_table_);
  runtime_stub_table_ = jump_table;
#else  // V8_EMBEDDED_BUILTINS
  HandleScope scope(isolate);
  WasmCodeRefScope code_ref_scope;
  USE(runtime_stub_table_);  // Actually unused, but avoids ifdef's in header.
// Without embedded builtins, copy each builtin's code into this module and
// record the copy's entry point.
#define COPY_BUILTIN(Name)                                        \
  runtime_stub_entries_[WasmCode::k##Name] =                      \
      AddAndPublishAnonymousCode(                                 \
          isolate->builtins()->builtin_handle(Builtins::k##Name), \
          WasmCode::kRuntimeStub, #Name)                          \
          ->instruction_start();
#define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
#undef COPY_BUILTIN_TRAP
#undef COPY_BUILTIN
#endif  // V8_EMBEDDED_BUILTINS
  DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
}
     571             : 
// Copies the instructions of an on-heap {Code} object (typically a builtin)
// into this module's code space, relocates them, and publishes the result as
// an anonymous {WasmCode} object. Returns the published code.
WasmCode* NativeModule::AddAndPublishAnonymousCode(Handle<Code> code,
                                                   WasmCode::Kind kind,
                                                   const char* name) {
  // For off-heap builtins, we create a copy of the off-heap instruction stream
  // instead of the on-heap code object containing the trampoline. Ensure that
  // we do not apply the on-heap reloc info to the off-heap instructions.
  const size_t relocation_size =
      code->is_off_heap_trampoline() ? 0 : code->relocation_size();
  OwnedVector<byte> reloc_info;
  if (relocation_size > 0) {
    reloc_info = OwnedVector<byte>::New(relocation_size);
    memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
  }
  // Copy the source position table out of the GC-managed ByteArray into an
  // owned buffer, since WasmCode lives outside the managed heap.
  Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
                                     code->GetIsolate());
  OwnedVector<byte> source_pos =
      OwnedVector<byte>::New(source_pos_table->length());
  if (source_pos_table->length() > 0) {
    source_pos_table->copy_out(0, source_pos.start(),
                               source_pos_table->length());
  }
  Vector<const byte> instructions(
      reinterpret_cast<byte*>(code->InstructionStart()),
      static_cast<size_t>(code->InstructionSize()));
  const uint32_t stack_slots = static_cast<uint32_t>(
      code->has_safepoint_info() ? code->stack_slots() : 0);

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // Code objects contains real offsets but WasmCode expects an offset of 0 to
  // mean 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      code->has_safepoint_table() ? code->safepoint_table_offset() : 0);
  const size_t handler_table_offset =
      static_cast<size_t>(code->handler_table_offset());
  const size_t constant_pool_offset =
      static_cast<size_t>(code->constant_pool_offset());
  const size_t code_comments_offset =
      static_cast<size_t>(code->code_comments_offset());

  Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
  memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = reinterpret_cast<Address>(dst_code_bytes.begin()) -
                   code->InstructionStart();
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  // {orig_it} walks the original code in lockstep with {it} over the copy:
  // wasm call tags must be read from the original reloc info.
  RelocIterator orig_it(*code, mode_mask);
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next(), orig_it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmStubCall(mode)) {
      // Rewrite stub calls to target this module's runtime stub entries.
      uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  // Flush the i-cache after relocation.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  DCHECK_NE(kind, WasmCode::Kind::kInterpreterEntry);
  std::unique_ptr<WasmCode> new_code{new WasmCode{
      this,                                     // native_module
      WasmCode::kAnonymousFuncIndex,            // index
      dst_code_bytes,                           // instructions
      stack_slots,                              // stack_slots
      0,                                        // tagged_parameter_slots
      safepoint_table_offset,                   // safepoint_table_offset
      handler_table_offset,                     // handler_table_offset
      constant_pool_offset,                     // constant_pool_offset
      code_comments_offset,                     // code_comments_offset
      instructions.size(),                      // unpadded_binary_size
      OwnedVector<ProtectedInstructionData>{},  // protected_instructions
      std::move(reloc_info),                    // reloc_info
      std::move(source_pos),                    // source positions
      kind,                                     // kind
      ExecutionTier::kNone}};                   // tier
  new_code->MaybePrint(name);
  new_code->Validate();

  return PublishCode(std::move(new_code));
}
     662             : 
     663      374899 : std::unique_ptr<WasmCode> NativeModule::AddCode(
     664             :     uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
     665             :     uint32_t tagged_parameter_slots,
     666             :     OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
     667             :     OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
     668             :     ExecutionTier tier) {
     669             :   return AddCodeWithCodeSpace(index, desc, stack_slots, tagged_parameter_slots,
     670             :                               std::move(protected_instructions),
     671             :                               std::move(source_position_table), kind, tier,
     672     1499596 :                               AllocateForCode(desc.instr_size));
     673             : }
     674             : 
// Copies the code described by {desc} into {dst_code_bytes}, relocates wasm
// calls, wasm stub calls and other reloc entries, registers trap handler data,
// and returns a new (not yet published) {WasmCode} object.
std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
    uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
    uint32_t tagged_parameter_slots,
    OwnedVector<ProtectedInstructionData> protected_instructions,
    OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
    ExecutionTier tier, Vector<uint8_t> dst_code_bytes) {
  OwnedVector<byte> reloc_info;
  if (desc.reloc_size > 0) {
    // Reloc info grows downward from the end of the assembler buffer.
    reloc_info = OwnedVector<byte>::New(desc.reloc_size);
    memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
           desc.reloc_size);
  }

  // TODO(jgruber,v8:8758): Remove this translation. It exists only because
  // CodeDesc contains real offsets but WasmCode expects an offset of 0 to mean
  // 'empty'.
  const size_t safepoint_table_offset = static_cast<size_t>(
      desc.safepoint_table_size == 0 ? 0 : desc.safepoint_table_offset);
  const size_t handler_table_offset =
      static_cast<size_t>(desc.handler_table_offset);
  const size_t constant_pool_offset =
      static_cast<size_t>(desc.constant_pool_offset);
  const size_t code_comments_offset =
      static_cast<size_t>(desc.code_comments_offset);
  const size_t instr_size = static_cast<size_t>(desc.instr_size);

  memcpy(dst_code_bytes.begin(), desc.buffer,
         static_cast<size_t>(desc.instr_size));

  // Apply the relocation delta by iterating over the RelocInfo.
  intptr_t delta = dst_code_bytes.begin() - desc.buffer;
  int mode_mask = RelocInfo::kApplyMask |
                  RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
  Address constant_pool_start =
      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
  for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                        constant_pool_start, mode_mask);
       !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmCall(mode)) {
      // Direct wasm-to-wasm calls are tagged with the callee's function index.
      uint32_t call_tag = it.rinfo()->wasm_call_tag();
      Address target = GetCallTargetForFunction(call_tag);
      it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsWasmStubCall(mode)) {
      // Stub calls are tagged with a {WasmCode::RuntimeStubId}.
      uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
      DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
      Address entry = runtime_stub_entry(
          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
      it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta);
    }
  }

  std::unique_ptr<WasmCode> code{new WasmCode{
      this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
      safepoint_table_offset, handler_table_offset, constant_pool_offset,
      code_comments_offset, instr_size, std::move(protected_instructions),
      std::move(reloc_info), std::move(source_position_table), kind, tier}};
  code->MaybePrint();
  code->Validate();

  code->RegisterTrapHandlerData();

  // Flush the i-cache for the region holding the relocated code.
  // Do this last, as this seems to trigger an LTO bug that clobbers a register
  // on arm, see https://crbug.com/952759#c6.
  FlushInstructionCache(dst_code_bytes.start(), dst_code_bytes.size());

  return code;
}
     747             : 
     748     2853469 : WasmCode* NativeModule::PublishCode(std::unique_ptr<WasmCode> code) {
     749     2853469 :   base::MutexGuard lock(&allocation_mutex_);
     750     5706938 :   return PublishCodeLocked(std::move(code));
     751             : }
     752             : 
     753             : namespace {
     754     1053328 : WasmCode::Kind GetCodeKindForExecutionTier(ExecutionTier tier) {
     755     1053328 :   switch (tier) {
     756             :     case ExecutionTier::kInterpreter:
     757             :       return WasmCode::Kind::kInterpreterEntry;
     758             :     case ExecutionTier::kLiftoff:
     759             :     case ExecutionTier::kTurbofan:
     760     1052084 :       return WasmCode::Kind::kFunction;
     761             :     case ExecutionTier::kNone:
     762           0 :       UNREACHABLE();
     763             :   }
     764           0 : }
     765             : }  // namespace
     766             : 
// Installs {code} into the module: updates the code table (unless it would
// downgrade the tier), patches the jump table, and takes ownership of the
// code object. The caller must hold {allocation_mutex_}.
WasmCode* NativeModule::PublishCodeLocked(std::unique_ptr<WasmCode> code) {
  // The caller must hold the {allocation_mutex_}, thus we fail to lock it here.
  DCHECK(!allocation_mutex_.TryLock());

  // Anonymous code (jump tables, stubs) is only added to {owned_code_}; it
  // has no code-table slot.
  if (!code->IsAnonymous()) {
    DCHECK_LT(code->index(), num_functions());
    DCHECK_LE(module_->num_imported_functions, code->index());

    // Assume an order of execution tiers that represents the quality of their
    // generated code.
    static_assert(ExecutionTier::kNone < ExecutionTier::kInterpreter &&
                      ExecutionTier::kInterpreter < ExecutionTier::kLiftoff &&
                      ExecutionTier::kLiftoff < ExecutionTier::kTurbofan,
                  "Assume an order on execution tiers");

    // Update code table but avoid to fall back to less optimized code. We use
    // the new code if it was compiled with a higher tier.
    uint32_t slot_idx = code->index() - module_->num_imported_functions;
    WasmCode* prior_code = code_table_[slot_idx];
    bool update_code_table = !prior_code || prior_code->tier() < code->tier();
    if (update_code_table) {
      code_table_[slot_idx] = code.get();
      if (prior_code) {
        WasmCodeRefScope::AddRef(prior_code);
        // The code is added to the current {WasmCodeRefScope}, hence the ref
        // count cannot drop to zero here.
        CHECK(!prior_code->DecRef());
      }
    }

    // Populate optimized code to the jump table unless there is an active
    // redirection to the interpreter that should be preserved.
    bool update_jump_table =
        update_code_table && !has_interpreter_redirection(code->index());

    // Ensure that interpreter entries always populate to the jump table.
    if (code->kind_ == WasmCode::Kind::kInterpreterEntry) {
      SetInterpreterRedirection(code->index());
      update_jump_table = true;
    }

    if (update_jump_table) {
      JumpTableAssembler::PatchJumpTableSlot(
          jump_table_->instruction_start(), slot_idx, code->instruction_start(),
          WasmCode::kFlushICache);
    }
  }
  // Transfer ownership to {owned_code_} and keep the code alive for the
  // current {WasmCodeRefScope}.
  WasmCodeRefScope::AddRef(code.get());
  WasmCode* result = code.get();
  owned_code_.emplace_back(std::move(code));
  return result;
}
     819             : 
     820         236 : WasmCode* NativeModule::AddDeserializedCode(
     821             :     uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
     822             :     uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
     823             :     size_t handler_table_offset, size_t constant_pool_offset,
     824             :     size_t code_comments_offset, size_t unpadded_binary_size,
     825             :     OwnedVector<ProtectedInstructionData> protected_instructions,
     826             :     OwnedVector<const byte> reloc_info,
     827             :     OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
     828             :     ExecutionTier tier) {
     829         236 :   Vector<uint8_t> dst_code_bytes = AllocateForCode(instructions.size());
     830             :   memcpy(dst_code_bytes.begin(), instructions.start(), instructions.size());
     831             : 
     832             :   std::unique_ptr<WasmCode> code{new WasmCode{
     833             :       this, index, dst_code_bytes, stack_slots, tagged_parameter_slots,
     834             :       safepoint_table_offset, handler_table_offset, constant_pool_offset,
     835             :       code_comments_offset, unpadded_binary_size,
     836             :       std::move(protected_instructions), std::move(reloc_info),
     837         708 :       std::move(source_position_table), kind, tier}};
     838             : 
     839         236 :   code->RegisterTrapHandlerData();
     840             : 
     841             :   // Note: we do not flush the i-cache here, since the code needs to be
     842             :   // relocated anyway. The caller is responsible for flushing the i-cache later.
     843             : 
     844         472 :   return PublishCode(std::move(code));
     845             : }
     846             : 
     847         209 : std::vector<WasmCode*> NativeModule::SnapshotCodeTable() const {
     848         209 :   base::MutexGuard lock(&allocation_mutex_);
     849             :   WasmCode** start = code_table_.get();
     850         209 :   WasmCode** end = start + module_->num_declared_functions;
     851         209 :   return std::vector<WasmCode*>{start, end};
     852             : }
     853             : 
     854     9653457 : WasmCode* NativeModule::GetCode(uint32_t index) const {
     855     9653457 :   base::MutexGuard guard(&allocation_mutex_);
     856             :   DCHECK_LT(index, num_functions());
     857             :   DCHECK_LE(module_->num_imported_functions, index);
     858    19306914 :   WasmCode* code = code_table_[index - module_->num_imported_functions];
     859     9653457 :   WasmCodeRefScope::AddRef(code);
     860     9653457 :   return code;
     861             : }
     862             : 
     863          12 : bool NativeModule::HasCode(uint32_t index) const {
     864          12 :   base::MutexGuard guard(&allocation_mutex_);
     865             :   DCHECK_LT(index, num_functions());
     866             :   DCHECK_LE(module_->num_imported_functions, index);
     867          36 :   return code_table_[index - module_->num_imported_functions] != nullptr;
     868             : }
     869             : 
     870     2477580 : WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t jump_table_size) {
     871             :   // Only call this if we really need a jump table.
     872             :   DCHECK_LT(0, jump_table_size);
     873     2477580 :   Vector<uint8_t> code_space = AllocateForCode(jump_table_size);
     874             :   ZapCode(reinterpret_cast<Address>(code_space.begin()), code_space.size());
     875             :   std::unique_ptr<WasmCode> code{new WasmCode{
     876             :       this,                                     // native_module
     877             :       WasmCode::kAnonymousFuncIndex,            // index
     878             :       code_space,                               // instructions
     879             :       0,                                        // stack_slots
     880             :       0,                                        // tagged_parameter_slots
     881             :       0,                                        // safepoint_table_offset
     882             :       jump_table_size,                          // handler_table_offset
     883             :       jump_table_size,                          // constant_pool_offset
     884             :       jump_table_size,                          // code_comments_offset
     885             :       jump_table_size,                          // unpadded_binary_size
     886             :       OwnedVector<ProtectedInstructionData>{},  // protected_instructions
     887             :       OwnedVector<const uint8_t>{},             // reloc_info
     888             :       OwnedVector<const uint8_t>{},             // source_pos
     889             :       WasmCode::kJumpTable,                     // kind
     890     4955162 :       ExecutionTier::kNone}};                   // tier
     891     4955162 :   return PublishCode(std::move(code));
     892             : }
     893             : 
// Bump-allocates {size} bytes (rounded up to {kCodeAlignment}) from the
// module's code region, committing additional pages as needed and growing the
// reservation if allowed. Aborts the process on out-of-memory.
Vector<byte> NativeModule::AllocateForCode(size_t size) {
  base::MutexGuard lock(&allocation_mutex_);
  DCHECK_LT(0, size);
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  // This happens under a lock assumed by the caller.
  size = RoundUp<kCodeAlignment>(size);
  base::AddressRegion code_space = free_code_space_.Allocate(size);
  if (code_space.is_empty()) {
    // The current reservation is exhausted; try to reserve more, ideally
    // contiguous with the last reservation.
    if (!can_request_more_memory_) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }

    Address hint = owned_code_space_.empty() ? kNullAddress
                                             : owned_code_space_.back().end();

    VirtualMemory new_mem = engine_->code_manager()->TryAllocate(
        size, reinterpret_cast<void*>(hint));
    if (!new_mem.IsReserved()) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode reservation");
      UNREACHABLE();
    }
    engine_->code_manager()->AssignRanges(new_mem.address(), new_mem.end(),
                                          this);

    free_code_space_.Merge(new_mem.region());
    owned_code_space_.emplace_back(std::move(new_mem));
    code_space = free_code_space_.Allocate(size);
    DCHECK(!code_space.is_empty());
  }
  const Address page_size = page_allocator->AllocatePageSize();
  Address commit_start = RoundUp(code_space.begin(), page_size);
  Address commit_end = RoundUp(code_space.end(), page_size);
  // {commit_start} will be either code_space.start or the start of the next
  // page. {commit_end} will be the start of the page after the one in which
  // the allocation ends.
  // We start from an aligned start, and we know we allocated vmem in
  // page multiples.
  // We just need to commit what's not committed. The page in which we
  // start is already committed (or we start at the beginning of a page).
  // The end needs to be committed all through the end of the page.
  if (commit_start < commit_end) {
    committed_code_space_.fetch_add(commit_end - commit_start);
    // Committed code cannot grow bigger than maximum code space size.
    DCHECK_LE(committed_code_space_.load(), kMaxWasmCodeMemory);
#if V8_OS_WIN
    // On Windows, we cannot commit a region that straddles different
    // reservations of virtual memory. Because we bump-allocate, and because, if
    // we need more memory, we append that memory at the end of the
    // owned_code_space_ list, we traverse that list in reverse order to find
    // the reservation(s) that guide how to chunk the region to commit.
    for (auto& vmem : base::Reversed(owned_code_space_)) {
      if (commit_end <= vmem.address() || vmem.end() <= commit_start) continue;
      Address start = std::max(commit_start, vmem.address());
      Address end = std::min(commit_end, vmem.end());
      size_t commit_size = static_cast<size_t>(end - start);
      if (!engine_->code_manager()->Commit(start, commit_size)) {
        V8::FatalProcessOutOfMemory(nullptr,
                                    "NativeModule::AllocateForCode commit");
        UNREACHABLE();
      }
      // Opportunistically reduce the commit range. This might terminate the
      // loop early.
      if (commit_start == start) commit_start = end;
      if (commit_end == end) commit_end = start;
      if (commit_start >= commit_end) break;
    }
#else
    // Elsewhere a single commit call suffices, since one reservation covers
    // the whole range.
    if (!engine_->code_manager()->Commit(commit_start,
                                         commit_end - commit_start)) {
      V8::FatalProcessOutOfMemory(nullptr,
                                  "NativeModule::AllocateForCode commit");
      UNREACHABLE();
    }
#endif
  }
  DCHECK(IsAligned(code_space.begin(), kCodeAlignment));
  allocated_code_space_.Merge(code_space);
  generated_code_size_.fetch_add(code_space.size(), std::memory_order_relaxed);

  TRACE_HEAP("Code alloc for %p: %" PRIxPTR ",+%zu\n", this, code_space.begin(),
             size);
  return {reinterpret_cast<byte*>(code_space.begin()), code_space.size()};
}
     980             : 
     981             : namespace {
     982     4679808 : class NativeModuleWireBytesStorage final : public WireBytesStorage {
     983             :  public:
     984             :   explicit NativeModuleWireBytesStorage(
     985             :       std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes)
     986     2339904 :       : wire_bytes_(std::move(wire_bytes)) {}
     987             : 
     988     1063539 :   Vector<const uint8_t> GetCode(WireBytesRef ref) const final {
     989     2127078 :     return wire_bytes_->as_vector().SubVector(ref.offset(), ref.end_offset());
     990             :   }
     991             : 
     992             :  private:
     993             :   const std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;
     994             : };
     995             : }  // namespace
     996             : 
     997     3434420 : void NativeModule::SetWireBytes(OwnedVector<const uint8_t> wire_bytes) {
     998             :   auto shared_wire_bytes =
     999             :       std::make_shared<OwnedVector<const uint8_t>>(std::move(wire_bytes));
    1000             :   wire_bytes_ = shared_wire_bytes;
    1001     3434420 :   if (!shared_wire_bytes->empty()) {
    1002     2339904 :     compilation_state_->SetWireBytesStorage(
    1003     2339904 :         std::make_shared<NativeModuleWireBytesStorage>(
    1004     2339904 :             std::move(shared_wire_bytes)));
    1005             :   }
    1006     3434420 : }
    1007             : 
WasmCode* NativeModule::Lookup(Address pc) const {
  // Finds the owned {WasmCode} object whose instructions contain {pc}, or
  // returns nullptr. {owned_code_} keeps a prefix of length
  // {owned_code_sorted_portion_} sorted by instruction start; the vector is
  // fully re-sorted lazily only when a lookup misses.
  base::MutexGuard lock(&allocation_mutex_);
  if (owned_code_.empty()) return nullptr;
  // First update the sorted portion counter.
  if (owned_code_sorted_portion_ == 0) ++owned_code_sorted_portion_;
  // Extend the sorted prefix over any entries that happen to already be in
  // order (cheap, avoids a full sort in the common append-in-order case).
  while (owned_code_sorted_portion_ < owned_code_.size() &&
         owned_code_[owned_code_sorted_portion_ - 1]->instruction_start() <=
             owned_code_[owned_code_sorted_portion_]->instruction_start()) {
    ++owned_code_sorted_portion_;
  }
  // Execute at most two rounds: First check whether the {pc} is within the
  // sorted portion of {owned_code_}. If it's not, then sort the whole vector
  // and retry.
  while (true) {
    auto iter =
        std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
                         [](Address pc, const std::unique_ptr<WasmCode>& code) {
                           DCHECK_NE(kNullAddress, pc);
                           DCHECK_NOT_NULL(code);
                           return pc < code->instruction_start();
                         });
    if (iter != owned_code_.begin()) {
      --iter;
      WasmCode* candidate = iter->get();
      DCHECK_NOT_NULL(candidate);
      if (candidate->contains(pc)) {
        // Register the result in the current {WasmCodeRefScope} so it stays
        // alive while the caller uses it.
        WasmCodeRefScope::AddRef(candidate);
        return candidate;
      }
    }
    // If everything was already sorted, {pc} is genuinely not present.
    if (owned_code_sorted_portion_ == owned_code_.size()) return nullptr;
    std::sort(owned_code_.begin(), owned_code_.end(),
              [](const std::unique_ptr<WasmCode>& code1,
                 const std::unique_ptr<WasmCode>& code2) {
                return code1->instruction_start() < code2->instruction_start();
              });
    owned_code_sorted_portion_ = owned_code_.size();
  }
}
    1047             : 
    1048      364587 : Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
    1049             :   // TODO(clemensh): Measure performance win of returning instruction start
    1050             :   // directly if we have turbofan code. Downside: Redirecting functions (e.g.
    1051             :   // for debugging) gets much harder.
    1052             : 
    1053             :   // Return the jump table slot for that function index.
    1054             :   DCHECK_NOT_NULL(jump_table_);
    1055      390618 :   uint32_t slot_idx = func_index - module_->num_imported_functions;
    1056             :   uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
    1057             :   DCHECK_LT(slot_offset, jump_table_->instructions().size());
    1058      781236 :   return jump_table_->instruction_start() + slot_offset;
    1059             : }
    1060             : 
    1061       40656 : uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
    1062             :     Address slot_address) const {
    1063             :   DCHECK(is_jump_table_slot(slot_address));
    1064             :   uint32_t slot_offset =
    1065       81312 :       static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
    1066             :   uint32_t slot_idx = JumpTableAssembler::SlotOffsetToIndex(slot_offset);
    1067             :   DCHECK_LT(slot_idx, module_->num_declared_functions);
    1068       40656 :   return module_->num_imported_functions + slot_idx;
    1069             : }
    1070             : 
const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
  // Reverse-maps a runtime stub entry address to a human-readable name by
  // comparing it against every entry in {runtime_stub_entries_}. Used for
  // diagnostics only; returns "<unknown>" if no entry matches.
#define RETURN_NAME(Name)                                               \
  if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
    return #Name;                                                       \
  }
#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
  WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
#undef RETURN_NAME_TRAP
#undef RETURN_NAME
  return "<unknown>";
}
    1082             : 
NativeModule::~NativeModule() {
  TRACE_HEAP("Deleting native module: %p\n", reinterpret_cast<void*>(this));
  // Cancel all background compilation before resetting any field of the
  // NativeModule or freeing anything.
  compilation_state_->AbortCompilation();
  // Unregister from the engine/code manager; this also releases the owned
  // code space reservations.
  engine_->FreeNativeModule(this);
  // Free the import wrapper cache before releasing the {WasmCode} objects in
  // {owned_code_}. The destructor of {WasmImportWrapperCache} still needs to
  // decrease reference counts on the {WasmCode} objects.
  import_wrapper_cache_.reset();
}
    1094             : 
WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
                                 size_t max_committed)
    : memory_tracker_(memory_tracker),
      max_committed_code_space_(max_committed),
      total_committed_code_space_(0),
      // Memory-pressure handling kicks in once half the budget is committed
      // (see {NewNativeModule}).
      critical_committed_code_space_(max_committed / 2) {
  DCHECK_LE(max_committed, kMaxWasmCodeMemory);
}
    1103             : 
bool WasmCodeManager::Commit(Address start, size_t size) {
  // Commits the page-aligned range [start, start+size) for code, charging it
  // against the global committed-code budget. Returns false if the budget
  // would be exceeded or changing page permissions failed.
  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) return true;
  DCHECK(IsAligned(start, AllocatePageSize()));
  DCHECK(IsAligned(size, AllocatePageSize()));
  // Reserve the size. Use CAS loop to avoid overflow on
  // {total_committed_code_space_}.
  size_t old_value = total_committed_code_space_.load();
  while (true) {
    DCHECK_GE(max_committed_code_space_, old_value);
    if (size > max_committed_code_space_ - old_value) return false;
    if (total_committed_code_space_.compare_exchange_weak(old_value,
                                                          old_value + size)) {
      break;
    }
  }
  // With code write protection the pages start out read-write and are flipped
  // to read-execute later (see {SetExecutable}); otherwise they are RWX.
  PageAllocator::Permission permission = FLAG_wasm_write_protect_code_memory
                                             ? PageAllocator::kReadWrite
                                             : PageAllocator::kReadWriteExecute;

  bool ret =
      SetPermissions(GetPlatformPageAllocator(), start, size, permission);
  TRACE_HEAP("Setting rw permissions for %p:%p\n",
             reinterpret_cast<void*>(start),
             reinterpret_cast<void*>(start + size));

  if (!ret) {
    // Highly unlikely. Roll back the budget reservation made above.
    total_committed_code_space_.fetch_sub(size);
    return false;
  }
  return true;
}
    1137             : 
    1138           0 : void WasmCodeManager::AssignRanges(Address start, Address end,
    1139             :                                    NativeModule* native_module) {
    1140           0 :   base::MutexGuard lock(&native_modules_mutex_);
    1141           0 :   lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
    1142           0 : }
    1143             : 
VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
  // Reserves {size} bytes (rounded up to allocation-page granularity) of
  // virtual memory for code. Returns an empty {VirtualMemory} on failure.
  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
  DCHECK_GT(size, 0);
  size = RoundUp(size, page_allocator->AllocatePageSize());
  // Charge the process-wide address space budget first; bail out if it is
  // exhausted.
  if (!memory_tracker_->ReserveAddressSpace(size)) return {};
  if (hint == nullptr) hint = page_allocator->GetRandomMmapAddr();

  VirtualMemory mem(page_allocator, size, hint,
                    page_allocator->AllocatePageSize());
  if (!mem.IsReserved()) {
    // The actual reservation failed; give the tracked budget back.
    memory_tracker_->ReleaseReservation(size);
    return {};
  }
  TRACE_HEAP("VMem alloc: %p:%p (%zu)\n",
             reinterpret_cast<void*>(mem.address()),
             reinterpret_cast<void*>(mem.end()), mem.size());

  // TODO(v8:8462) Remove eager commit once perf supports remapping.
  if (FLAG_perf_prof) {
    SetPermissions(GetPlatformPageAllocator(), mem.address(), mem.size(),
                   PageAllocator::kReadWriteExecute);
  }
  return mem;
}
    1168             : 
void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
  // This has to be set before committing any memory.
  DCHECK_EQ(0, total_committed_code_space_.load());
  max_committed_code_space_ = limit;
  // Keep the critical threshold at half the limit, mirroring the constructor.
  critical_committed_code_space_.store(limit / 2);
}
    1175             : 
    1176             : // static
    1177     1386053 : size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
    1178             :   constexpr size_t kCodeSizeMultiplier = 4;
    1179             :   constexpr size_t kCodeOverhead = 32;     // for prologue, stack check, ...
    1180             :   constexpr size_t kStaticCodeSize = 512;  // runtime stubs, ...
    1181             :   constexpr size_t kImportSize = 64 * kSystemPointerSize;
    1182             : 
    1183             :   size_t estimate = kStaticCodeSize;
    1184     2129777 :   for (auto& function : module->functions) {
    1185      743724 :     estimate += kCodeOverhead + kCodeSizeMultiplier * function.code.length();
    1186             :   }
    1187             :   estimate +=
    1188     2772106 :       JumpTableAssembler::SizeForNumberOfSlots(module->num_declared_functions);
    1189     1386053 :   estimate += kImportSize * module->num_imported_functions;
    1190             : 
    1191     1386053 :   return estimate;
    1192             : }
    1193             : 
    1194             : // static
    1195     1239052 : size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
    1196             :     const WasmModule* module) {
    1197     1239052 :   size_t wasm_module_estimate = EstimateStoredSize(module);
    1198             : 
    1199     1239053 :   uint32_t num_wasm_functions = module->num_declared_functions;
    1200             : 
    1201             :   // TODO(wasm): Include wire bytes size.
    1202             :   size_t native_module_estimate =
    1203             :       sizeof(NativeModule) +                     /* NativeModule struct */
    1204     1239053 :       (sizeof(WasmCode*) * num_wasm_functions) + /* code table size */
    1205     1239053 :       (sizeof(WasmCode) * num_wasm_functions);   /* code object size */
    1206             : 
    1207     1239053 :   return wasm_module_estimate + native_module_estimate;
    1208             : }
    1209             : 
std::shared_ptr<NativeModule> WasmCodeManager::NewNativeModule(
    WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
    size_t code_size_estimate, bool can_request_more,
    std::shared_ptr<const WasmModule> module) {
  // Creates a new {NativeModule}, reserving its code space (retrying after a
  // GC on failure) and registering the range in {lookup_map_} for PC lookups.
  DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
  // If we are past the critical commit threshold, signal memory pressure and
  // move the threshold halfway towards the hard limit.
  if (total_committed_code_space_.load() >
      critical_committed_code_space_.load()) {
    (reinterpret_cast<v8::Isolate*>(isolate))
        ->MemoryPressureNotification(MemoryPressureLevel::kCritical);
    size_t committed = total_committed_code_space_.load();
    DCHECK_GE(max_committed_code_space_, committed);
    critical_committed_code_space_.store(
        committed + (max_committed_code_space_ - committed) / 2);
  }

  // If the code must be contiguous, reserve enough address space up front.
  size_t code_vmem_size =
      kRequiresCodeRange ? kMaxWasmCodeMemory : code_size_estimate;
  // Try up to two times; getting rid of dead JSArrayBuffer allocations might
  // require two GCs because the first GC maybe incremental and may have
  // floating garbage.
  static constexpr int kAllocationRetries = 2;
  VirtualMemory code_space;
  for (int retries = 0;; ++retries) {
    code_space = TryAllocate(code_vmem_size);
    if (code_space.IsReserved()) break;
    if (retries == kAllocationRetries) {
      V8::FatalProcessOutOfMemory(isolate, "WasmCodeManager::NewNativeModule");
      UNREACHABLE();
    }
    // Run one GC, then try the allocation again.
    isolate->heap()->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                                true);
  }

  Address start = code_space.address();
  size_t size = code_space.size();
  Address end = code_space.end();
  std::shared_ptr<NativeModule> ret;
  // The NativeModule constructor stores itself into {ret}; the raw result of
  // {new} is intentionally not kept.
  new NativeModule(engine, enabled, can_request_more, std::move(code_space),
                   std::move(module), isolate->async_counters(), &ret);
  // The constructor initialized the shared_ptr.
  DCHECK_NOT_NULL(ret);
  TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
             size);

#if defined(V8_OS_WIN_X64)
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      FLAG_win64_unwinding_info) {
    win64_unwindinfo::RegisterNonABICompliantCodeRange(
        reinterpret_cast<void*>(start), size);
  }
#endif

  base::MutexGuard lock(&native_modules_mutex_);
  lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
  return ret;
}
    1268             : 
bool NativeModule::SetExecutable(bool executable) {
  // Flips the module's code pages between writable and executable when code
  // write protection (--wasm-write-protect-code-memory) is enabled. Returns
  // false only if changing page permissions failed. No-op when the state
  // already matches.
  if (is_executable_ == executable) return true;
  TRACE_HEAP("Setting module %p as executable: %d.\n", this, executable);

  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();

  if (FLAG_wasm_write_protect_code_memory) {
    PageAllocator::Permission permission =
        executable ? PageAllocator::kReadExecute : PageAllocator::kReadWrite;
#if V8_OS_WIN
    // On windows, we need to switch permissions per separate virtual memory
    // reservation. This is really just a problem when the NativeModule is
    // growable (meaning can_request_more_memory_). That's 32-bit in production,
    // or unittests.
    // For now, in that case, we commit at reserved memory granularity.
    // Technically, that may be a waste, because we may reserve more than we
    // use. On 32-bit though, the scarce resource is the address space -
    // committed or not.
    if (can_request_more_memory_) {
      for (auto& vmem : owned_code_space_) {
        if (!SetPermissions(page_allocator, vmem.address(), vmem.size(),
                            permission)) {
          return false;
        }
        TRACE_HEAP("Set %p:%p to executable:%d\n", vmem.address(), vmem.end(),
                   executable);
      }
      is_executable_ = executable;
      return true;
    }
#endif
    for (auto& region : allocated_code_space_.regions()) {
      // allocated_code_space_ is fine-grained, so we need to
      // page-align it.
      size_t region_size =
          RoundUp(region.size(), page_allocator->AllocatePageSize());
      if (!SetPermissions(page_allocator, region.begin(), region_size,
                          permission)) {
        return false;
      }
      TRACE_HEAP("Set %p:%p to executable:%d\n",
                 reinterpret_cast<void*>(region.begin()),
                 reinterpret_cast<void*>(region.end()), executable);
    }
  }
  is_executable_ = executable;
  return true;
}
    1317             : 
    1318      690435 : void NativeModule::SampleCodeSize(
    1319             :     Counters* counters, NativeModule::CodeSamplingTime sampling_time) const {
    1320             :   size_t code_size = sampling_time == kSampling
    1321             :                          ? committed_code_space()
    1322      690435 :                          : generated_code_size_.load(std::memory_order_relaxed);
    1323      690435 :   int code_size_mb = static_cast<int>(code_size / MB);
    1324             :   Histogram* histogram = nullptr;
    1325      690435 :   switch (sampling_time) {
    1326             :     case kAfterBaseline:
    1327             :       histogram = counters->wasm_module_code_size_mb_after_baseline();
    1328        2212 :       break;
    1329             :     case kAfterTopTier:
    1330             :       histogram = counters->wasm_module_code_size_mb_after_top_tier();
    1331         166 :       break;
    1332             :     case kSampling:
    1333             :       histogram = counters->wasm_module_code_size_mb();
    1334      688057 :       break;
    1335             :   }
    1336      690435 :   histogram->AddSample(code_size_mb);
    1337      690435 : }
    1338             : 
    1339      739899 : WasmCode* NativeModule::AddCompiledCode(WasmCompilationResult result) {
    1340     1479798 :   return AddCompiledCode({&result, 1})[0];
    1341             : }
    1342             : 
std::vector<WasmCode*> NativeModule::AddCompiledCode(
    Vector<WasmCompilationResult> results) {
  // Copies all successful compilation results into this module's code space
  // and publishes them. Returns the published {WasmCode} objects in the same
  // order as {results}.
  DCHECK(!results.empty());
  // First, allocate code space for all the results.
  size_t total_code_space = 0;
  for (auto& result : results) {
    DCHECK(result.succeeded());
    total_code_space += RoundUp<kCodeAlignment>(result.code_desc.instr_size);
  }
  Vector<byte> code_space = AllocateForCode(total_code_space);

  std::vector<std::unique_ptr<WasmCode>> generated_code;
  generated_code.reserve(results.size());

  // Now copy the generated code into the code space and relocate it.
  for (auto& result : results) {
    DCHECK_EQ(result.code_desc.buffer, result.instr_buffer.get());
    size_t code_size = RoundUp<kCodeAlignment>(result.code_desc.instr_size);
    // Carve this result's aligned slice off the front of the allocation.
    Vector<byte> this_code_space = code_space.SubVector(0, code_size);
    code_space += code_size;
    generated_code.emplace_back(AddCodeWithCodeSpace(
        result.func_index, result.code_desc, result.frame_slot_count,
        result.tagged_parameter_slots, std::move(result.protected_instructions),
        std::move(result.source_positions),
        GetCodeKindForExecutionTier(result.result_tier), result.result_tier,
        this_code_space));
  }
  // All allocated space must have been consumed exactly.
  DCHECK_EQ(0, code_space.size());

  // Under the {allocation_mutex_}, publish the code. The published code is put
  // into the top-most surrounding {WasmCodeRefScope} by {PublishCodeLocked}.
  std::vector<WasmCode*> code_vector;
  code_vector.reserve(results.size());
  {
    base::MutexGuard lock(&allocation_mutex_);
    for (auto& result : generated_code)
      code_vector.push_back(PublishCodeLocked(std::move(result)));
  }

  return code_vector;
}
    1384             : 
void NativeModule::FreeCode(Vector<WasmCode* const> codes) {
  // Intentionally a no-op for now: individual code objects are not freed
  // before the whole NativeModule dies.
  // TODO(clemensh): Implement.
}
    1388             : 
void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
  // Releases all virtual memory reservations of {native_module}, removes them
  // from the PC lookup map, and returns the committed pages to the global
  // budget. Called during NativeModule destruction.
  base::MutexGuard lock(&native_modules_mutex_);
  TRACE_HEAP("Freeing NativeModule %p\n", native_module);
  for (auto& code_space : native_module->owned_code_space_) {
    DCHECK(code_space.IsReserved());
    TRACE_HEAP("VMem Release: %" PRIxPTR ":%" PRIxPTR " (%zu)\n",
               code_space.address(), code_space.end(), code_space.size());

#if defined(V8_OS_WIN_X64)
    if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
        FLAG_win64_unwinding_info) {
      win64_unwindinfo::UnregisterNonABICompliantCodeRange(
          reinterpret_cast<void*>(code_space.address()));
    }
#endif

    lookup_map_.erase(code_space.address());
    memory_tracker_->ReleaseReservation(code_space.size());
    code_space.Free();
    DCHECK(!code_space.IsReserved());
  }
  native_module->owned_code_space_.clear();

  // Give the committed code pages back to the global budget.
  size_t code_size = native_module->committed_code_space_.load();
  DCHECK(IsAligned(code_size, AllocatePageSize()));
  size_t old_committed = total_committed_code_space_.fetch_sub(code_size);
  DCHECK_LE(code_size, old_committed);
  USE(old_committed);
}
    1418             : 
    1419    55884369 : NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
    1420    55884369 :   base::MutexGuard lock(&native_modules_mutex_);
    1421    55884652 :   if (lookup_map_.empty()) return nullptr;
    1422             : 
    1423             :   auto iter = lookup_map_.upper_bound(pc);
    1424    13825220 :   if (iter == lookup_map_.begin()) return nullptr;
    1425             :   --iter;
    1426    13790527 :   Address region_start = iter->first;
    1427    13790527 :   Address region_end = iter->second.first;
    1428    13790527 :   NativeModule* candidate = iter->second.second;
    1429             : 
    1430             :   DCHECK_NOT_NULL(candidate);
    1431    13790527 :   return region_start <= pc && pc < region_end ? candidate : nullptr;
    1432             : }
    1433             : 
    1434    55835620 : WasmCode* WasmCodeManager::LookupCode(Address pc) const {
    1435    55835620 :   NativeModule* candidate = LookupNativeModule(pc);
    1436    55835886 :   return candidate ? candidate->Lookup(pc) : nullptr;
    1437             : }
    1438             : 
// TODO(v8:7424): Code protection scopes are not yet supported with shared code
// enabled and need to be revisited to work with --wasm-shared-code as well.
NativeModuleModificationScope::NativeModuleModificationScope(
    NativeModule* native_module)
    : native_module_(native_module) {
  // Only the outermost scope makes the code writable; nested scopes just
  // increase the depth counter. Note that the counter is only touched when
  // code write protection is enabled (short-circuit evaluation).
  if (FLAG_wasm_write_protect_code_memory && native_module_ &&
      (native_module_->modification_scope_depth_++) == 0) {
    bool success = native_module_->SetExecutable(false);
    CHECK(success);
  }
}
    1450             : 
NativeModuleModificationScope::~NativeModuleModificationScope() {
  // Mirror of the constructor: only the outermost scope (depth returning to
  // zero) switches the code back to executable.
  if (FLAG_wasm_write_protect_code_memory && native_module_ &&
      (native_module_->modification_scope_depth_--) == 1) {
    bool success = native_module_->SetExecutable(true);
    CHECK(success);
  }
}
    1458             : 
namespace {
// Innermost {WasmCodeRefScope} of the current thread, or nullptr if none is
// active. Maintained by the WasmCodeRefScope constructor/destructor.
thread_local WasmCodeRefScope* current_code_refs_scope = nullptr;
}  // namespace
    1462             : 
WasmCodeRefScope::WasmCodeRefScope()
    // Remember the enclosing scope so the destructor can restore it.
    : previous_scope_(current_code_refs_scope) {
  // Push this scope onto the thread-local scope stack.
  current_code_refs_scope = this;
}
    1467             : 
    1468   122114547 : WasmCodeRefScope::~WasmCodeRefScope() {
    1469             :   DCHECK_EQ(this, current_code_refs_scope);
    1470    61057249 :   current_code_refs_scope = previous_scope_;
    1471             :   std::vector<WasmCode*> code_ptrs;
    1472    61057249 :   code_ptrs.reserve(code_ptrs_.size());
    1473             :   code_ptrs.assign(code_ptrs_.begin(), code_ptrs_.end());
    1474    61057231 :   WasmCode::DecrementRefCount(VectorOf(code_ptrs));
    1475    61057298 : }
    1476             : 
    1477             : // static
    1478    19568882 : void WasmCodeRefScope::AddRef(WasmCode* code) {
    1479             :   DCHECK_NOT_NULL(code);
    1480    19568882 :   WasmCodeRefScope* current_scope = current_code_refs_scope;
    1481             :   DCHECK_NOT_NULL(current_scope);
    1482             :   auto entry = current_scope->code_ptrs_.insert(code);
    1483             :   // If we added a new entry, increment the ref counter.
    1484    19568814 :   if (entry.second) code->IncRef();
    1485    19568814 : }
    1486             : 
    1487             : }  // namespace wasm
    1488             : }  // namespace internal
    1489      122036 : }  // namespace v8
    1490             : #undef TRACE_HEAP

Generated by: LCOV version 1.10