LCOV - code coverage report
Current view: top level - src/heap - heap-inl.h (source / functions)
Test:         app.info
Date:         2019-02-19
                        Hit      Total    Coverage
          Lines:        182      199      91.5 %
          Functions:    283      285      99.3 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_HEAP_INL_H_
       6             : #define V8_HEAP_HEAP_INL_H_
       7             : 
       8             : #include <cmath>
       9             : 
      10             : // Clients of this interface shouldn't depend on lots of heap internals.
      11             : // Do not include anything from src/heap other than src/heap/heap.h and its
      12             : // write barrier here!
      13             : #include "src/heap/heap-write-barrier.h"
      14             : #include "src/heap/heap.h"
      15             : 
      16             : #include "src/base/atomic-utils.h"
      17             : #include "src/base/platform/platform.h"
      18             : #include "src/feedback-vector.h"
      19             : 
      20             : // TODO(mstarzinger): There is one more include to remove in order to no longer
      21             : // leak heap internals to users of this interface!
      22             : #include "src/heap/spaces-inl.h"
      23             : #include "src/isolate-data.h"
      24             : #include "src/isolate.h"
      25             : #include "src/msan.h"
      26             : #include "src/objects-inl.h"
      27             : #include "src/objects/allocation-site-inl.h"
      28             : #include "src/objects/api-callbacks-inl.h"
      29             : #include "src/objects/cell-inl.h"
      30             : #include "src/objects/descriptor-array.h"
      31             : #include "src/objects/feedback-cell-inl.h"
      32             : #include "src/objects/literal-objects-inl.h"
      33             : #include "src/objects/oddball.h"
      34             : #include "src/objects/property-cell.h"
      35             : #include "src/objects/scope-info.h"
      36             : #include "src/objects/script-inl.h"
      37             : #include "src/objects/struct-inl.h"
      38             : #include "src/profiler/heap-profiler.h"
      39             : #include "src/string-hasher.h"
      40             : #include "src/zone/zone-list-inl.h"
      41             : 
      42             : namespace v8 {
      43             : namespace internal {
      44             : 
      45             : AllocationSpace AllocationResult::RetrySpace() {
      46             :   DCHECK(IsRetry());
      47       19963 :   return static_cast<AllocationSpace>(Smi::ToInt(object_));
      48             : }
      49             : 
      50         258 : HeapObject AllocationResult::ToObjectChecked() {
      51         258 :   CHECK(!IsRetry());
      52         258 :   return HeapObject::cast(object_);
      53             : }
      54             : 
      55   421530951 : Isolate* Heap::isolate() {
      56             :   return reinterpret_cast<Isolate*>(
      57  2314518205 :       reinterpret_cast<intptr_t>(this) -
      58  2314518205 :       reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
      59             : }
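
Heap::isolate() above recovers the enclosing Isolate from the Heap's own address: it measures the byte offset of the Heap member inside Isolate by pretending an Isolate lives at address 16, then subtracts that offset from `this`. Below is a minimal standalone sketch of the same "containing object from member" pattern, using hypothetical Owner/Member types (not V8's classes) and offsetof in place of the fake-address trick:

    #include <cstddef>
    #include <cstdint>

    struct Owner;

    struct Member {
      Owner* owner();  // recover the enclosing Owner from this member
    };

    struct Owner {
      int some_state = 0;
      Member member;  // embedded sub-object, playing the role of Heap
    };

    Owner* Member::owner() {
      // Subtract the member's byte offset from its own address to obtain the
      // address of the containing Owner. V8 measures the same offset via the
      // fake base address 16 instead of offsetof.
      return reinterpret_cast<Owner*>(
          reinterpret_cast<std::uintptr_t>(this) - offsetof(Owner, member));
    }
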
      60             : 
      61             : int64_t Heap::external_memory() {
      62           0 :   return isolate()->isolate_data()->external_memory_;
      63             : }
      64             : 
      65             : void Heap::update_external_memory(int64_t delta) {
      66        5327 :   isolate()->isolate_data()->external_memory_ += delta;
      67             : }
      68             : 
      69             : void Heap::update_external_memory_concurrently_freed(intptr_t freed) {
      70       15472 :   external_memory_concurrently_freed_ += freed;
      71             : }
      72             : 
      73             : void Heap::account_external_memory_concurrently_freed() {
      74      345020 :   isolate()->isolate_data()->external_memory_ -=
      75      345020 :       external_memory_concurrently_freed_;
      76      172510 :   external_memory_concurrently_freed_ = 0;
      77             : }
      78             : 
      79   421338293 : RootsTable& Heap::roots_table() { return isolate()->roots_table(); }
      80             : 
      81             : #define ROOT_ACCESSOR(Type, name, CamelName)                           \
      82             :   Type Heap::name() {                                                  \
      83             :     return Type::cast(Object(roots_table()[RootIndex::k##CamelName])); \
      84             :   }
      85  1141313256 : MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
      86             : #undef ROOT_ACCESSOR
      87             : 
      88             : #define ROOT_ACCESSOR(type, name, CamelName)                                   \
      89             :   void Heap::set_##name(type value) {                                          \
      90             :     /* The deserializer makes use of the fact that these common roots are */   \
      91             :     /* never in new space and never on a page that is being compacted.    */   \
      92             :     DCHECK_IMPLIES(deserialization_complete(),                                 \
      93             :                    !RootsTable::IsImmortalImmovable(RootIndex::k##CamelName)); \
      94             :     DCHECK_IMPLIES(RootsTable::IsImmortalImmovable(RootIndex::k##CamelName),   \
      95             :                    IsImmovable(HeapObject::cast(value)));                      \
      96             :     roots_table()[RootIndex::k##CamelName] = value->ptr();                     \
      97             :   }
      98    37749753 : ROOT_LIST(ROOT_ACCESSOR)
      99             : #undef ROOT_ACCESSOR
     100             : 
     101             : void Heap::SetRootMaterializedObjects(FixedArray objects) {
     102          48 :   roots_table()[RootIndex::kMaterializedObjects] = objects->ptr();
     103             : }
     104             : 
     105             : void Heap::SetRootScriptList(Object value) {
     106         209 :   roots_table()[RootIndex::kScriptList] = value->ptr();
     107             : }
     108             : 
     109             : void Heap::SetRootStringTable(StringTable value) {
     110    14498330 :   roots_table()[RootIndex::kStringTable] = value->ptr();
     111             : }
     112             : 
     113             : void Heap::SetRootNoScriptSharedFunctionInfos(Object value) {
     114           0 :   roots_table()[RootIndex::kNoScriptSharedFunctionInfos] = value->ptr();
     115             : }
     116             : 
     117             : void Heap::SetMessageListeners(TemplateList value) {
     118       30838 :   roots_table()[RootIndex::kMessageListeners] = value->ptr();
     119             : }
     120             : 
     121             : PagedSpace* Heap::paged_space(int idx) {
     122             :   DCHECK_NE(idx, LO_SPACE);
     123             :   DCHECK_NE(idx, NEW_SPACE);
     124             :   DCHECK_NE(idx, CODE_LO_SPACE);
     125             :   DCHECK_NE(idx, NEW_LO_SPACE);
     126     3451644 :   return static_cast<PagedSpace*>(space_[idx]);
     127             : }
     128             : 
     129     7276944 : Space* Heap::space(int idx) { return space_[idx]; }
     130             : 
     131             : Address* Heap::NewSpaceAllocationTopAddress() {
     132             :   return new_space_->allocation_top_address();
     133             : }
     134             : 
     135             : Address* Heap::NewSpaceAllocationLimitAddress() {
     136             :   return new_space_->allocation_limit_address();
     137             : }
     138             : 
     139             : Address* Heap::OldSpaceAllocationTopAddress() {
     140             :   return old_space_->allocation_top_address();
     141             : }
     142             : 
     143             : Address* Heap::OldSpaceAllocationLimitAddress() {
     144             :   return old_space_->allocation_limit_address();
     145             : }
     146             : 
     147             : void Heap::UpdateNewSpaceAllocationCounter() {
     148       98000 :   new_space_allocation_counter_ = NewSpaceAllocationCounter();
     149             : }
     150             : 
     151      196616 : size_t Heap::NewSpaceAllocationCounter() {
     152      393227 :   return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
     153             : }
     154             : 
     155   309911878 : AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     156     1893282 :                                    AllocationAlignment alignment) {
     157             :   DCHECK(AllowHandleAllocation::IsAllowed());
     158             :   DCHECK(AllowHeapAllocation::IsAllowed());
     159             :   DCHECK(gc_state_ == NOT_IN_GC);
     160             : #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
     161             :   if (FLAG_random_gc_interval > 0 || FLAG_gc_interval >= 0) {
     162             :     if (!always_allocate() && Heap::allocation_timeout_-- <= 0) {
     163             :       return AllocationResult::Retry(space);
     164             :     }
     165             :   }
     166             : #endif
     167             : #ifdef DEBUG
     168             :   IncrementObjectCounters();
     169             : #endif
     170             : 
     171   309911878 :   bool large_object = size_in_bytes > kMaxRegularHeapObjectSize;
     172             : 
     173   309911878 :   HeapObject object;
     174           0 :   AllocationResult allocation;
     175   309911878 :   if (NEW_SPACE == space) {
     176   160137045 :     if (large_object) {
     177             :       // TODO(hpayer): Implement a LO tenuring strategy.
     178        8049 :       space = FLAG_young_generation_large_objects ? NEW_LO_SPACE : LO_SPACE;
     179             :     } else {
     180   320257968 :       allocation = new_space_->AllocateRaw(size_in_bytes, alignment);
     181   160128972 :       if (allocation.To(&object)) {
     182   160109165 :         OnAllocationEvent(object, size_in_bytes);
     183             :       }
     184   160128910 :       return allocation;
     185             :     }
     186             :   }
     187             : 
     188             :   // Here we only allocate in the old generation.
     189   149782882 :   if (OLD_SPACE == space) {
     190   119308475 :     if (large_object) {
     191         771 :       allocation = lo_space_->AllocateRaw(size_in_bytes);
     192             :     } else {
     193   119307704 :       allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
     194             :     }
     195    30474407 :   } else if (CODE_SPACE == space) {
     196     3786564 :     if (size_in_bytes <= code_space()->AreaSize() && !large_object) {
     197     1893246 :       allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
     198             :     } else {
     199          36 :       allocation = code_lo_space_->AllocateRaw(size_in_bytes);
     200             :     }
     201    28581125 :   } else if (LO_SPACE == space) {
     202             :     DCHECK(large_object);
     203        9074 :     allocation = lo_space_->AllocateRaw(size_in_bytes);
     204    28572051 :   } else if (NEW_LO_SPACE == space) {
     205             :     DCHECK(FLAG_young_generation_large_objects);
     206           0 :     allocation = new_lo_space_->AllocateRaw(size_in_bytes);
     207    28572051 :   } else if (CODE_LO_SPACE == space) {
     208             :     DCHECK(large_object);
     209           0 :     allocation = code_lo_space_->AllocateRaw(size_in_bytes);
     210    28572051 :   } else if (MAP_SPACE == space) {
     211    28361435 :     allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
     212      210616 :   } else if (RO_SPACE == space) {
     213             : #ifdef V8_USE_SNAPSHOT
     214             :     DCHECK(isolate_->serializer_enabled());
     215             : #endif
     216             :     DCHECK(!large_object);
     217             :     DCHECK(CanAllocateInReadOnlySpace());
     218      210616 :     allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
     219             :   } else {
     220             :     // NEW_SPACE is not allowed here.
     221           0 :     UNREACHABLE();
     222             :   }
     223             : 
     224   149782874 :   if (allocation.To(&object)) {
     225   149782384 :     if (space == CODE_SPACE) {
     226             :       // Unprotect the memory chunk of the object if it was not unprotected
     227             :       // already.
     228     1893279 :       UnprotectAndRegisterMemoryChunk(object);
     229     1893279 :       ZapCodeObject(object->address(), size_in_bytes);
     230             :     }
     231   149782383 :     OnAllocationEvent(object, size_in_bytes);
     232             :   }
     233             : 
     234   149782819 :   return allocation;
     235             : }
     236             : 
     237   740511469 : void Heap::OnAllocationEvent(HeapObject object, int size_in_bytes) {
     238  1481202101 :   for (auto& tracker : allocation_trackers_) {
     239      358326 :     tracker->AllocationEvent(object->address(), size_in_bytes);
     240             :   }
     241             : 
     242             :   if (FLAG_verify_predictable) {
     243             :     ++allocations_count_;
     244             :     // Advance synthetic time by making a time request.
     245             :     MonotonicallyIncreasingTimeInMs();
     246             : 
     247             :     UpdateAllocationsHash(object);
     248             :     UpdateAllocationsHash(size_in_bytes);
     249             : 
     250             :     if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
     251             :       PrintAllocationsHash();
     252             :     }
     253   740511469 :   } else if (FLAG_fuzzer_gc_analysis) {
     254           0 :     ++allocations_count_;
     255   740511469 :   } else if (FLAG_trace_allocation_stack_interval > 0) {
     256           0 :     ++allocations_count_;
     257           0 :     if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) {
     258           0 :       isolate()->PrintStack(stdout, Isolate::kPrintStackConcise);
     259             :     }
     260             :   }
     261   740511469 : }
     262             : 
     263             : bool Heap::CanAllocateInReadOnlySpace() {
     264    14423092 :   return !deserialization_complete_ &&
     265       28280 :          (isolate()->serializer_enabled() ||
     266           0 :           !isolate()->initialized_from_snapshot());
     267             : }
     268             : 
     269             : void Heap::UpdateAllocationsHash(HeapObject object) {
     270             :   Address object_address = object->address();
     271             :   MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
     272             :   AllocationSpace allocation_space = memory_chunk->owner()->identity();
     273             : 
     274             :   STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
     275             :   uint32_t value =
     276             :       static_cast<uint32_t>(object_address - memory_chunk->address()) |
     277             :       (static_cast<uint32_t>(allocation_space) << kPageSizeBits);
     278             : 
     279             :   UpdateAllocationsHash(value);
     280             : }
     281             : 
     282             : void Heap::UpdateAllocationsHash(uint32_t value) {
     283             :   uint16_t c1 = static_cast<uint16_t>(value);
     284             :   uint16_t c2 = static_cast<uint16_t>(value >> 16);
     285             :   raw_allocations_hash_ =
     286             :       StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
     287             :   raw_allocations_hash_ =
     288             :       StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
     289             : }
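
The two UpdateAllocationsHash overloads above fold the page-relative address plus space tag, and then the allocation size, into a running hash two 16-bit halves at a time. A self-contained sketch of that folding step follows, using a Jenkins-style one-at-a-time mix as a stand-in for StringHasher::AddCharacterCore (the exact mixing function is an assumption, not shown in this file):

    #include <cstdint>

    // Illustrative one-at-a-time mixing step (stand-in for AddCharacterCore).
    inline uint32_t AddCharacter(uint32_t running_hash, uint16_t c) {
      running_hash += c;
      running_hash += running_hash << 10;
      running_hash ^= running_hash >> 6;
      return running_hash;
    }

    // Mirrors Heap::UpdateAllocationsHash(uint32_t): fold the low half, then
    // the high half, of a 32-bit value into the running hash.
    inline uint32_t UpdateHash(uint32_t running_hash, uint32_t value) {
      running_hash = AddCharacter(running_hash, static_cast<uint16_t>(value));
      running_hash = AddCharacter(running_hash, static_cast<uint16_t>(value >> 16));
      return running_hash;
    }
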
     290             : 
     291             : void Heap::RegisterExternalString(String string) {
     292             :   DCHECK(string->IsExternalString());
     293             :   DCHECK(!string->IsThinString());
     294       86432 :   external_string_table_.AddString(string);
     295             : }
     296             : 
     297       86410 : void Heap::FinalizeExternalString(String string) {
     298             :   DCHECK(string->IsExternalString());
     299             :   Page* page = Page::FromHeapObject(string);
     300       86410 :   ExternalString ext_string = ExternalString::cast(string);
     301             : 
     302             :   page->DecrementExternalBackingStoreBytes(
     303             :       ExternalBackingStoreType::kExternalString,
     304       86410 :       ext_string->ExternalPayloadSize());
     305             : 
     306             :   v8::String::ExternalStringResourceBase** resource_addr =
     307             :       reinterpret_cast<v8::String::ExternalStringResourceBase**>(
     308       86412 :           string->address() + ExternalString::kResourceOffset);
     309             : 
     310             :   // Dispose of the C++ object if it has not already been disposed.
     311       86412 :   if (*resource_addr != nullptr) {
     312       86299 :     (*resource_addr)->Dispose();
     313       86299 :     *resource_addr = nullptr;
     314             :   }
     315       86412 : }
     316             : 
     317             : Address Heap::NewSpaceTop() { return new_space_->top(); }
     318             : 
     319   400170181 : bool Heap::InYoungGeneration(Object object) {
     320             :   DCHECK(!HasWeakHeapObjectTag(object));
     321   792192860 :   return object->IsHeapObject() && InYoungGeneration(HeapObject::cast(object));
     322             : }
     323             : 
     324             : // static
     325           0 : bool Heap::InYoungGeneration(MaybeObject object) {
     326           0 :   HeapObject heap_object;
     327           0 :   return object->GetHeapObject(&heap_object) && InYoungGeneration(heap_object);
     328             : }
     329             : 
     330             : // static
     331   348688472 : bool Heap::InYoungGeneration(HeapObject heap_object) {
     332  1144409631 :   bool result = MemoryChunk::FromHeapObject(heap_object)->InYoungGeneration();
     333             : #ifdef DEBUG
     334             :   // If in the young generation, then check we're either not in the middle of
     335             :   // GC or the object is in to-space.
     336             :   if (result) {
     337             :     // If the object is in the young generation, then it's not in RO_SPACE so
     338             :     // this is safe.
     339             :     Heap* heap = Heap::FromWritableHeapObject(heap_object);
     340             :     DCHECK_IMPLIES(heap->gc_state_ == NOT_IN_GC, InToPage(heap_object));
     341             :   }
     342             : #endif
     343   348688472 :   return result;
     344             : }
     345             : 
     346             : // static
     347      124698 : bool Heap::InFromPage(Object object) {
     348             :   DCHECK(!HasWeakHeapObjectTag(object));
     349      249396 :   return object->IsHeapObject() && InFromPage(HeapObject::cast(object));
     350             : }
     351             : 
     352             : // static
     353    43254937 : bool Heap::InFromPage(MaybeObject object) {
     354    43254937 :   HeapObject heap_object;
     355    86616773 :   return object->GetHeapObject(&heap_object) && InFromPage(heap_object);
     356             : }
     357             : 
     358             : // static
     359   287270180 : bool Heap::InFromPage(HeapObject heap_object) {
     360   658489946 :   return MemoryChunk::FromHeapObject(heap_object)->IsFromPage();
     361             : }
     362             : 
     363             : // static
     364             : bool Heap::InToPage(Object object) {
     365             :   DCHECK(!HasWeakHeapObjectTag(object));
     366             :   return object->IsHeapObject() && InToPage(HeapObject::cast(object));
     367             : }
     368             : 
     369             : // static
     370      865025 : bool Heap::InToPage(MaybeObject object) {
     371      865025 :   HeapObject heap_object;
     372     1726688 :   return object->GetHeapObject(&heap_object) && InToPage(heap_object);
     373             : }
     374             : 
     375             : // static
     376             : bool Heap::InToPage(HeapObject heap_object) {
     377    36926521 :   return MemoryChunk::FromHeapObject(heap_object)->IsToPage();
     378             : }
     379             : 
     380         338 : bool Heap::InOldSpace(Object object) { return old_space_->Contains(object); }
     381             : 
     382             : bool Heap::InReadOnlySpace(Object object) {
     383     5112831 :   return read_only_space_->Contains(object);
     384             : }
     385             : 
     386             : // static
     387             : Heap* Heap::FromWritableHeapObject(const HeapObject obj) {
     388   409565503 :   MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
     389             :   // RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
     390             :   // find a heap. The exception is when the ReadOnlySpace is writeable, during
     391             :   // bootstrapping, so explicitly allow this case.
     392             :   SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE ||
     393             :               static_cast<ReadOnlySpace*>(chunk->owner())->writable());
     394             :   Heap* heap = chunk->heap();
     395             :   SLOW_DCHECK(heap != nullptr);
     396             :   return heap;
     397             : }
     398             : 
     399   150647213 : bool Heap::ShouldBePromoted(Address old_address) {
     400             :   Page* page = Page::FromAddress(old_address);
     401   150647213 :   Address age_mark = new_space_->age_mark();
     402   235350793 :   return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
     403    44445721 :          (!page->ContainsLimit(age_mark) || old_address < age_mark);
     404             : }
     405             : 
     406   116463920 : void Heap::CopyBlock(Address dst, Address src, int byte_size) {
     407             :   DCHECK(IsAligned(byte_size, kTaggedSize));
     408             :   STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
     409   186190088 :   CopyWords(dst, src, static_cast<size_t>(byte_size / kTaggedSize));
     410   116658208 : }
     411             : 
     412             : template <Heap::FindMementoMode mode>
     413    16963544 : AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject object) {
     414             :   Address object_address = object->address();
     415    16590611 :   Address memento_address = object_address + object->SizeFromMap(map);
     416    16578697 :   Address last_memento_word_address = memento_address + kTaggedSize;
     417             :   // If the memento would be on another page, bail out immediately.
     418    16578697 :   if (!Page::OnSamePage(object_address, last_memento_word_address)) {
     419         378 :     return AllocationMemento();
     420             :   }
     421             :   HeapObject candidate = HeapObject::FromAddress(memento_address);
     422             :   MapWordSlot candidate_map_slot = candidate->map_slot();
     423             :   // This fast check may peek at an uninitialized word. However, the slow check
     424             :   // below (memento_address == top) ensures that this is safe. Mark the word as
     425             :   // initialized to silence MemorySanitizer warnings.
     426             :   MSAN_MEMORY_IS_INITIALIZED(candidate_map_slot.address(), kTaggedSize);
     427    16580151 :   if (!candidate_map_slot.contains_value(
     428             :           ReadOnlyRoots(this).allocation_memento_map().ptr())) {
     429    14659631 :     return AllocationMemento();
     430             :   }
     431             : 
     432             :   // Bail out if the memento is below the age mark, which can happen when
     433             :   // mementos survived because a page got moved within new space.
     434             :   Page* object_page = Page::FromAddress(object_address);
     435     1920520 :   if (object_page->IsFlagSet(Page::NEW_SPACE_BELOW_AGE_MARK)) {
     436             :     Address age_mark =
     437      641851 :         reinterpret_cast<SemiSpace*>(object_page->owner())->age_mark();
     438      641851 :     if (!object_page->Contains(age_mark)) {
     439           0 :       return AllocationMemento();
     440             :     }
     441             :     // Do an exact check in the case where the age mark is on the same page.
     442      641851 :     if (object_address < age_mark) {
     443           0 :       return AllocationMemento();
     444             :     }
     445             :   }
     446             : 
     447      372933 :   AllocationMemento memento_candidate = AllocationMemento::cast(candidate);
     448             : 
     449             :   // Depending on what the memento is used for, we might need to perform
     450             :   // additional checks.
     451             :   Address top;
     452             :   switch (mode) {
     453             :     case Heap::kForGC:
     454     1547350 :       return memento_candidate;
     455             :     case Heap::kForRuntime:
     456      372933 :       if (memento_candidate.is_null()) return AllocationMemento();
     457             :       // Either the object is the last object in the new space, or there is
     458             :       // another object of at least word size (the header map word) following
     459             :       // it, so suffices to compare ptr and top here.
     460             :       top = NewSpaceTop();
     461             :       DCHECK(memento_address == top ||
     462             :              memento_address + HeapObject::kHeaderSize <= top ||
     463             :              !Page::OnSamePage(memento_address, top - 1));
     464      372933 :       if ((memento_address != top) && memento_candidate->IsValid()) {
     465      372929 :         return memento_candidate;
     466             :       }
     467           5 :       return AllocationMemento();
     468             :     default:
     469             :       UNREACHABLE();
     470             :   }
     471             :   UNREACHABLE();
     472             : }
     473             : 
     474   146530008 : void Heap::UpdateAllocationSite(Map map, HeapObject object,
     475             :                                 PretenuringFeedbackMap* pretenuring_feedback) {
     476             :   DCHECK_NE(pretenuring_feedback, &global_pretenuring_feedback_);
     477             : #ifdef DEBUG
     478             :   MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
     479             :   DCHECK_IMPLIES(chunk->IsToPage(),
     480             :                  chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION));
     481             :   DCHECK_IMPLIES(!chunk->InYoungGeneration(),
     482             :                  chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION));
     483             : #endif
     484   293102408 :   if (!FLAG_allocation_site_pretenuring ||
     485             :       !AllocationSite::CanTrack(map->instance_type())) {
     486             :     return;
     487             :   }
     488             :   AllocationMemento memento_candidate =
     489    15983742 :       FindAllocationMemento<kForGC>(map, object);
     490    15946030 :   if (memento_candidate.is_null()) return;
     491             : 
     492             :   // Entering cached feedback is used in the parallel case. We are not allowed
     493             :   // to dereference the allocation site and rather have to postpone all checks
     494             :   // till actually merging the data.
     495             :   Address key = memento_candidate->GetAllocationSiteUnchecked();
     496     3094507 :   (*pretenuring_feedback)[AllocationSite::unchecked_cast(Object(key))]++;
     497             : }
     498             : 
     499       86432 : void Heap::ExternalStringTable::AddString(String string) {
     500             :   DCHECK(string->IsExternalString());
     501             :   DCHECK(!Contains(string));
     502             : 
     503       86432 :   if (InYoungGeneration(string)) {
     504         466 :     young_strings_.push_back(string);
     505             :   } else {
     506       85966 :     old_strings_.push_back(string);
     507             :   }
     508       86432 : }
     509             : 
     510    30258337 : Oddball Heap::ToBoolean(bool condition) {
     511             :   ReadOnlyRoots roots(this);
     512    60516674 :   return condition ? roots.true_value() : roots.false_value();
     513             : }
     514             : 
     515     2905597 : int Heap::NextScriptId() {
     516     5811195 :   int last_id = last_script_id()->value();
     517     2905598 :   if (last_id == Smi::kMaxValue) last_id = v8::UnboundScript::kNoScriptId;
     518     2905598 :   last_id++;
     519     2905598 :   set_last_script_id(Smi::FromInt(last_id));
     520     2905598 :   return last_id;
     521             : }
     522             : 
     523          55 : int Heap::NextDebuggingId() {
     524         110 :   int last_id = last_debugging_id()->value();
     525          55 :   if (last_id == DebugInfo::DebuggingIdBits::kMax) {
     526             :     last_id = DebugInfo::kNoDebuggingId;
     527             :   }
     528          55 :   last_id++;
     529          55 :   set_last_debugging_id(Smi::FromInt(last_id));
     530          55 :   return last_id;
     531             : }
     532             : 
     533     4284416 : int Heap::GetNextTemplateSerialNumber() {
     534     8568832 :   int next_serial_number = next_template_serial_number()->value() + 1;
     535     4284416 :   set_next_template_serial_number(Smi::FromInt(next_serial_number));
     536     4284416 :   return next_serial_number;
     537             : }
     538             : 
     539             : int Heap::MaxNumberToStringCacheSize() const {
     540             :   // Compute the size of the number string cache based on the max newspace size.
     541             :   // The number string cache has a minimum size based on twice the initial cache
     542             :   // size to ensure that it is bigger after being made 'full size'.
     543    28727400 :   size_t number_string_cache_size = max_semi_space_size_ / 512;
     544             :   number_string_cache_size =
     545             :       Max(static_cast<size_t>(kInitialNumberStringCacheSize * 2),
     546             :           Min<size_t>(0x4000u, number_string_cache_size));
     547             :   // There is a string and a number per entry so the length is twice the number
     548             :   // of entries.
     549    28727400 :   return static_cast<int>(number_string_cache_size * 2);
     550             : }
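
As a worked restatement of the sizing rule above, here is a standalone sketch; the initial cache size of 256 is an assumption mirroring V8's kInitialNumberStringCacheSize, which is defined elsewhere:

    #include <algorithm>
    #include <cstddef>

    // Standalone re-statement of MaxNumberToStringCacheSize (illustrative only).
    int MaxNumberToStringCacheLength(size_t max_semi_space_size) {
      constexpr size_t kInitialCache = 256;  // assumed initial cache size
      size_t entries = max_semi_space_size / 512;
      entries = std::max(kInitialCache * 2, std::min<size_t>(0x4000u, entries));
      return static_cast<int>(entries * 2);  // one string + one number per entry
    }

    // E.g. an 8 MiB semi-space gives 16384 entries (length 32768), while a
    // 128 KiB semi-space is clamped up to the 512-entry floor (length 1024).
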
     551             : 
     552             : void Heap::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
     553             :                                               size_t amount) {
     554             :   base::CheckedIncrement(&backing_store_bytes_, amount);
     555             :   // TODO(mlippautz): Implement interrupt for global memory allocations that can
     556             :   // trigger garbage collections.
     557             : }
     558             : 
     559             : void Heap::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
     560             :                                               size_t amount) {
     561             :   base::CheckedDecrement(&backing_store_bytes_, amount);
     562             : }
     563             : 
     564             : AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
     565             :     : heap_(isolate->heap()) {
     566             :   heap_->always_allocate_scope_count_++;
     567             : }
     568             : 
     569             : AlwaysAllocateScope::~AlwaysAllocateScope() {
     570             :   heap_->always_allocate_scope_count_--;
     571             : }
     572             : 
     573      420615 : CodeSpaceMemoryModificationScope::CodeSpaceMemoryModificationScope(Heap* heap)
     574      420615 :     : heap_(heap) {
     575     1261739 :   if (heap_->write_protect_code_memory()) {
     576          56 :     heap_->increment_code_space_memory_modification_scope_depth();
     577      841178 :     heap_->code_space()->SetReadAndWritable();
     578      420619 :     LargePage* page = heap_->code_lo_space()->first_page();
     579     1551389 :     while (page != nullptr) {
     580             :       DCHECK(page->IsFlagSet(MemoryChunk::IS_EXECUTABLE));
     581     1420302 :       CHECK(heap_->memory_allocator()->IsMemoryChunkExecutable(page));
     582      710151 :       page->SetReadAndWritable();
     583           0 :       page = page->next_page();
     584             :     }
     585             :   }
     586      420617 : }
     587             : 
     588      420617 : CodeSpaceMemoryModificationScope::~CodeSpaceMemoryModificationScope() {
     589     1219983 :   if (heap_->write_protect_code_memory()) {
     590       20935 :     heap_->decrement_code_space_memory_modification_scope_depth();
     591      820299 :     heap_->code_space()->SetDefaultCodePermissions();
     592      420619 :     LargePage* page = heap_->code_lo_space()->first_page();
     593     1551374 :     while (page != nullptr) {
     594             :       DCHECK(page->IsFlagSet(MemoryChunk::IS_EXECUTABLE));
     595     1413650 :       CHECK(heap_->memory_allocator()->IsMemoryChunkExecutable(page));
     596      710136 :       page->SetDefaultCodePermissions();
     597        6622 :       page = page->next_page();
     598             :     }
     599             :   }
     600      420619 : }
     601             : 
     602             : CodePageCollectionMemoryModificationScope::
     603             :     CodePageCollectionMemoryModificationScope(Heap* heap)
     604     1893265 :     : heap_(heap) {
     605     1893265 :   if (heap_->write_protect_code_memory() &&
     606             :       !heap_->code_space_memory_modification_scope_depth()) {
     607             :     heap_->EnableUnprotectedMemoryChunksRegistry();
     608             :   }
     609             : }
     610             : 
     611     1893267 : CodePageCollectionMemoryModificationScope::
     612             :     ~CodePageCollectionMemoryModificationScope() {
     613     1893267 :   if (heap_->write_protect_code_memory() &&
     614             :       !heap_->code_space_memory_modification_scope_depth()) {
     615     1758364 :     heap_->ProtectUnprotectedMemoryChunks();
     616     1758363 :     heap_->DisableUnprotectedMemoryChunksRegistry();
     617             :   }
     618     1893266 : }
     619             : 
     620     1293192 : CodePageMemoryModificationScope::CodePageMemoryModificationScope(
     621             :     MemoryChunk* chunk)
     622             :     : chunk_(chunk),
     623     2586361 :       scope_active_(chunk_->heap()->write_protect_code_memory() &&
     624     2586384 :                     chunk_->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {
     625     1293192 :   if (scope_active_) {
     626             :     DCHECK(chunk_->owner()->identity() == CODE_SPACE ||
     627             :            (chunk_->owner()->identity() == CODE_LO_SPACE));
     628      501098 :     chunk_->SetReadAndWritable();
     629             :   }
     630     1293192 : }
     631             : 
     632             : CodePageMemoryModificationScope::~CodePageMemoryModificationScope() {
     633     1293394 :   if (scope_active_) {
     634      501097 :     chunk_->SetDefaultCodePermissions();
     635             :   }
     636             : }
     637             : 
     638             : }  // namespace internal
     639             : }  // namespace v8
     640             : 
     641             : #endif  // V8_HEAP_HEAP_INL_H_

Generated by: LCOV version 1.10