LCOV - code coverage report
Current view: top level - src/heap - heap-inl.h (source / functions) Hit Total Coverage
Test: app.info Lines: 200 214 93.5 %
Date: 2019-01-20 Functions: 289 295 98.0 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_HEAP_INL_H_
       6             : #define V8_HEAP_HEAP_INL_H_
       7             : 
       8             : #include <cmath>
       9             : 
      10             : // Clients of this interface shouldn't depend on lots of heap internals.
      11             : // Do not include anything from src/heap other than src/heap/heap.h and its
      12             : // write barrier here!
      13             : #include "src/heap/heap-write-barrier.h"
      14             : #include "src/heap/heap.h"
      15             : 
      16             : #include "src/base/atomic-utils.h"
      17             : #include "src/base/platform/platform.h"
      18             : #include "src/feedback-vector.h"
      19             : 
      20             : // TODO(mstarzinger): There is one more include to remove in order to no longer
      21             : // leak heap internals to users of this interface!
      22             : #include "src/heap/spaces-inl.h"
      23             : #include "src/isolate-data.h"
      24             : #include "src/isolate.h"
      25             : #include "src/log.h"
      26             : #include "src/msan.h"
      27             : #include "src/objects-inl.h"
      28             : #include "src/objects/allocation-site-inl.h"
      29             : #include "src/objects/api-callbacks-inl.h"
      30             : #include "src/objects/cell-inl.h"
      31             : #include "src/objects/descriptor-array.h"
      32             : #include "src/objects/feedback-cell-inl.h"
      33             : #include "src/objects/literal-objects-inl.h"
      34             : #include "src/objects/oddball.h"
      35             : #include "src/objects/property-cell.h"
      36             : #include "src/objects/scope-info.h"
      37             : #include "src/objects/script-inl.h"
      38             : #include "src/objects/struct-inl.h"
      39             : #include "src/profiler/heap-profiler.h"
      40             : #include "src/string-hasher.h"
      41             : #include "src/zone/zone-list-inl.h"
      42             : 
      43             : // The following header includes the write barrier essentials that can also be
      44             : // used stand-alone without including heap-inl.h.
      45             : // TODO(mlippautz): Remove once users of object-macros.h include this file on
      46             : // their own.
      47             : #include "src/heap/heap-write-barrier-inl.h"
      48             : 
      49             : namespace v8 {
      50             : namespace internal {
      51             : 
      52             : AllocationSpace AllocationResult::RetrySpace() {
      53             :   DCHECK(IsRetry());
      54       20158 :   return static_cast<AllocationSpace>(Smi::ToInt(object_));
      55             : }
      56             : 
      57         258 : HeapObject AllocationResult::ToObjectChecked() {
      58         258 :   CHECK(!IsRetry());
      59         258 :   return HeapObject::cast(object_);
      60             : }
      61             : 
      62  3963327457 : Isolate* Heap::isolate() {
      63             :   return reinterpret_cast<Isolate*>(
      64 10798812790 :       reinterpret_cast<intptr_t>(this) -
      65 10798812791 :       reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
      66             : }
      67             : 
      68             : int64_t Heap::external_memory() {
      69         246 :   return isolate()->isolate_data()->external_memory_;
      70             : }
      71             : 
      72             : void Heap::update_external_memory(int64_t delta) {
      73        5571 :   isolate()->isolate_data()->external_memory_ += delta;
      74             : }
      75             : 
      76             : void Heap::update_external_memory_concurrently_freed(intptr_t freed) {
      77       17902 :   external_memory_concurrently_freed_ += freed;
      78             : }
      79             : 
      80             : void Heap::account_external_memory_concurrently_freed() {
      81      381156 :   isolate()->isolate_data()->external_memory_ -=
      82      381156 :       external_memory_concurrently_freed_;
      83      190578 :   external_memory_concurrently_freed_ = 0;
      84             : }
      85             : 
      86   450030277 : RootsTable& Heap::roots_table() { return isolate()->roots_table(); }
      87             : 
      88             : #define ROOT_ACCESSOR(Type, name, CamelName)                           \
      89             :   Type Heap::name() {                                                  \
      90             :     return Type::cast(Object(roots_table()[RootIndex::k##CamelName])); \
      91             :   }
      92  1217426153 : MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
      93             : #undef ROOT_ACCESSOR
      94             : 
      95             : #define ROOT_ACCESSOR(type, name, CamelName)                                   \
      96             :   void Heap::set_##name(type value) {                                          \
      97             :     /* The deserializer makes use of the fact that these common roots are */   \
      98             :     /* never in new space and never on a page that is being compacted.    */   \
      99             :     DCHECK_IMPLIES(deserialization_complete(),                                 \
     100             :                    !RootsTable::IsImmortalImmovable(RootIndex::k##CamelName)); \
     101             :     DCHECK_IMPLIES(RootsTable::IsImmortalImmovable(RootIndex::k##CamelName),   \
     102             :                    IsImmovable(HeapObject::cast(value)));                      \
     103             :     roots_table()[RootIndex::k##CamelName] = value->ptr();                     \
     104             :   }
     105    42370038 : ROOT_LIST(ROOT_ACCESSOR)
     106             : #undef ROOT_ACCESSOR
     107             : 
     108             : void Heap::SetRootMaterializedObjects(FixedArray objects) {
     109          50 :   roots_table()[RootIndex::kMaterializedObjects] = objects->ptr();
     110             : }
     111             : 
     112             : void Heap::SetRootScriptList(Object value) {
     113         214 :   roots_table()[RootIndex::kScriptList] = value->ptr();
     114             : }
     115             : 
     116             : void Heap::SetRootStringTable(StringTable value) {
     117    14779674 :   roots_table()[RootIndex::kStringTable] = value->ptr();
     118             : }
     119             : 
     120             : void Heap::SetRootNoScriptSharedFunctionInfos(Object value) {
     121           0 :   roots_table()[RootIndex::kNoScriptSharedFunctionInfos] = value->ptr();
     122             : }
     123             : 
     124             : void Heap::SetMessageListeners(TemplateList value) {
     125       30087 :   roots_table()[RootIndex::kMessageListeners] = value->ptr();
     126             : }
     127             : 
     128             : PagedSpace* Heap::paged_space(int idx) {
     129             :   DCHECK_NE(idx, LO_SPACE);
     130             :   DCHECK_NE(idx, NEW_SPACE);
     131             :   DCHECK_NE(idx, CODE_LO_SPACE);
     132             :   DCHECK_NE(idx, NEW_LO_SPACE);
     133     3388650 :   return static_cast<PagedSpace*>(space_[idx]);
     134             : }
     135             : 
     136     8002120 : Space* Heap::space(int idx) { return space_[idx]; }
     137             : 
     138             : Address* Heap::NewSpaceAllocationTopAddress() {
     139             :   return new_space_->allocation_top_address();
     140             : }
     141             : 
     142             : Address* Heap::NewSpaceAllocationLimitAddress() {
     143             :   return new_space_->allocation_limit_address();
     144             : }
     145             : 
     146             : Address* Heap::OldSpaceAllocationTopAddress() {
     147             :   return old_space_->allocation_top_address();
     148             : }
     149             : 
     150             : Address* Heap::OldSpaceAllocationLimitAddress() {
     151             :   return old_space_->allocation_limit_address();
     152             : }
     153             : 
     154             : void Heap::UpdateNewSpaceAllocationCounter() {
     155      107086 :   new_space_allocation_counter_ = NewSpaceAllocationCounter();
     156             : }
     157             : 
     158      214753 : size_t Heap::NewSpaceAllocationCounter() {
     159      429501 :   return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
     160             : }
     161             : 
     162   317799961 : AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     163     2200383 :                                    AllocationAlignment alignment) {
     164             :   DCHECK(AllowHandleAllocation::IsAllowed());
     165             :   DCHECK(AllowHeapAllocation::IsAllowed());
     166             :   DCHECK(gc_state_ == NOT_IN_GC);
     167             : #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
     168             :   if (FLAG_random_gc_interval > 0 || FLAG_gc_interval >= 0) {
     169             :     if (!always_allocate() && Heap::allocation_timeout_-- <= 0) {
     170             :       return AllocationResult::Retry(space);
     171             :     }
     172             :   }
     173             : #endif
     174             : #ifdef DEBUG
     175             :   IncrementObjectCounters();
     176             : #endif
     177             : 
     178   317799961 :   bool large_object = size_in_bytes > kMaxRegularHeapObjectSize;
     179             : 
     180   317799961 :   HeapObject object;
     181             :   AllocationResult allocation;
     182   317799961 :   if (NEW_SPACE == space) {
     183   159765720 :     if (large_object) {
     184             :       // TODO(hpayer): Implement a LO tenuring strategy.
     185        8222 :       space = FLAG_young_generation_large_objects ? NEW_LO_SPACE : LO_SPACE;
     186             :     } else {
     187   319515009 :       allocation = new_space_->AllocateRaw(size_in_bytes, alignment);
     188   159757511 :       if (allocation.To(&object)) {
     189   159737542 :         OnAllocationEvent(object, size_in_bytes);
     190             :       }
     191   159757453 :       return allocation;
     192             :     }
     193             :   }
     194             : 
     195             :   // Here we only allocate in the old generation.
     196   158042463 :   if (OLD_SPACE == space) {
     197   124995616 :     if (large_object) {
     198         966 :       allocation = lo_space_->AllocateRaw(size_in_bytes);
     199             :     } else {
     200   124994650 :       allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
     201             :     }
     202    33046847 :   } else if (CODE_SPACE == space) {
     203     4400766 :     if (size_in_bytes <= code_space()->AreaSize() && !large_object) {
     204     2200344 :       allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
     205             :     } else {
     206          39 :       allocation = code_lo_space_->AllocateRaw(size_in_bytes);
     207             :     }
     208    30846464 :   } else if (LO_SPACE == space) {
     209             :     DCHECK(large_object);
     210        9221 :     allocation = lo_space_->AllocateRaw(size_in_bytes);
     211    30837243 :   } else if (NEW_LO_SPACE == space) {
     212             :     DCHECK(FLAG_young_generation_large_objects);
     213           0 :     allocation = new_lo_space_->AllocateRaw(size_in_bytes);
     214    30837243 :   } else if (CODE_LO_SPACE == space) {
     215             :     DCHECK(large_object);
     216           0 :     allocation = code_lo_space_->AllocateRaw(size_in_bytes);
     217    30837243 :   } else if (MAP_SPACE == space) {
     218    30570459 :     allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
     219      266784 :   } else if (RO_SPACE == space) {
     220             : #ifdef V8_USE_SNAPSHOT
     221             :     DCHECK(isolate_->serializer_enabled());
     222             : #endif
     223             :     DCHECK(!large_object);
     224             :     DCHECK(CanAllocateInReadOnlySpace());
     225      266784 :     allocation = read_only_space_->AllocateRaw(size_in_bytes, alignment);
     226             :   } else {
     227             :     // NEW_SPACE is not allowed here.
     228           0 :     UNREACHABLE();
     229             :   }
     230             : 
     231   158042562 :   if (allocation.To(&object)) {
     232   158041820 :     if (space == CODE_SPACE) {
     233             :       // Unprotect the memory chunk of the object if it was not unprotected
     234             :       // already.
     235     2200385 :       UnprotectAndRegisterMemoryChunk(object);
     236     2200386 :       ZapCodeObject(object->address(), size_in_bytes);
     237             :     }
     238   158041820 :     OnAllocationEvent(object, size_in_bytes);
     239             :   }
     240             : 
     241   158042389 :   return allocation;
     242             : }
     243             : 
     244   753524282 : void Heap::OnAllocationEvent(HeapObject object, int size_in_bytes) {
     245  1507228136 :   for (auto& tracker : allocation_trackers_) {
     246      359144 :     tracker->AllocationEvent(object->address(), size_in_bytes);
     247             :   }
     248             : 
     249             :   if (FLAG_verify_predictable) {
     250             :     ++allocations_count_;
     251             :     // Advance synthetic time by making a time request.
     252             :     MonotonicallyIncreasingTimeInMs();
     253             : 
     254             :     UpdateAllocationsHash(object);
     255             :     UpdateAllocationsHash(size_in_bytes);
     256             : 
     257             :     if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
     258             :       PrintAllocationsHash();
     259             :     }
     260   753524282 :   } else if (FLAG_fuzzer_gc_analysis) {
     261           0 :     ++allocations_count_;
     262   753524282 :   } else if (FLAG_trace_allocation_stack_interval > 0) {
     263           0 :     ++allocations_count_;
     264           0 :     if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) {
     265           0 :       isolate()->PrintStack(stdout, Isolate::kPrintStackConcise);
     266             :     }
     267             :   }
     268   753524282 : }
     269             : 
     270      729124 : void Heap::OnMoveEvent(HeapObject target, HeapObject source,
     271             :                        int size_in_bytes) {
     272      731313 :   HeapProfiler* heap_profiler = isolate_->heap_profiler();
     273      729124 :   if (heap_profiler->is_tracking_object_moves()) {
     274             :     heap_profiler->ObjectMoveEvent(source->address(), target->address(),
     275      171205 :                                    size_in_bytes);
     276             :   }
     277     1612660 :   for (auto& tracker : allocation_trackers_) {
     278      308644 :     tracker->MoveEvent(source->address(), target->address(), size_in_bytes);
     279             :   }
     280      728776 :   if (target->IsSharedFunctionInfo()) {
     281        6447 :     LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(),
     282             :                                                          target->address()));
     283             :   }
     284             : 
     285             :   if (FLAG_verify_predictable) {
     286             :     ++allocations_count_;
     287             :     // Advance synthetic time by making a time request.
     288             :     MonotonicallyIncreasingTimeInMs();
     289             : 
     290             :     UpdateAllocationsHash(source);
     291             :     UpdateAllocationsHash(target);
     292             :     UpdateAllocationsHash(size_in_bytes);
     293             : 
     294             :     if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
     295             :       PrintAllocationsHash();
     296             :     }
     297      728774 :   } else if (FLAG_fuzzer_gc_analysis) {
     298           0 :     ++allocations_count_;
     299             :   }
     300      728774 : }
     301             : 
     302             : bool Heap::CanAllocateInReadOnlySpace() {
     303    14704392 :   return !deserialization_complete_ &&
     304       28336 :          (isolate()->serializer_enabled() ||
     305           0 :           !isolate()->initialized_from_snapshot());
     306             : }
     307             : 
     308             : void Heap::UpdateAllocationsHash(HeapObject object) {
     309             :   Address object_address = object->address();
     310             :   MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
     311             :   AllocationSpace allocation_space = memory_chunk->owner()->identity();
     312             : 
     313             :   STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
     314             :   uint32_t value =
     315             :       static_cast<uint32_t>(object_address - memory_chunk->address()) |
     316             :       (static_cast<uint32_t>(allocation_space) << kPageSizeBits);
     317             : 
     318             :   UpdateAllocationsHash(value);
     319             : }
     320             : 
     321             : void Heap::UpdateAllocationsHash(uint32_t value) {
     322             :   uint16_t c1 = static_cast<uint16_t>(value);
     323             :   uint16_t c2 = static_cast<uint16_t>(value >> 16);
     324             :   raw_allocations_hash_ =
     325             :       StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
     326             :   raw_allocations_hash_ =
     327             :       StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
     328             : }
     329             : 
     330             : void Heap::RegisterExternalString(String string) {
     331             :   DCHECK(string->IsExternalString());
     332             :   DCHECK(!string->IsThinString());
     333       88356 :   external_string_table_.AddString(string);
     334             : }
     335             : 
     336       88383 : void Heap::UpdateExternalString(String string, size_t old_payload,
     337             :                                 size_t new_payload) {
     338             :   DCHECK(string->IsExternalString());
     339             :   Page* page = Page::FromHeapObject(string);
     340             : 
     341       88383 :   if (old_payload > new_payload)
     342             :     page->DecrementExternalBackingStoreBytes(
     343          17 :         ExternalBackingStoreType::kExternalString, old_payload - new_payload);
     344             :   else
     345             :     page->IncrementExternalBackingStoreBytes(
     346       88366 :         ExternalBackingStoreType::kExternalString, new_payload - old_payload);
     347       88384 : }
     348             : 
     349       88329 : void Heap::FinalizeExternalString(String string) {
     350             :   DCHECK(string->IsExternalString());
     351             :   Page* page = Page::FromHeapObject(string);
     352       88329 :   ExternalString ext_string = ExternalString::cast(string);
     353             : 
     354             :   page->DecrementExternalBackingStoreBytes(
     355             :       ExternalBackingStoreType::kExternalString,
     356       88329 :       ext_string->ExternalPayloadSize());
     357             : 
     358             :   v8::String::ExternalStringResourceBase** resource_addr =
     359             :       reinterpret_cast<v8::String::ExternalStringResourceBase**>(
     360       88336 :           string->address() + ExternalString::kResourceOffset);
     361             : 
     362             :   // Dispose of the C++ object if it has not already been disposed.
     363       88336 :   if (*resource_addr != nullptr) {
     364       88223 :     (*resource_addr)->Dispose();
     365       88223 :     *resource_addr = nullptr;
     366             :   }
     367       88336 : }
     368             : 
     369             : Address Heap::NewSpaceTop() { return new_space_->top(); }
     370             : 
     371             : // static
     372   762863161 : bool Heap::InNewSpace(Object object) {
     373             :   DCHECK(!HasWeakHeapObjectTag(object));
     374  1517596609 :   return object->IsHeapObject() && InNewSpace(HeapObject::cast(object));
     375             : }
     376             : 
     377             : // static
     378           0 : bool Heap::InNewSpace(MaybeObject object) {
     379           0 :   HeapObject heap_object;
     380           0 :   return object->GetHeapObject(&heap_object) && InNewSpace(heap_object);
     381             : }
     382             : 
     383             : // static
     384   356390652 : bool Heap::InNewSpace(HeapObject heap_object) {
     385             :   // Inlined check from NewSpace::Contains.
     386  1162163749 :   bool result = MemoryChunk::FromHeapObject(heap_object)->InNewSpace();
     387             : #ifdef DEBUG
     388             :   // If in NEW_SPACE, then check we're either not in the middle of GC or the
     389             :   // object is in to-space.
     390             :   if (result) {
     391             :     // If the object is in NEW_SPACE, then it's not in RO_SPACE so this is safe.
     392             :     Heap* heap = Heap::FromWritableHeapObject(heap_object);
     393             :     DCHECK(heap->gc_state_ != NOT_IN_GC || InToSpace(heap_object));
     394             :   }
     395             : #endif
     396   356390652 :   return result;
     397             : }
     398             : 
     399             : // static
     400      128815 : bool Heap::InFromSpace(Object object) {
     401             :   DCHECK(!HasWeakHeapObjectTag(object));
     402      257630 :   return object->IsHeapObject() && InFromSpace(HeapObject::cast(object));
     403             : }
     404             : 
     405             : // static
     406    40762199 : bool Heap::InFromSpace(MaybeObject object) {
     407    40762199 :   HeapObject heap_object;
     408    81629486 :   return object->GetHeapObject(&heap_object) && InFromSpace(heap_object);
     409             : }
     410             : 
     411             : // static
     412   286994677 : bool Heap::InFromSpace(HeapObject heap_object) {
     413             :   return MemoryChunk::FromHeapObject(heap_object)
     414   286994677 :       ->IsFlagSet(Page::IN_FROM_SPACE);
     415             : }
     416             : 
     417             : // static
     418             : bool Heap::InToSpace(Object object) {
     419             :   DCHECK(!HasWeakHeapObjectTag(object));
     420             :   return object->IsHeapObject() && InToSpace(HeapObject::cast(object));
     421             : }
     422             : 
     423             : // static
     424      673216 : bool Heap::InToSpace(MaybeObject object) {
     425      673216 :   HeapObject heap_object;
     426     1344017 :   return object->GetHeapObject(&heap_object) && InToSpace(heap_object);
     427             : }
     428             : 
     429             : // static
     430        7396 : bool Heap::InToSpace(HeapObject heap_object) {
     431        7396 :   return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
     432             : }
     433             : 
     434         322 : bool Heap::InOldSpace(Object object) { return old_space_->Contains(object); }
     435             : 
     436             : bool Heap::InReadOnlySpace(Object object) {
     437     1837708 :   return read_only_space_->Contains(object);
     438             : }
     439             : 
     440             : // static
     441             : Heap* Heap::FromWritableHeapObject(const HeapObject obj) {
     442   505732351 :   MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
     443             :   // RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
     444             :   // find a heap. The exception is when the ReadOnlySpace is writeable, during
     445             :   // bootstrapping, so explicitly allow this case.
     446             :   SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE ||
     447             :               static_cast<ReadOnlySpace*>(chunk->owner())->writable());
     448             :   Heap* heap = chunk->heap();
     449             :   SLOW_DCHECK(heap != nullptr);
     450             :   return heap;
     451             : }
     452             : 
     453   152263174 : bool Heap::ShouldBePromoted(Address old_address) {
     454             :   Page* page = Page::FromAddress(old_address);
     455   152263174 :   Address age_mark = new_space_->age_mark();
     456   238411245 :   return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
     457    45871974 :          (!page->ContainsLimit(age_mark) || old_address < age_mark);
     458             : }
     459             : 
     460   115927248 : void Heap::CopyBlock(Address dst, Address src, int byte_size) {
     461             :   DCHECK(IsAligned(byte_size, kTaggedSize));
     462             :   STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
     463   187684155 :   CopyWords(dst, src, static_cast<size_t>(byte_size / kTaggedSize));
     464   116186904 : }
     465             : 
     466             : template <Heap::FindMementoMode mode>
     467    17141508 : AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject object) {
     468             :   Address object_address = object->address();
     469    16774520 :   Address memento_address = object_address + object->SizeFromMap(map);
     470    16771304 :   Address last_memento_word_address = memento_address + kTaggedSize;
     471             :   // If the memento would be on another page, bail out immediately.
     472    16771304 :   if (!Page::OnSamePage(object_address, last_memento_word_address)) {
     473         274 :     return AllocationMemento();
     474             :   }
     475             :   HeapObject candidate = HeapObject::FromAddress(memento_address);
     476             :   MapWordSlot candidate_map_slot = candidate->map_slot();
     477             :   // This fast check may peek at an uninitialized word. However, the slow check
     478             :   // below (memento_address == top) ensures that this is safe. Mark the word as
     479             :   // initialized to silence MemorySanitizer warnings.
     480             :   MSAN_MEMORY_IS_INITIALIZED(candidate_map_slot.address(), kTaggedSize);
     481    16753561 :   if (!candidate_map_slot.contains_value(
     482             :           ReadOnlyRoots(this).allocation_memento_map().ptr())) {
     483    14855487 :     return AllocationMemento();
     484             :   }
     485             : 
     486             :   // Bail out if the memento is below the age mark, which can happen when
     487             :   // mementos survived because a page got moved within new space.
     488             :   Page* object_page = Page::FromAddress(object_address);
     489     1898074 :   if (object_page->IsFlagSet(Page::NEW_SPACE_BELOW_AGE_MARK)) {
     490             :     Address age_mark =
     491      687910 :         reinterpret_cast<SemiSpace*>(object_page->owner())->age_mark();
     492      687910 :     if (!object_page->Contains(age_mark)) {
     493           3 :       return AllocationMemento();
     494             :     }
     495             :     // Do an exact check in the case where the age mark is on the same page.
     496      687907 :     if (object_address < age_mark) {
     497           0 :       return AllocationMemento();
     498             :     }
     499             :   }
     500             : 
     501      366988 :   AllocationMemento memento_candidate = AllocationMemento::cast(candidate);
     502             : 
     503             :   // Depending on what the memento is used for, we might need to perform
     504             :   // additional checks.
     505             :   Address top;
     506             :   switch (mode) {
     507             :     case Heap::kForGC:
     508     1530855 :       return memento_candidate;
     509             :     case Heap::kForRuntime:
     510      366988 :       if (memento_candidate.is_null()) return AllocationMemento();
     511             :       // Either the object is the last object in the new space, or there is
     512             :       // another object of at least word size (the header map word) following
     513             :       // it, so suffices to compare ptr and top here.
     514             :       top = NewSpaceTop();
     515             :       DCHECK(memento_address == top ||
     516             :              memento_address + HeapObject::kHeaderSize <= top ||
     517             :              !Page::OnSamePage(memento_address, top - 1));
     518      366988 :       if ((memento_address != top) && memento_candidate->IsValid()) {
     519      366980 :         return memento_candidate;
     520             :       }
     521          11 :       return AllocationMemento();
     522             :     default:
     523             :       UNREACHABLE();
     524             :   }
     525             :   UNREACHABLE();
     526             : }
     527             : 
     528   145909617 : void Heap::UpdateAllocationSite(Map map, HeapObject object,
     529             :                                 PretenuringFeedbackMap* pretenuring_feedback) {
     530             :   DCHECK_NE(pretenuring_feedback, &global_pretenuring_feedback_);
     531             :   DCHECK(InFromSpace(object) ||
     532             :          (InToSpace(object) && Page::FromHeapObject(object)->IsFlagSet(
     533             :                                    Page::PAGE_NEW_NEW_PROMOTION)) ||
     534             :          (!InNewSpace(object) && Page::FromHeapObject(object)->IsFlagSet(
     535             :                                      Page::PAGE_NEW_OLD_PROMOTION)));
     536   291844799 :   if (!FLAG_allocation_site_pretenuring ||
     537             :       !AllocationSite::CanTrack(map->instance_type())) {
     538   144375879 :     return;
     539             :   }
     540             :   AllocationMemento memento_candidate =
     541    16133640 :       FindAllocationMemento<kForGC>(map, object);
     542    16130744 :   if (memento_candidate.is_null()) return;
     543             : 
     544             :   // Entering cached feedback is used in the parallel case. We are not allowed
     545             :   // to dereference the allocation site and rather have to postpone all checks
     546             :   // till actually merging the data.
     547             :   Address key = memento_candidate->GetAllocationSiteUnchecked();
     548     3061515 :   (*pretenuring_feedback)[AllocationSite::unchecked_cast(Object(key))]++;
     549             : }
     550             : 
     551       88353 : void Heap::ExternalStringTable::AddString(String string) {
     552             :   DCHECK(string->IsExternalString());
     553             :   DCHECK(!Contains(string));
     554             : 
     555       88353 :   if (InNewSpace(string)) {
     556         466 :     new_space_strings_.push_back(string);
     557             :   } else {
     558       87887 :     old_space_strings_.push_back(string);
     559             :   }
     560       88354 : }
     561             : 
     562    30285947 : Oddball Heap::ToBoolean(bool condition) {
     563             :   ReadOnlyRoots roots(this);
     564    60571894 :   return condition ? roots.true_value() : roots.false_value();
     565             : }
     566             : 
     567   265736661 : uint64_t Heap::HashSeed() {
     568             :   uint64_t seed;
     569             :   ReadOnlyRoots(this).hash_seed()->copy_out(0, reinterpret_cast<byte*>(&seed),
     570             :                                             kInt64Size);
     571             :   DCHECK(FLAG_randomize_hashes || seed == 0);
     572   265736496 :   return seed;
     573             : }
     574             : 
     575     3187727 : int Heap::NextScriptId() {
     576     6375455 :   int last_id = last_script_id()->value();
     577     3187728 :   if (last_id == Smi::kMaxValue) last_id = v8::UnboundScript::kNoScriptId;
     578     3187728 :   last_id++;
     579     3187728 :   set_last_script_id(Smi::FromInt(last_id));
     580     3187728 :   return last_id;
     581             : }
     582             : 
     583          55 : int Heap::NextDebuggingId() {
     584         110 :   int last_id = last_debugging_id()->value();
     585          55 :   if (last_id == DebugInfo::DebuggingIdBits::kMax) {
     586             :     last_id = DebugInfo::kNoDebuggingId;
     587             :   }
     588          55 :   last_id++;
     589          55 :   set_last_debugging_id(Smi::FromInt(last_id));
     590          55 :   return last_id;
     591             : }
     592             : 
     593     4552239 : int Heap::GetNextTemplateSerialNumber() {
     594     9104478 :   int next_serial_number = next_template_serial_number()->value() + 1;
     595     4552239 :   set_next_template_serial_number(Smi::FromInt(next_serial_number));
     596     4552238 :   return next_serial_number;
     597             : }
     598             : 
     599             : int Heap::MaxNumberToStringCacheSize() const {
     600             :   // Compute the size of the number string cache based on the max newspace size.
     601             :   // The number string cache has a minimum size based on twice the initial cache
     602             :   // size to ensure that it is bigger after being made 'full size'.
     603    28708599 :   size_t number_string_cache_size = max_semi_space_size_ / 512;
     604             :   number_string_cache_size =
     605             :       Max(static_cast<size_t>(kInitialNumberStringCacheSize * 2),
     606             :           Min<size_t>(0x4000u, number_string_cache_size));
     607             :   // There is a string and a number per entry so the length is twice the number
     608             :   // of entries.
     609    28708599 :   return static_cast<int>(number_string_cache_size * 2);
     610             : }
     611             : 
     612             : void Heap::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
     613             :                                               size_t amount) {
     614             :   base::CheckedIncrement(&backing_store_bytes_, amount);
     615             :   // TODO(mlippautz): Implement interrupt for global memory allocations that can
     616             :   // trigger garbage collections.
     617             : }
     618             : 
     619             : void Heap::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
     620             :                                               size_t amount) {
     621             :   base::CheckedDecrement(&backing_store_bytes_, amount);
     622             : }
     623             : 
     624             : AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
     625             :     : heap_(isolate->heap()) {
     626             :   heap_->always_allocate_scope_count_++;
     627             : }
     628             : 
     629             : AlwaysAllocateScope::~AlwaysAllocateScope() {
     630             :   heap_->always_allocate_scope_count_--;
     631             : }
     632             : 
     633      465724 : CodeSpaceMemoryModificationScope::CodeSpaceMemoryModificationScope(Heap* heap)
     634      465724 :     : heap_(heap) {
     635     2066204 :   if (heap_->write_protect_code_memory()) {
     636             :     heap_->increment_code_space_memory_modification_scope_depth();
     637      931448 :     heap_->code_space()->SetReadAndWritable();
     638      465726 :     LargePage* page = heap_->code_lo_space()->first_page();
     639     1600482 :     while (page != nullptr) {
     640             :       DCHECK(page->IsFlagSet(MemoryChunk::IS_EXECUTABLE));
     641     1338060 :       CHECK(heap_->memory_allocator()->IsMemoryChunkExecutable(page));
     642      669030 :       page->SetReadAndWritable();
     643             :       page = page->next_page();
     644             :     }
     645             :   }
     646      465726 : }
     647             : 
     648      465725 : CodeSpaceMemoryModificationScope::~CodeSpaceMemoryModificationScope() {
     649     2066187 :   if (heap_->write_protect_code_memory()) {
     650             :     heap_->decrement_code_space_memory_modification_scope_depth();
     651      931450 :     heap_->code_space()->SetDefaultCodePermissions();
     652      465726 :     LargePage* page = heap_->code_lo_space()->first_page();
     653     1600463 :     while (page != nullptr) {
     654             :       DCHECK(page->IsFlagSet(MemoryChunk::IS_EXECUTABLE));
     655     1338022 :       CHECK(heap_->memory_allocator()->IsMemoryChunkExecutable(page));
     656      669011 :       page->SetDefaultCodePermissions();
     657             :       page = page->next_page();
     658             :     }
     659             :   }
     660      465726 : }
     661             : 
     662             : CodePageCollectionMemoryModificationScope::
     663             :     CodePageCollectionMemoryModificationScope(Heap* heap)
     664     2200371 :     : heap_(heap) {
     665     2200371 :   if (heap_->write_protect_code_memory() &&
     666             :       !heap_->code_space_memory_modification_scope_depth()) {
     667             :     heap_->EnableUnprotectedMemoryChunksRegistry();
     668             :   }
     669             : }
     670             : 
     671     2200366 : CodePageCollectionMemoryModificationScope::
     672             :     ~CodePageCollectionMemoryModificationScope() {
     673     2200366 :   if (heap_->write_protect_code_memory() &&
     674             :       !heap_->code_space_memory_modification_scope_depth()) {
     675     2048529 :     heap_->ProtectUnprotectedMemoryChunks();
     676     2048530 :     heap_->DisableUnprotectedMemoryChunksRegistry();
     677             :   }
     678     2200367 : }
     679             : 
     680     1392728 : CodePageMemoryModificationScope::CodePageMemoryModificationScope(
     681             :     MemoryChunk* chunk)
     682             :     : chunk_(chunk),
     683     2785436 :       scope_active_(chunk_->heap()->write_protect_code_memory() &&
     684     2785456 :                     chunk_->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {
     685     1392728 :   if (scope_active_) {
     686             :     DCHECK(chunk_->owner()->identity() == CODE_SPACE ||
     687             :            (chunk_->owner()->identity() == CODE_LO_SPACE));
     688      530265 :     chunk_->SetReadAndWritable();
     689             :   }
     690     1392727 : }
     691             : 
     692             : CodePageMemoryModificationScope::~CodePageMemoryModificationScope() {
     693     1392787 :   if (scope_active_) {
     694      530265 :     chunk_->SetDefaultCodePermissions();
     695             :   }
     696             : }
     697             : 
     698             : }  // namespace internal
     699             : }  // namespace v8
     700             : 
     701             : #endif  // V8_HEAP_HEAP_INL_H_

Generated by: LCOV version 1.10