LCOV - code coverage report
Current view: top level - src/heap - heap-inl.h (source / functions) Hit Total Coverage
Test: app.info Lines: 195 220 88.6 %
Date: 2017-04-26 Functions: 365 432 84.5 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_HEAP_INL_H_
       6             : #define V8_HEAP_HEAP_INL_H_
       7             : 
       8             : #include <cmath>
       9             : 
      10             : #include "src/base/platform/platform.h"
      11             : #include "src/counters-inl.h"
      12             : #include "src/feedback-vector-inl.h"
      13             : #include "src/heap/heap.h"
      14             : #include "src/heap/incremental-marking-inl.h"
      15             : #include "src/heap/mark-compact.h"
      16             : #include "src/heap/object-stats.h"
      17             : #include "src/heap/remembered-set.h"
      18             : #include "src/heap/spaces-inl.h"
      19             : #include "src/heap/store-buffer.h"
      20             : #include "src/isolate.h"
      21             : #include "src/list-inl.h"
      22             : #include "src/log.h"
      23             : #include "src/msan.h"
      24             : #include "src/objects-inl.h"
      25             : #include "src/objects/scope-info.h"
      26             : 
      27             : namespace v8 {
      28             : namespace internal {
      29             : 
      30             : AllocationSpace AllocationResult::RetrySpace() {
      31             :   DCHECK(IsRetry());
      32       65434 :   return static_cast<AllocationSpace>(Smi::cast(object_)->value());
      33             : }
      34             : 
      35        8751 : HeapObject* AllocationResult::ToObjectChecked() {
      36        8751 :   CHECK(!IsRetry());
      37        8751 :   return HeapObject::cast(object_);
      38             : }
      39             : 
      40    36206312 : void PromotionQueue::insert(HeapObject* target, int32_t size,
      41             :                             bool was_marked_black) {
      42    36206312 :   if (emergency_stack_ != NULL) {
      43    10340872 :     emergency_stack_->Add(Entry(target, size, was_marked_black));
      44    10340872 :     return;
      45             :   }
      46             : 
      47    25865440 :   if ((rear_ - 1) < limit_) {
      48         226 :     RelocateQueueHead();
      49         226 :     emergency_stack_->Add(Entry(target, size, was_marked_black));
      50         226 :     return;
      51             :   }
      52             : 
      53    25865214 :   struct Entry* entry = reinterpret_cast<struct Entry*>(--rear_);
      54    25865214 :   entry->obj_ = target;
      55    25865214 :   entry->size_ = size;
      56    25865214 :   entry->was_marked_black_ = was_marked_black;
      57             : 
      58             : // Assert no overflow into live objects.
      59             : #ifdef DEBUG
      60             :   SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
      61             :                               reinterpret_cast<Address>(rear_));
      62             : #endif
      63             : }
      64             : 
      65    36206312 : void PromotionQueue::remove(HeapObject** target, int32_t* size,
      66             :                             bool* was_marked_black) {
      67             :   DCHECK(!is_empty());
      68    36206312 :   if (front_ == rear_) {
      69    11681314 :     Entry e = emergency_stack_->RemoveLast();
      70    11681314 :     *target = e.obj_;
      71    11681314 :     *size = e.size_;
      72    11681314 :     *was_marked_black = e.was_marked_black_;
      73    36206312 :     return;
      74             :   }
      75             : 
      76    24524998 :   struct Entry* entry = reinterpret_cast<struct Entry*>(--front_);
      77    24524998 :   *target = entry->obj_;
      78    24524998 :   *size = entry->size_;
      79    24524998 :   *was_marked_black = entry->was_marked_black_;
      80             : 
      81             :   // Assert no underflow.
      82             :   SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
      83             :                               reinterpret_cast<Address>(front_));
      84             : }
      85             : 
      86             : Page* PromotionQueue::GetHeadPage() {
      87             :   return Page::FromAllocationAreaAddress(reinterpret_cast<Address>(rear_));
      88             : }
      89             : 
      90   140712429 : void PromotionQueue::SetNewLimit(Address limit) {
      91             :   // If we are already using an emergency stack, we can ignore it.
      92    70510089 :   if (emergency_stack_) return;
      93             : 
      94             :   // If the limit is not on the same page, we can ignore it.
      95    70202340 :   if (Page::FromAllocationAreaAddress(limit) != GetHeadPage()) return;
      96             : 
      97     2204801 :   limit_ = reinterpret_cast<struct Entry*>(limit);
      98             : 
      99     2204801 :   if (limit_ <= rear_) {
     100             :     return;
     101             :   }
     102             : 
     103           6 :   RelocateQueueHead();
     104             : }
     105             : 
     106             : bool PromotionQueue::IsBelowPromotionQueue(Address to_space_top) {
     107             :   // If an emergency stack is used, the to-space address cannot interfere
     108             :   // with the promotion queue.
     109             :   if (emergency_stack_) return true;
     110             : 
     111             :   // If the given to-space top pointer and the head of the promotion queue
     112             :   // are not on the same page, then the to-space objects are below the
     113             :   // promotion queue.
     114             :   if (GetHeadPage() != Page::FromAddress(to_space_top)) {
     115             :     return true;
     116             :   }
     117             :   // If the to space top pointer is smaller or equal than the promotion
     118             :   // queue head, then the to-space objects are below the promotion queue.
     119             :   return reinterpret_cast<struct Entry*>(to_space_top) <= rear_;
     120             : }
     121             : 
     122             : #define ROOT_ACCESSOR(type, name, camel_name) \
     123             :   type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
     124  3292987617 : ROOT_LIST(ROOT_ACCESSOR)
     125             : #undef ROOT_ACCESSOR
     126             : 
     127             : #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
     128             :   Map* Heap::name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
     129             : STRUCT_LIST(STRUCT_MAP_ACCESSOR)
     130             : #undef STRUCT_MAP_ACCESSOR
     131             : 
     132             : #define STRING_ACCESSOR(name, str) \
     133             :   String* Heap::name() { return String::cast(roots_[k##name##RootIndex]); }
     134         102 : INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
     135             : #undef STRING_ACCESSOR
     136             : 
     137             : #define SYMBOL_ACCESSOR(name) \
     138             :   Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
     139         210 : PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
     140             : #undef SYMBOL_ACCESSOR
     141             : 
     142             : #define SYMBOL_ACCESSOR(name, description) \
     143             :   Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
     144             : PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
     145             : WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
     146             : #undef SYMBOL_ACCESSOR
     147             : 
     148             : #define ROOT_ACCESSOR(type, name, camel_name)                                 \
     149             :   void Heap::set_##name(type* value) {                                        \
     150             :     /* The deserializer makes use of the fact that these common roots are */  \
     151             :     /* never in new space and never on a page that is being compacted.    */  \
     152             :     DCHECK(!deserialization_complete() ||                                     \
     153             :            RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex));    \
     154             :     DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
     155             :     roots_[k##camel_name##RootIndex] = value;                                 \
     156             :   }
     157    24504792 : ROOT_LIST(ROOT_ACCESSOR)
     158             : #undef ROOT_ACCESSOR
     159             : 
     160             : PagedSpace* Heap::paged_space(int idx) {
     161             :   DCHECK_NE(idx, LO_SPACE);
     162             :   DCHECK_NE(idx, NEW_SPACE);
     163      830306 :   return static_cast<PagedSpace*>(space_[idx]);
     164             : }
     165             : 
     166      135000 : Space* Heap::space(int idx) { return space_[idx]; }
     167             : 
     168             : Address* Heap::NewSpaceAllocationTopAddress() {
     169             :   return new_space_->allocation_top_address();
     170             : }
     171             : 
     172             : Address* Heap::NewSpaceAllocationLimitAddress() {
     173             :   return new_space_->allocation_limit_address();
     174             : }
     175             : 
     176             : Address* Heap::OldSpaceAllocationTopAddress() {
     177             :   return old_space_->allocation_top_address();
     178             : }
     179             : 
     180             : Address* Heap::OldSpaceAllocationLimitAddress() {
     181             :   return old_space_->allocation_limit_address();
     182             : }
     183             : 
     184             : void Heap::UpdateNewSpaceAllocationCounter() {
     185      122535 :   new_space_allocation_counter_ = NewSpaceAllocationCounter();
     186             : }
     187             : 
     188      250502 : size_t Heap::NewSpaceAllocationCounter() {
     189      501004 :   return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
     190             : }
     191             : 
     192             : template <>
     193             : bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
     194             :   // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
     195             :   return chars == str.length();
     196             : }
     197             : 
     198             : 
     199             : template <>
     200             : bool inline Heap::IsOneByte(String* str, int chars) {
     201             :   return str->IsOneByteRepresentation();
     202             : }
     203             : 
     204             : 
     205      168852 : AllocationResult Heap::AllocateInternalizedStringFromUtf8(
     206      168251 :     Vector<const char> str, int chars, uint32_t hash_field) {
     207      168852 :   if (IsOneByte(str, chars)) {
     208             :     return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
     209      168251 :                                              hash_field);
     210             :   }
     211         601 :   return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
     212             : }
     213             : 
     214             : 
     215             : template <typename T>
     216     1893437 : AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
     217             :                                                       uint32_t hash_field) {
     218     1893437 :   if (IsOneByte(t, chars)) {
     219     1867127 :     return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
     220             :   }
     221       26310 :   return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
     222             : }
     223             : 
     224             : 
     225    11171755 : AllocationResult Heap::AllocateOneByteInternalizedString(
     226    11171755 :     Vector<const uint8_t> str, uint32_t hash_field) {
     227    11171755 :   CHECK_GE(String::kMaxLength, str.length());
     228             :   // The canonical empty_string is the only zero-length string we allow.
     229             :   DCHECK_IMPLIES(str.length() == 0, roots_[kempty_stringRootIndex] == nullptr);
     230             :   // Compute map and object size.
     231             :   Map* map = one_byte_internalized_string_map();
     232             :   int size = SeqOneByteString::SizeFor(str.length());
     233             : 
     234             :   // Allocate string.
     235             :   HeapObject* result = nullptr;
     236             :   {
     237    11171755 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     238    11171757 :     if (!allocation.To(&result)) return allocation;
     239             :   }
     240             : 
     241             :   // String maps are all immortal immovable objects.
     242             :   result->set_map_no_write_barrier(map);
     243             :   // Set length and hash fields of the allocated string.
     244             :   String* answer = String::cast(result);
     245             :   answer->set_length(str.length());
     246             :   answer->set_hash_field(hash_field);
     247             : 
     248             :   DCHECK_EQ(size, answer->Size());
     249             : 
     250             :   // Fill in the characters.
     251    11171756 :   MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
     252             :           str.length());
     253             : 
     254    11171756 :   return answer;
     255             : }
     256             : 
     257             : 
     258       30439 : AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
     259       30439 :                                                          uint32_t hash_field) {
     260       30439 :   CHECK_GE(String::kMaxLength, str.length());
     261             :   DCHECK_NE(0, str.length());  // Use Heap::empty_string() instead.
     262             :   // Compute map and object size.
     263             :   Map* map = internalized_string_map();
     264             :   int size = SeqTwoByteString::SizeFor(str.length());
     265             : 
     266             :   // Allocate string.
     267             :   HeapObject* result = nullptr;
     268             :   {
     269       30439 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
     270       30439 :     if (!allocation.To(&result)) return allocation;
     271             :   }
     272             : 
     273       30439 :   result->set_map(map);
     274             :   // Set length and hash fields of the allocated string.
     275             :   String* answer = String::cast(result);
     276             :   answer->set_length(str.length());
     277             :   answer->set_hash_field(hash_field);
     278             : 
     279             :   DCHECK_EQ(size, answer->Size());
     280             : 
     281             :   // Fill in the characters.
     282       30439 :   MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
     283       30439 :           str.length() * kUC16Size);
     284             : 
     285       30439 :   return answer;
     286             : }
     287             : 
     288     1931276 : AllocationResult Heap::CopyFixedArray(FixedArray* src) {
     289     1931276 :   if (src->length() == 0) return src;
     290     1559105 :   return CopyFixedArrayWithMap(src, src->map());
     291             : }
     292             : 
     293             : 
     294      314298 : AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
     295      314298 :   if (src->length() == 0) return src;
     296      314298 :   return CopyFixedDoubleArrayWithMap(src, src->map());
     297             : }
     298             : 
     299             : 
     300   593310794 : AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     301     2722428 :                                    AllocationAlignment alignment) {
     302             :   DCHECK(AllowHandleAllocation::IsAllowed());
     303             :   DCHECK(AllowHeapAllocation::IsAllowed());
     304             :   DCHECK(gc_state_ == NOT_IN_GC);
     305             : #ifdef DEBUG
     306             :   if (FLAG_gc_interval >= 0 && !always_allocate() &&
     307             :       Heap::allocation_timeout_-- <= 0) {
     308             :     return AllocationResult::Retry(space);
     309             :   }
     310             :   isolate_->counters()->objs_since_last_full()->Increment();
     311             :   isolate_->counters()->objs_since_last_young()->Increment();
     312             : #endif
     313             : 
     314   593310794 :   bool large_object = size_in_bytes > kMaxRegularHeapObjectSize;
     315             :   HeapObject* object = nullptr;
     316             :   AllocationResult allocation;
     317   593310794 :   if (NEW_SPACE == space) {
     318   353829556 :     if (large_object) {
     319             :       space = LO_SPACE;
     320             :     } else {
     321   353819398 :       allocation = new_space_->AllocateRaw(size_in_bytes, alignment);
     322   353819412 :       if (allocation.To(&object)) {
     323   353755491 :         OnAllocationEvent(object, size_in_bytes);
     324             :       }
     325   353819414 :       return allocation;
     326             :     }
     327             :   }
     328             : 
     329             :   // Here we only allocate in the old generation.
     330   239491396 :   if (OLD_SPACE == space) {
     331   205032098 :     if (large_object) {
     332         360 :       allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
     333             :     } else {
     334   205031738 :       allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
     335             :     }
     336    34459298 :   } else if (CODE_SPACE == space) {
     337     5444856 :     if (size_in_bytes <= code_space()->AreaSize()) {
     338     2722390 :       allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
     339             :     } else {
     340          38 :       allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
     341             :     }
     342    31736870 :   } else if (LO_SPACE == space) {
     343             :     DCHECK(large_object);
     344       10274 :     allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
     345    31726596 :   } else if (MAP_SPACE == space) {
     346    31726596 :     allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
     347             :   } else {
     348             :     // NEW_SPACE is not allowed here.
     349           0 :     UNREACHABLE();
     350             :   }
     351   239491716 :   if (allocation.To(&object)) {
     352   239489850 :     OnAllocationEvent(object, size_in_bytes);
     353             :   }
     354             : 
     355   239491522 :   return allocation;
     356             : }
     357             : 
     358             : 
     359  1237747075 : void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
     360  1237747075 :   HeapProfiler* profiler = isolate_->heap_profiler();
     361  1237747075 :   if (profiler->is_tracking_allocations()) {
     362       17228 :     profiler->AllocationEvent(object->address(), size_in_bytes);
     363             :   }
     364             : 
     365             :   if (FLAG_verify_predictable) {
     366             :     ++allocations_count_;
     367             :     // Advance synthetic time by making a time request.
     368             :     MonotonicallyIncreasingTimeInMs();
     369             : 
     370             :     UpdateAllocationsHash(object);
     371             :     UpdateAllocationsHash(size_in_bytes);
     372             : 
     373             :     if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
     374             :       PrintAlloctionsHash();
     375             :     }
     376             :   }
     377             : 
     378  1237748203 :   if (FLAG_trace_allocation_stack_interval > 0) {
     379           0 :     if (!FLAG_verify_predictable) ++allocations_count_;
     380           0 :     if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) {
     381           0 :       isolate()->PrintStack(stdout, Isolate::kPrintStackConcise);
     382             :     }
     383             :   }
     384  1237748203 : }
     385             : 
     386             : 
     387      594810 : void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
     388             :                        int size_in_bytes) {
     389      596222 :   HeapProfiler* heap_profiler = isolate_->heap_profiler();
     390      594810 :   if (heap_profiler->is_tracking_object_moves()) {
     391             :     heap_profiler->ObjectMoveEvent(source->address(), target->address(),
     392      123131 :                                    size_in_bytes);
     393             :   }
     394      594826 :   if (target->IsSharedFunctionInfo()) {
     395        2824 :     LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(),
     396             :                                                          target->address()));
     397             :   }
     398             : 
     399             :   if (FLAG_verify_predictable) {
     400             :     ++allocations_count_;
     401             :     // Advance synthetic time by making a time request.
     402             :     MonotonicallyIncreasingTimeInMs();
     403             : 
     404             :     UpdateAllocationsHash(source);
     405             :     UpdateAllocationsHash(target);
     406             :     UpdateAllocationsHash(size_in_bytes);
     407             : 
     408             :     if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
     409             :       PrintAlloctionsHash();
     410             :     }
     411             :   }
     412      594826 : }
     413             : 
     414             : 
     415             : void Heap::UpdateAllocationsHash(HeapObject* object) {
     416             :   Address object_address = object->address();
     417             :   MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
     418             :   AllocationSpace allocation_space = memory_chunk->owner()->identity();
     419             : 
     420             :   STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
     421             :   uint32_t value =
     422             :       static_cast<uint32_t>(object_address - memory_chunk->address()) |
     423             :       (static_cast<uint32_t>(allocation_space) << kPageSizeBits);
     424             : 
     425             :   UpdateAllocationsHash(value);
     426             : }
     427             : 
     428             : 
     429             : void Heap::UpdateAllocationsHash(uint32_t value) {
     430             :   uint16_t c1 = static_cast<uint16_t>(value);
     431             :   uint16_t c2 = static_cast<uint16_t>(value >> 16);
     432             :   raw_allocations_hash_ =
     433             :       StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
     434             :   raw_allocations_hash_ =
     435             :       StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
     436             : }
     437             : 
     438             : 
     439             : void Heap::RegisterExternalString(String* string) {
     440     1015184 :   external_string_table_.AddString(string);
     441             : }
     442             : 
     443             : 
     444             : void Heap::FinalizeExternalString(String* string) {
     445             :   DCHECK(string->IsExternalString());
     446             :   v8::String::ExternalStringResourceBase** resource_addr =
     447             :       reinterpret_cast<v8::String::ExternalStringResourceBase**>(
     448             :           reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
     449             :           kHeapObjectTag);
     450             : 
     451             :   // Dispose of the C++ object if it has not already been disposed.
     452      990154 :   if (*resource_addr != NULL) {
     453      990142 :     (*resource_addr)->Dispose();
     454      990143 :     *resource_addr = NULL;
     455             :   }
     456             : }
     457             : 
     458             : Address Heap::NewSpaceTop() { return new_space_->top(); }
     459             : 
     460             : bool Heap::DeoptMaybeTenuredAllocationSites() {
     461      122535 :   return new_space_->IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
     462             : }
     463             : 
     464           0 : bool Heap::InNewSpace(Object* object) {
     465             :   // Inlined check from NewSpace::Contains.
     466             :   bool result =
     467  8970527804 :       object->IsHeapObject() &&
     468  4218950118 :       Page::FromAddress(HeapObject::cast(object)->address())->InNewSpace();
     469             :   DCHECK(!result ||                 // Either not in new space
     470             :          gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
     471             :          InToSpace(object));        // ... or in to-space (where we allocate).
     472           0 :   return result;
     473             : }
     474             : 
     475             : bool Heap::InFromSpace(Object* object) {
     476   718293534 :   return object->IsHeapObject() &&
     477             :          MemoryChunk::FromAddress(HeapObject::cast(object)->address())
     478   358961047 :              ->IsFlagSet(Page::IN_FROM_SPACE);
     479             : }
     480             : 
     481             : 
     482             : bool Heap::InToSpace(Object* object) {
     483   252900694 :   return object->IsHeapObject() &&
     484             :          MemoryChunk::FromAddress(HeapObject::cast(object)->address())
     485   126235076 :              ->IsFlagSet(Page::IN_TO_SPACE);
     486             : }
     487             : 
     488             : bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); }
     489             : 
     490             : bool Heap::InNewSpaceSlow(Address address) {
     491             :   return new_space_->ContainsSlow(address);
     492             : }
     493             : 
     494             : bool Heap::InOldSpaceSlow(Address address) {
     495             :   return old_space_->ContainsSlow(address);
     496             : }
     497             : 
     498   137953206 : bool Heap::ShouldBePromoted(Address old_address, int object_size) {
     499             :   Page* page = Page::FromAddress(old_address);
     500   137953206 :   Address age_mark = new_space_->age_mark();
     501   204566258 :   return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
     502    36197051 :          (!page->ContainsLimit(age_mark) || old_address < age_mark);
     503             : }
     504             : 
     505  2893562409 : void Heap::RecordWrite(Object* object, int offset, Object* o) {
     506  3343838483 :   if (!InNewSpace(o) || !object->IsHeapObject() || InNewSpace(object)) {
     507  2723826280 :     return;
     508             :   }
     509   169736114 :   store_buffer()->InsertEntry(HeapObject::cast(object)->address() + offset);
     510             : }
     511             : 
     512    16436555 : void Heap::RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* value) {
     513    16436555 :   if (InNewSpace(value)) {
     514     1017322 :     RecordWriteIntoCodeSlow(host, rinfo, value);
     515             :   }
     516    16436555 : }
     517             : 
     518       19970 : void Heap::RecordFixedArrayElements(FixedArray* array, int offset, int length) {
     519       39940 :   if (InNewSpace(array)) return;
     520     1884822 :   for (int i = 0; i < length; i++) {
     521     3769644 :     if (!InNewSpace(array->get(offset + i))) continue;
     522             :     store_buffer()->InsertEntry(
     523             :         reinterpret_cast<Address>(array->RawFieldOfElementAt(offset + i)));
     524             :   }
     525             : }
     526             : 
     527      194093 : Address* Heap::store_buffer_top_address() {
     528             :   return store_buffer()->top_address();
     529             : }
     530             : 
     531             : bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
     532             :   // Object migration is governed by the following rules:
     533             :   //
     534             :   // 1) Objects in new-space can be migrated to the old space
     535             :   //    that matches their target space or they stay in new-space.
     536             :   // 2) Objects in old-space stay in the same space when migrating.
     537             :   // 3) Fillers (two or more words) can migrate due to left-trimming of
     538             :   //    fixed arrays in new-space or old space.
     539             :   // 4) Fillers (one word) can never migrate, they are skipped by
     540             :   //    incremental marking explicitly to prevent invalid pattern.
     541             :   //
     542             :   // Since this function is used for debugging only, we do not place
     543             :   // asserts here, but check everything explicitly.
     544             :   if (obj->map() == one_pointer_filler_map()) return false;
     545             :   InstanceType type = obj->map()->instance_type();
     546             :   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
     547             :   AllocationSpace src = chunk->owner()->identity();
     548             :   switch (src) {
     549             :     case NEW_SPACE:
     550             :       return dst == src || dst == OLD_SPACE;
     551             :     case OLD_SPACE:
     552             :       return dst == src &&
     553             :              (dst == OLD_SPACE || obj->IsFiller() || obj->IsExternalString());
     554             :     case CODE_SPACE:
     555             :       return dst == src && type == CODE_TYPE;
     556             :     case MAP_SPACE:
     557             :     case LO_SPACE:
     558             :       return false;
     559             :   }
     560             :   UNREACHABLE();
     561             :   return false;
     562             : }
     563             : 
     564   116355462 : void Heap::CopyBlock(Address dst, Address src, int byte_size) {
     565             :   CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
     566   155870094 :             static_cast<size_t>(byte_size / kPointerSize));
     567   116355462 : }
     568             : 
     569             : template <Heap::FindMementoMode mode>
     570   106190721 : AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
     571    52787252 :   Address object_address = object->address();
     572    52787252 :   Address memento_address = object_address + object->Size();
     573    52757055 :   Address last_memento_word_address = memento_address + kPointerSize;
     574             :   // If the memento would be on another page, bail out immediately.
     575    52757055 :   if (!Page::OnSamePage(object_address, last_memento_word_address)) {
     576             :     return nullptr;
     577             :   }
     578             :   HeapObject* candidate = HeapObject::FromAddress(memento_address);
     579             :   Map* candidate_map = candidate->map();
     580             :   // This fast check may peek at an uninitialized word. However, the slow check
     581             :   // below (memento_address == top) ensures that this is safe. Mark the word as
     582             :   // initialized to silence MemorySanitizer warnings.
     583             :   MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
     584    52765394 :   if (candidate_map != allocation_memento_map()) {
     585             :     return nullptr;
     586             :   }
     587             : 
     588             :   // Bail out if the memento is below the age mark, which can happen when
     589             :   // mementos survived because a page got moved within new space.
     590             :   Page* object_page = Page::FromAddress(object_address);
     591     4317880 :   if (object_page->IsFlagSet(Page::NEW_SPACE_BELOW_AGE_MARK)) {
     592             :     Address age_mark =
     593     1045676 :         reinterpret_cast<SemiSpace*>(object_page->owner())->age_mark();
     594     1045676 :     if (!object_page->Contains(age_mark)) {
     595             :       return nullptr;
     596             :     }
     597             :     // Do an exact check in the case where the age mark is on the same page.
     598     1045668 :     if (object_address < age_mark) {
     599             :       return nullptr;
     600             :     }
     601             :   }
     602             : 
     603      638075 :   AllocationMemento* memento_candidate = AllocationMemento::cast(candidate);
     604             : 
     605             :   // Depending on what the memento is used for, we might need to perform
     606             :   // additional checks.
     607             :   Address top;
     608             :   switch (mode) {
     609             :     case Heap::kForGC:
     610     3679579 :       return memento_candidate;
     611             :     case Heap::kForRuntime:
     612      638075 :       if (memento_candidate == nullptr) return nullptr;
     613             :       // Either the object is the last object in the new space, or there is
     614             :       // another object of at least word size (the header map word) following
     615             :       // it, so suffices to compare ptr and top here.
     616             :       top = NewSpaceTop();
     617             :       DCHECK(memento_address == top ||
     618             :              memento_address + HeapObject::kHeaderSize <= top ||
     619             :              !Page::OnSamePage(memento_address, top - 1));
     620      638075 :       if ((memento_address != top) && memento_candidate->IsValid()) {
     621      635739 :         return memento_candidate;
     622             :       }
     623             :       return nullptr;
     624             :     default:
     625             :       UNREACHABLE();
     626             :   }
     627             :   UNREACHABLE();
     628             :   return nullptr;
     629             : }
     630             : 
     631             : template <Heap::UpdateAllocationSiteMode mode>
     632   145051365 : void Heap::UpdateAllocationSite(HeapObject* object,
     633             :                                 base::HashMap* pretenuring_feedback) {
     634             :   DCHECK(InFromSpace(object) ||
     635             :          (InToSpace(object) &&
     636             :           Page::FromAddress(object->address())
     637             :               ->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION)) ||
     638             :          (!InNewSpace(object) &&
     639             :           Page::FromAddress(object->address())
     640             :               ->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)));
     641   290180330 :   if (!FLAG_allocation_site_pretenuring ||
     642   145079913 :       !AllocationSite::CanTrack(object->map()->instance_type()))
     643             :     return;
     644    51543274 :   AllocationMemento* memento_candidate = FindAllocationMemento<kForGC>(object);
     645    51533553 :   if (memento_candidate == nullptr) return;
     646             : 
     647             :   if (mode == kGlobal) {
     648             :     DCHECK_EQ(pretenuring_feedback, global_pretenuring_feedback_);
     649             :     // Entering global pretenuring feedback is only used in the scavenger, where
     650             :     // we are allowed to actually touch the allocation site.
     651     2989471 :     if (!memento_candidate->IsValid()) return;
     652             :     AllocationSite* site = memento_candidate->GetAllocationSite();
     653             :     DCHECK(!site->IsZombie());
     654             :     // For inserting in the global pretenuring storage we need to first
     655             :     // increment the memento found count on the allocation site.
     656     2989471 :     if (site->IncrementMementoFoundCount()) {
     657             :       global_pretenuring_feedback_->LookupOrInsert(site,
     658     6887379 :                                                    ObjectHash(site->address()));
     659             :     }
     660             :   } else {
     661             :     DCHECK_EQ(mode, kCached);
     662             :     DCHECK_NE(pretenuring_feedback, global_pretenuring_feedback_);
     663             :     // Entering cached feedback is used in the parallel case. We are not allowed
     664             :     // to dereference the allocation site and rather have to postpone all checks
     665             :     // till actually merging the data.
     666             :     Address key = memento_candidate->GetAllocationSiteUnchecked();
     667             :     base::HashMap::Entry* e =
     668     1380698 :         pretenuring_feedback->LookupOrInsert(key, ObjectHash(key));
     669             :     DCHECK(e != nullptr);
     670      690240 :     (*bit_cast<intptr_t*>(&e->value))++;
     671             :   }
     672             : }
     673             : 
     674             : 
     675             : void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
     676             :   global_pretenuring_feedback_->Remove(
     677           0 :       site, static_cast<uint32_t>(bit_cast<uintptr_t>(site)));
     678             : }
     679             : 
     680      104483 : bool Heap::CollectGarbage(AllocationSpace space,
     681             :                           GarbageCollectionReason gc_reason,
     682             :                           const v8::GCCallbackFlags callbackFlags) {
     683      104483 :   const char* collector_reason = NULL;
     684      104483 :   GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
     685      104483 :   return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
     686             : }
     687             : 
     688             : 
     689    82655296 : Isolate* Heap::isolate() {
     690             :   return reinterpret_cast<Isolate*>(
     691  8370509247 :       reinterpret_cast<intptr_t>(this) -
     692  8370509247 :       reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
     693             : }
     694             : 
     695           0 : void Heap::ExternalStringTable::PromoteAllNewSpaceStrings() {
     696           0 :   old_space_strings_.AddAll(new_space_strings_);
     697             :   new_space_strings_.Clear();
     698           0 : }
     699             : 
     700     1015184 : void Heap::ExternalStringTable::AddString(String* string) {
     701             :   DCHECK(string->IsExternalString());
     702     1015184 :   if (heap_->InNewSpace(string)) {
     703       24881 :     new_space_strings_.Add(string);
     704             :   } else {
     705      990303 :     old_space_strings_.Add(string);
     706             :   }
     707     1015184 : }
     708             : 
     709             : void Heap::ExternalStringTable::IterateNewSpaceStrings(RootVisitor* v) {
     710      116850 :   if (!new_space_strings_.is_empty()) {
     711             :     Object** start = &new_space_strings_[0];
     712             :     v->VisitRootPointers(Root::kExternalStringsTable, start,
     713         186 :                          start + new_space_strings_.length());
     714             :   }
     715             : }
     716             : 
     717      116850 : void Heap::ExternalStringTable::IterateAll(RootVisitor* v) {
     718             :   IterateNewSpaceStrings(v);
     719      116850 :   if (!old_space_strings_.is_empty()) {
     720             :     Object** start = &old_space_strings_[0];
     721             :     v->VisitRootPointers(Root::kExternalStringsTable, start,
     722      116754 :                          start + old_space_strings_.length());
     723             :   }
     724      116850 : }
     725             : 
     726             : 
     727             : // Verify() is inline to avoid ifdef-s around its calls in release
     728             : // mode.
     729             : void Heap::ExternalStringTable::Verify() {
     730             : #ifdef DEBUG
     731             :   for (int i = 0; i < new_space_strings_.length(); ++i) {
     732             :     Object* obj = Object::cast(new_space_strings_[i]);
     733             :     DCHECK(heap_->InNewSpace(obj));
     734             :     DCHECK(!obj->IsTheHole(heap_->isolate()));
     735             :   }
     736             :   for (int i = 0; i < old_space_strings_.length(); ++i) {
     737             :     Object* obj = Object::cast(old_space_strings_[i]);
     738             :     DCHECK(!heap_->InNewSpace(obj));
     739             :     DCHECK(!obj->IsTheHole(heap_->isolate()));
     740             :   }
     741             : #endif
     742             : }
     743             : 
     744             : 
     745             : void Heap::ExternalStringTable::AddOldString(String* string) {
     746             :   DCHECK(string->IsExternalString());
     747             :   DCHECK(!heap_->InNewSpace(string));
     748        1607 :   old_space_strings_.Add(string);
     749             : }
     750             : 
     751             : 
     752             : void Heap::ExternalStringTable::ShrinkNewStrings(int position) {
     753             :   new_space_strings_.Rewind(position);
     754             : #ifdef VERIFY_HEAP
     755             :   if (FLAG_verify_heap) {
     756             :     Verify();
     757             :   }
     758             : #endif
     759             : }
     760             : 
     761             : void Heap::ClearInstanceofCache() { set_instanceof_cache_function(Smi::kZero); }
     762             : 
     763    66422639 : Oddball* Heap::ToBoolean(bool condition) {
     764    84063907 :   return condition ? true_value() : false_value();
     765             : }
     766             : 
     767             : 
     768             : void Heap::CompletelyClearInstanceofCache() {
     769             :   set_instanceof_cache_map(Smi::kZero);
     770             :   set_instanceof_cache_function(Smi::kZero);
     771             : }
     772             : 
     773             : 
     774   230388731 : uint32_t Heap::HashSeed() {
     775   230388731 :   uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
     776             :   DCHECK(FLAG_randomize_hashes || seed == 0);
     777             :   return seed;
     778             : }
     779             : 
     780             : 
     781     2095115 : int Heap::NextScriptId() {
     782             :   int last_id = last_script_id()->value();
     783     2095115 :   if (last_id == Smi::kMaxValue) {
     784             :     last_id = 1;
     785             :   } else {
     786     2095115 :     last_id++;
     787             :   }
     788             :   set_last_script_id(Smi::FromInt(last_id));
     789             :   return last_id;
     790             : }
     791             : 
     792             : void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
     793             :   DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::kZero);
     794             :   set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
     795             : }
     796             : 
     797             : void Heap::SetConstructStubCreateDeoptPCOffset(int pc_offset) {
     798             :   DCHECK(construct_stub_create_deopt_pc_offset() == Smi::kZero);
     799             :   set_construct_stub_create_deopt_pc_offset(Smi::FromInt(pc_offset));
     800             : }
     801             : 
     802             : void Heap::SetConstructStubInvokeDeoptPCOffset(int pc_offset) {
     803             :   DCHECK(construct_stub_invoke_deopt_pc_offset() == Smi::kZero);
     804             :   set_construct_stub_invoke_deopt_pc_offset(Smi::FromInt(pc_offset));
     805             : }
     806             : 
     807             : void Heap::SetGetterStubDeoptPCOffset(int pc_offset) {
     808             :   DCHECK(getter_stub_deopt_pc_offset() == Smi::kZero);
     809             :   set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
     810             : }
     811             : 
     812             : void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
     813             :   DCHECK(setter_stub_deopt_pc_offset() == Smi::kZero);
     814             :   set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
     815             : }
     816             : 
     817             : void Heap::SetInterpreterEntryReturnPCOffset(int pc_offset) {
     818             :   DCHECK(interpreter_entry_return_pc_offset() == Smi::kZero);
     819             :   set_interpreter_entry_return_pc_offset(Smi::FromInt(pc_offset));
     820             : }
     821             : 
     822     4106837 : int Heap::GetNextTemplateSerialNumber() {
     823     4106837 :   int next_serial_number = next_template_serial_number()->value() + 1;
     824             :   set_next_template_serial_number(Smi::FromInt(next_serial_number));
     825             :   return next_serial_number;
     826             : }
     827             : 
     828             : void Heap::SetSerializedTemplates(FixedArray* templates) {
     829             :   DCHECK_EQ(empty_fixed_array(), serialized_templates());
     830             :   DCHECK(isolate()->serializer_enabled());
     831             :   set_serialized_templates(templates);
     832             : }
     833             : 
     834             : void Heap::SetSerializedGlobalProxySizes(FixedArray* sizes) {
     835             :   DCHECK_EQ(empty_fixed_array(), serialized_global_proxy_sizes());
     836             :   DCHECK(isolate()->serializer_enabled());
     837             :   set_serialized_global_proxy_sizes(sizes);
     838             : }
     839             : 
     840             : void Heap::CreateObjectStats() {
     841           0 :   if (V8_LIKELY(FLAG_gc_stats == 0)) return;
     842           0 :   if (!live_object_stats_) {
     843           0 :     live_object_stats_ = new ObjectStats(this);
     844             :   }
     845           0 :   if (!dead_object_stats_) {
     846           0 :     dead_object_stats_ = new ObjectStats(this);
     847             :   }
     848             : }
     849             : 
     850             : AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
     851       60782 :     : heap_(isolate->heap()) {
     852             :   heap_->always_allocate_scope_count_.Increment(1);
     853             : }
     854             : 
     855             : AlwaysAllocateScope::~AlwaysAllocateScope() {
     856             :   heap_->always_allocate_scope_count_.Decrement(1);
     857             : }
     858             : 
     859             : void VerifyPointersVisitor::VisitPointers(HeapObject* host, Object** start,
     860             :                                           Object** end) {
     861             :   VerifyPointers(start, end);
     862             : }
     863             : 
     864           0 : void VerifyPointersVisitor::VisitRootPointers(Root root, Object** start,
     865             :                                               Object** end) {
     866           0 :   VerifyPointers(start, end);
     867           0 : }
     868             : 
     869           0 : void VerifyPointersVisitor::VerifyPointers(Object** start, Object** end) {
     870           0 :   for (Object** current = start; current < end; current++) {
     871           0 :     if ((*current)->IsHeapObject()) {
     872             :       HeapObject* object = HeapObject::cast(*current);
     873           0 :       CHECK(object->GetIsolate()->heap()->Contains(object));
     874           0 :       CHECK(object->map()->IsMap());
     875             :     } else {
     876           0 :       CHECK((*current)->IsSmi());
     877             :     }
     878             :   }
     879           0 : }
     880             : 
     881             : void VerifySmisVisitor::VisitRootPointers(Root root, Object** start,
     882             :                                           Object** end) {
     883             :   for (Object** current = start; current < end; current++) {
     884             :     CHECK((*current)->IsSmi());
     885             :   }
     886             : }
     887             : }  // namespace internal
     888             : }  // namespace v8
     889             : 
     890             : #endif  // V8_HEAP_HEAP_INL_H_

Generated by: LCOV version 1.10