LCOV - code coverage report
Current view: top level - src/heap - spaces-inl.h (source / functions)
Test: app.info
Date: 2019-04-17

                 Hit    Total    Coverage
    Lines:       137      167      82.0 %
    Functions:    19       21      90.5 %

          Line data    Source code
       1             : // Copyright 2011 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_SPACES_INL_H_
       6             : #define V8_HEAP_SPACES_INL_H_
       7             : 
       8             : #include "src/heap/spaces.h"
       9             : 
      10             : #include "src/base/atomic-utils.h"
      11             : #include "src/base/bounded-page-allocator.h"
      12             : #include "src/base/v8-fallthrough.h"
      13             : #include "src/heap/heap-inl.h"
      14             : #include "src/heap/incremental-marking.h"
      15             : #include "src/msan.h"
      16             : #include "src/objects/code-inl.h"
      17             : 
      18             : namespace v8 {
      19             : namespace internal {
      20             : 
      21             : template <class PAGE_TYPE>
      22             : PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
      23     1661553 :   p_ = p_->next_page();
      24             :   return *this;
      25             : }
      26             : 
      27             : template <class PAGE_TYPE>
      28             : PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
      29             :   PageIteratorImpl<PAGE_TYPE> tmp(*this);
      30             :   operator++();
      31             :   return tmp;
      32             : }
      33             : 
      34             : PageRange::PageRange(Address start, Address limit)
      35             :     : begin_(Page::FromAddress(start)),
      36             :       end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
      37             : #ifdef DEBUG
      38             :   if (begin_->InNewSpace()) {
      39             :     SemiSpace::AssertValidRange(start, limit);
      40             :   }
      41             : #endif  // DEBUG
      42             : }
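
[Editor's note, not part of the original file] PageRange spans every page from the one containing start through the one containing limit; the limit is mapped with FromAllocationAreaAddress so that a limit sitting one past a page's area still resolves to that page. A minimal usage sketch:

    for (Page* page : PageRange(start, limit)) {
      // visit each page overlapping [start, limit)
    }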
      43             : 
      44             : // -----------------------------------------------------------------------------
      45             : // SemiSpaceIterator
      46             : 
      47     2911473 : HeapObject SemiSpaceIterator::Next() {
      48     3104403 :   while (current_ != limit_) {
      49     3000072 :     if (Page::IsAlignedToPageSize(current_)) {
      50             :       Page* page = Page::FromAllocationAreaAddress(current_);
      51             :       page = page->next_page();
      52             :       DCHECK(page);
      53          71 :       current_ = page->area_start();
      54          71 :       if (current_ == limit_) return HeapObject();
      55             :     }
      56     6000132 :     HeapObject object = HeapObject::FromAddress(current_);
      57     3000066 :     current_ += object->Size();
      58     3000080 :     if (!object->IsFiller()) {
      59     2903615 :       return object;
      60             :     }
      61             :   }
      62        7866 :   return HeapObject();
      63             : }
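
[Editor's note, not part of the original file] Next() hops from page to page inside the semispace, skips filler objects, and returns a null HeapObject once iteration is complete. A minimal usage sketch, assuming a Heap* named heap:

    SemiSpaceIterator it(heap->new_space());
    for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
      // visit obj
    }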
      64             : 
      65             : // -----------------------------------------------------------------------------
      66             : // HeapObjectIterator
      67             : 
      68    88507812 : HeapObject HeapObjectIterator::Next() {
      69       90338 :   do {
      70    88566238 :     HeapObject next_obj = FromCurrentPage();
      71    88568424 :     if (!next_obj.is_null()) return next_obj;
      72             :   } while (AdvanceToNextPage());
      73       31912 :   return HeapObject();
      74             : }
      75             : 
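[Editor's note, not part of the original file] FromCurrentPage() advances object by object using each object's Size(). When the cursor reaches the space's allocation top while top != limit, it jumps straight to limit so the uninitialized linear allocation area on the current page is never read. Fillers are skipped; a null return means the page is exhausted and Next() above moves on to the next page.
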
      76    88566967 : HeapObject HeapObjectIterator::FromCurrentPage() {
      77    89078127 :   while (cur_addr_ != cur_end_) {
      78   177987116 :     if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
      79       11496 :       cur_addr_ = space_->limit();
      80       11496 :       continue;
      81             :     }
      82    88976293 :     HeapObject obj = HeapObject::FromAddress(cur_addr_);
      83    88976293 :     const int obj_size = obj->Size();
      84    88977511 :     cur_addr_ += obj_size;
      85             :     DCHECK_LE(cur_addr_, cur_end_);
      86    88977511 :     if (!obj->IsFiller()) {
      87             :       if (obj->IsCode()) {
      88             :         DCHECK_EQ(space_, space_->heap()->code_space());
      89             :         DCHECK_CODEOBJECT_SIZE(obj_size, space_);
      90             :       } else {
      91             :         DCHECK_OBJECT_SIZE(obj_size);
      92             :       }
      93    88477847 :       return obj;
      94             :     }
      95             :   }
      96       90338 :   return HeapObject();
      97             : }
      98             : 
      99     1912565 : void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
     100             :                                                size_t amount) {
     101     1912565 :   base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
     102             :   heap()->IncrementExternalBackingStoreBytes(type, amount);
     103     1912565 : }
     104             : 
     105      475064 : void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
     106             :                                                size_t amount) {
     107      475064 :   base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
     108             :   heap()->DecrementExternalBackingStoreBytes(type, amount);
     109      475064 : }
     110             : 
     111       43447 : void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
     112             :                                           Space* from, Space* to,
     113             :                                           size_t amount) {
     114       43447 :   if (from == to) return;
     115             : 
     116       27679 :   base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
     117       27679 :   base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
     118             : }
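
[Editor's note, not part of the original file] These counters attribute external (off-heap) memory, such as ArrayBuffer backing stores, to a space. The increment and decrement paths also update the heap-wide totals, while MoveExternalBackingStoreBytes only shuffles the per-space counters: a move between spaces leaves the heap-wide total unchanged.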
     119             : 
     120             : // -----------------------------------------------------------------------------
     121             : // SemiSpace
     122             : 
     123             : bool SemiSpace::Contains(HeapObject o) {
     124             :   MemoryChunk* memory_chunk = MemoryChunk::FromHeapObject(o);
     125     1934599 :   if (memory_chunk->IsLargePage()) return false;
     126     1934594 :   return id_ == kToSpace ? memory_chunk->IsToPage()
     127     1934594 :                          : memory_chunk->IsFromPage();
     128             : }
     129             : 
     130             : bool SemiSpace::Contains(Object o) {
     131     3869198 :   return o->IsHeapObject() && Contains(HeapObject::cast(o));
     132             : }
     133             : 
     134             : bool SemiSpace::ContainsSlow(Address a) {
     135        1335 :   for (Page* p : *this) {
     136        1320 :     if (p == MemoryChunk::FromAddress(a)) return true;
     137             :   }
     138             :   return false;
     139             : }
     140             : 
     141             : // --------------------------------------------------------------------------
     142             : // NewSpace
     143             : 
     144             : bool NewSpace::Contains(Object o) {
     145             :   return o->IsHeapObject() && Contains(HeapObject::cast(o));
     146             : }
     147             : 
     148             : bool NewSpace::Contains(HeapObject o) {
     149             :   return MemoryChunk::FromHeapObject(o)->InNewSpace();
     150             : }
     151             : 
     152             : bool NewSpace::ContainsSlow(Address a) {
     153          20 :   return from_space_.ContainsSlow(a) || to_space_.ContainsSlow(a);
     154             : }
     155             : 
     156             : bool NewSpace::ToSpaceContainsSlow(Address a) {
     157             :   return to_space_.ContainsSlow(a);
     158             : }
     159             : 
     160             : bool NewSpace::ToSpaceContains(Object o) { return to_space_.Contains(o); }
     161             : bool NewSpace::FromSpaceContains(Object o) { return from_space_.Contains(o); }
     162             : 
     163           5 : bool PagedSpace::Contains(Address addr) {
     164           5 :   return MemoryChunk::FromAnyPointerAddress(addr)->owner() == this;
     165             : }
     166             : 
     167             : bool PagedSpace::Contains(Object o) {
     168     3891704 :   if (!o.IsHeapObject()) return false;
     169     3891704 :   return Page::FromAddress(o.ptr())->owner() == this;
     170             : }
     171             : 
     172             : void PagedSpace::UnlinkFreeListCategories(Page* page) {
     173             :   DCHECK_EQ(this, page->owner());
     174     1065168 :   page->ForAllFreeListCategories([this](FreeListCategory* category) {
     175             :     DCHECK_EQ(free_list(), category->owner());
     176             :     category->set_free_list(nullptr);
     177           6 :     free_list()->RemoveCategory(category);
     178      177528 :   });
     179             : }
     180             : 
     181             : size_t PagedSpace::RelinkFreeListCategories(Page* page) {
     182             :   DCHECK_EQ(this, page->owner());
     183     1125299 :   size_t added = 0;
     184    13503552 :   page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
     185     6751776 :     category->set_free_list(&free_list_);
     186     6751776 :     added += category->available();
     187             :     category->Relink();
     188     1125299 :   });
     189             : 
     190             :   DCHECK_IMPLIES(!page->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE),
     191             :                  page->AvailableInFreeList() ==
     192             :                      page->AvailableInFreeListFromAllocatedBytes());
     193     1125299 :   return added;
     194             : }
     195             : 
     196             : bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
     197      187078 :   if (allocation_info_.top() != kNullAddress) {
     198             :     const Address object_address = object->address();
     199      187078 :     if ((allocation_info_.top() - object_size) == object_address) {
     200             :       allocation_info_.set_top(object_address);
     201             :       return true;
     202             :     }
     203             :   }
     204             :   return false;
     205             : }
     206             : 
     207    51263082 : bool MemoryChunk::HasHeaderSentinel(Address slot_addr) {
     208             :   Address base = BaseAddress(slot_addr);
     209    51263082 :   if (slot_addr < base + kHeaderSize) return false;
     210    51249855 :   return HeapObject::FromAddress(base) ==
     211             :          ObjectSlot(base + kHeaderSentinelOffset).Relaxed_Load();
     212             : }
     213             : 
     214             : MemoryChunk* MemoryChunk::FromAnyPointerAddress(Address addr) {
     215    51263082 :   while (!HasHeaderSentinel(addr)) {
     216    31787270 :     addr = BaseAddress(addr) - 1;
     217             :   }
     218             :   return FromAddress(addr);
     219             : }
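
[Editor's note, not part of the original file] An interior pointer into a large object can land in a chunk-aligned region whose start is not a real MemoryChunk header, so BaseAddress(addr) alone is not enough. HasHeaderSentinel() accepts a base only if the slot at base + kHeaderSentinelOffset holds the tagged pointer for that base itself; otherwise BaseAddress(addr) - 1 drops just below the current aligned region, and the loop walks down region by region until it reaches the genuine header of the enclosing chunk.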
     220             : 
     221      614406 : void MemoryChunk::IncrementExternalBackingStoreBytes(
     222             :     ExternalBackingStoreType type, size_t amount) {
     223      614406 :   base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
     224      614406 :   owner()->IncrementExternalBackingStoreBytes(type, amount);
     225      614404 : }
     226             : 
     227      111592 : void MemoryChunk::DecrementExternalBackingStoreBytes(
     228             :     ExternalBackingStoreType type, size_t amount) {
     229      111592 :   base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
     230      111592 :   owner()->DecrementExternalBackingStoreBytes(type, amount);
     231      111597 : }
     232             : 
     233       43439 : void MemoryChunk::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
     234             :                                                 MemoryChunk* from,
     235             :                                                 MemoryChunk* to,
     236             :                                                 size_t amount) {
     237       43439 :   base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
     238       43439 :   base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
     239             :   Space::MoveExternalBackingStoreBytes(type, from->owner(), to->owner(),
     240       43439 :                                        amount);
     241       43447 : }
     242             : 
     243         165 : void Page::MarkNeverAllocateForTesting() {
     244             :   DCHECK(this->owner()->identity() != NEW_SPACE);
     245             :   DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
     246             :   SetFlag(NEVER_ALLOCATE_ON_PAGE);
     247             :   SetFlag(NEVER_EVACUATE);
     248         165 :   reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
     249         165 : }
     250             : 
     251       11449 : void Page::MarkEvacuationCandidate() {
     252             :   DCHECK(!IsFlagSet(NEVER_EVACUATE));
     253             :   DCHECK_NULL(slot_set<OLD_TO_OLD>());
     254             :   DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
     255             :   SetFlag(EVACUATION_CANDIDATE);
     256       11449 :   reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
     257       11449 : }
     258             : 
     259             : void Page::ClearEvacuationCandidate() {
     260             :   if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
     261             :     DCHECK_NULL(slot_set<OLD_TO_OLD>());
     262             :     DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
     263             :   }
     264             :   ClearFlag(EVACUATION_CANDIDATE);
     265         107 :   InitializeFreeListCategories();
     266             : }
     267             : 
     268             : HeapObject LargePage::GetObject() {
     269             :   return HeapObject::FromAddress(area_start());
     270             : }
     271             : 
     272             : OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
     273             :     : heap_(heap),
     274             :       state_(kOldSpaceState),
     275             :       old_iterator_(heap->old_space()->begin()),
     276             :       code_iterator_(heap->code_space()->begin()),
     277             :       map_iterator_(heap->map_space()->begin()),
     278             :       lo_iterator_(heap->lo_space()->begin()),
     279       57970 :       code_lo_iterator_(heap->code_lo_space()->begin()) {}
     280             : 
     281     1661093 : MemoryChunk* OldGenerationMemoryChunkIterator::next() {
     282     1661093 :   switch (state_) {
     283             :     case kOldSpaceState: {
     284     1263063 :       if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
     285       57970 :       state_ = kMapState;
     286             :       V8_FALLTHROUGH;
     287             :     }
     288             :     case kMapState: {
     289      207933 :       if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
     290       57970 :       state_ = kCodeState;
     291             :       V8_FALLTHROUGH;
     292             :     }
     293             :     case kCodeState: {
     294      155930 :       if (code_iterator_ != heap_->code_space()->end())
     295             :         return *(code_iterator_++);
     296       57970 :       state_ = kLargeObjectState;
     297             :       V8_FALLTHROUGH;
     298             :     }
     299             :     case kLargeObjectState: {
     300      116860 :       if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
     301       57970 :       state_ = kCodeLargeObjectState;
     302             :       V8_FALLTHROUGH;
     303             :     }
     304             :     case kCodeLargeObjectState: {
     305      149187 :       if (code_lo_iterator_ != heap_->code_lo_space()->end())
     306             :         return *(code_lo_iterator_++);
     307       57970 :       state_ = kFinishedState;
     308             :       V8_FALLTHROUGH;
     309             :     }
     310             :     case kFinishedState:
     311             :       return nullptr;
     312             :     default:
     313             :       break;
     314             :   }
     315           0 :   UNREACHABLE();
     316             : }
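
[Editor's note, not part of the original file] The V8_FALLTHROUGH chain makes next() a resumable state machine over the five old-generation chunk sets: when one iterator is exhausted, the state advances and execution falls through to the next case. A minimal usage sketch:

    OldGenerationMemoryChunkIterator it(heap);
    for (MemoryChunk* chunk = it.next(); chunk != nullptr; chunk = it.next()) {
      // visit chunk
    }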
     317             : 
     318             : Page* FreeList::GetPageForCategoryType(FreeListCategoryType type) {
     319      216175 :   return top(type) ? top(type)->page() : nullptr;
     320             : }
     321             : 
     322             : FreeList* FreeListCategory::owner() { return free_list_; }
     323             : 
     324             : bool FreeListCategory::is_linked() {
     325           0 :   return prev_ != nullptr || next_ != nullptr;
     326             : }
     327             : 
     328    88806820 : AllocationResult LocalAllocationBuffer::AllocateRawAligned(
     329             :     int size_in_bytes, AllocationAlignment alignment) {
     330             :   Address current_top = allocation_info_.top();
     331    88806820 :   int filler_size = Heap::GetFillToAlign(current_top, alignment);
     332             : 
     333    88716635 :   Address new_top = current_top + filler_size + size_in_bytes;
     334    88716635 :   if (new_top > allocation_info_.limit()) return AllocationResult::Retry();
     335             : 
     336             :   allocation_info_.set_top(new_top);
     337    86891758 :   if (filler_size > 0) {
     338           0 :     return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top),
     339           0 :                                     filler_size);
     340             :   }
     341             : 
     342    86891758 :   return AllocationResult(HeapObject::FromAddress(current_top));
     343             : }
     344             : 
     345             : bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
     346   539336718 :   if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
     347             :     return true;
     348             :   }
     349     1374810 :   return SlowRefillLinearAllocationArea(size_in_bytes);
     350             : }
     351             : 
     352             : HeapObject PagedSpace::AllocateLinearly(int size_in_bytes) {
     353             :   Address current_top = allocation_info_.top();
     354   269689052 :   Address new_top = current_top + size_in_bytes;
     355             :   DCHECK_LE(new_top, allocation_info_.limit());
     356             :   allocation_info_.set_top(new_top);
     357             :   return HeapObject::FromAddress(current_top);
     358             : }
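
[Editor's note, not part of the original file] These two helpers split classic bump-pointer allocation into a reserve step and a commit step; AllocateRawUnaligned below composes them. A minimal sketch of that fast path:

    if (!EnsureLinearAllocationArea(size_in_bytes))    // top + size <= limit?
      return AllocationResult::Retry(identity());      // needs slow-path refill
    HeapObject object = AllocateLinearly(size_in_bytes);  // bump top, return old top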
     359             : 
     360           0 : HeapObject PagedSpace::TryAllocateLinearlyAligned(
     361             :     int* size_in_bytes, AllocationAlignment alignment) {
     362             :   Address current_top = allocation_info_.top();
     363           0 :   int filler_size = Heap::GetFillToAlign(current_top, alignment);
     364             : 
     365           0 :   Address new_top = current_top + filler_size + *size_in_bytes;
     366           0 :   if (new_top > allocation_info_.limit()) return HeapObject();
     367             : 
     368             :   allocation_info_.set_top(new_top);
     369           0 :   if (filler_size > 0) {
     370           0 :     *size_in_bytes += filler_size;
     371             :     return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
     372           0 :                                      filler_size);
     373             :   }
     374             : 
     375             :   return HeapObject::FromAddress(current_top);
     376             : }
     377             : 
     378   269668359 : AllocationResult PagedSpace::AllocateRawUnaligned(
     379             :     int size_in_bytes, UpdateSkipList update_skip_list) {
     380             :   DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
     381   269701322 :   if (!EnsureLinearAllocationArea(size_in_bytes)) {
     382             :     return AllocationResult::Retry(identity());
     383             :   }
     384             :   HeapObject object = AllocateLinearly(size_in_bytes);
     385             :   DCHECK(!object.is_null());
     386   269689052 :   if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) {
     387     1908888 :     SkipList::Update(object->address(), size_in_bytes);
     388             :   }
     389             :   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
     390   269825518 :   return object;
     391             : }
     392             : 
     393             : 
     394           0 : AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
     395             :                                                 AllocationAlignment alignment) {
     396             :   DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
     397             :   DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
     398           0 :   int allocation_size = size_in_bytes;
     399           0 :   HeapObject object = TryAllocateLinearlyAligned(&allocation_size, alignment);
     400           0 :   if (object.is_null()) {
     401             :     // We don't know exactly how much filler we need to align until space is
     402             :     // allocated, so assume the worst case.
     403           0 :     int filler_size = Heap::GetMaximumFillToAlign(alignment);
     404           0 :     allocation_size += filler_size;
     405           0 :     if (!EnsureLinearAllocationArea(allocation_size)) {
     406             :       return AllocationResult::Retry(identity());
     407             :     }
     408           0 :     allocation_size = size_in_bytes;
     409           0 :     object = TryAllocateLinearlyAligned(&allocation_size, alignment);
     410             :     DCHECK(!object.is_null());
     411             :   }
     412             :   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
     413           0 :   return object;
     414             : }
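
[Editor's note, not part of the original file] A worked example of the worst-case reasoning above, assuming 8-byte double alignment with smaller tagged slots: if the eventual top turns out to be 4 (mod 8), a kDoubleAligned request needs a one-word filler before the object; if it is 0 (mod 8), none. Because top is only fixed once space is reserved, the retry path reserves size_in_bytes plus Heap::GetMaximumFillToAlign(alignment) and then lets TryAllocateLinearlyAligned place whatever filler is actually required.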
     415             : 
     416             : 
     417   211322039 : AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
     418             :                                          AllocationAlignment alignment) {
     419   233505578 :   if (top_on_previous_step_ && top() < top_on_previous_step_ &&
     420           5 :       SupportsInlineAllocation()) {
     421             :     // Generated code decreased the top() pointer to do folded allocations.
     422             :     // The top_on_previous_step_ can be one byte beyond the current page.
     423             :     DCHECK_NE(top(), kNullAddress);
     424             :     DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
     425             :               Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
     426           5 :     top_on_previous_step_ = top();
     427             :   }
     428             :   size_t bytes_since_last =
     429   233505568 :       top_on_previous_step_ ? top() - top_on_previous_step_ : 0;
     430             : 
     431             :   DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
     432             : #ifdef V8_HOST_ARCH_32_BIT
     433             :   AllocationResult result = alignment != kWordAligned
     434             :                                 ? AllocateRawAligned(size_in_bytes, alignment)
     435             :                                 : AllocateRawUnaligned(size_in_bytes);
     436             : #else
     437   211322039 :   AllocationResult result = AllocateRawUnaligned(size_in_bytes);
     438             : #endif
     439             :   HeapObject heap_obj;
     440   422906089 :   if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
     441   125372258 :     AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
     442   125372258 :                    heap_obj->address(), size_in_bytes);
     443   125372203 :     StartNextInlineAllocationStep();
     444             :     DCHECK_IMPLIES(
     445             :         heap()->incremental_marking()->black_allocation(),
     446             :         heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
     447             :   }
     448   211425070 :   return result;
     449             : }
     450             : 
     451             : 
     452             : // -----------------------------------------------------------------------------
     453             : // NewSpace
     454             : 
     455             : 
     456             : AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
     457             :                                               AllocationAlignment alignment) {
     458             :   Address top = allocation_info_.top();
     459           0 :   int filler_size = Heap::GetFillToAlign(top, alignment);
     460           0 :   int aligned_size_in_bytes = size_in_bytes + filler_size;
     461             : 
     462           0 :   if (allocation_info_.limit() - top <
     463             :       static_cast<uintptr_t>(aligned_size_in_bytes)) {
     464             :     // See if we can create room.
     465           0 :     if (!EnsureAllocation(size_in_bytes, alignment)) {
     466             :       return AllocationResult::Retry();
     467             :     }
     468             : 
     469             :     top = allocation_info_.top();
     470           0 :     filler_size = Heap::GetFillToAlign(top, alignment);
     471           0 :     aligned_size_in_bytes = size_in_bytes + filler_size;
     472             :   }
     473             : 
     474             :   HeapObject obj = HeapObject::FromAddress(top);
     475           0 :   allocation_info_.set_top(top + aligned_size_in_bytes);
     476             :   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
     477             : 
     478           0 :   if (filler_size > 0) {
     479           0 :     obj = heap()->PrecedeWithFiller(obj, filler_size);
     480             :   }
     481             : 
     482             :   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);
     483             : 
     484             :   return obj;
     485             : }
     486             : 
     487             : 
     488             : AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
     489             :   Address top = allocation_info_.top();
     490   169860585 :   if (allocation_info_.limit() < top + size_in_bytes) {
     491             :     // See if we can create room.
     492      485849 :     if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
     493             :       return AllocationResult::Retry();
     494             :     }
     495             : 
     496             :     top = allocation_info_.top();
     497             :   }
     498             : 
     499             :   HeapObject obj = HeapObject::FromAddress(top);
     500   169838222 :   allocation_info_.set_top(top + size_in_bytes);
     501             :   DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
     502             : 
     503             :   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);
     504             : 
     505             :   return obj;
     506             : }
     507             : 
     508             : 
     509             : AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
     510             :                                        AllocationAlignment alignment) {
     511   169860352 :   if (top() < top_on_previous_step_) {
      512             :     // Generated code decreased the top() pointer to do folded allocations.
     513             :     DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
     514             :               Page::FromAllocationAreaAddress(top_on_previous_step_));
     515           4 :     top_on_previous_step_ = top();
     516             :   }
     517             : #ifdef V8_HOST_ARCH_32_BIT
     518             :   return alignment != kWordAligned
     519             :              ? AllocateRawAligned(size_in_bytes, alignment)
     520             :              : AllocateRawUnaligned(size_in_bytes);
     521             : #else
     522             : #ifdef V8_COMPRESS_POINTERS
     523             :   // TODO(ishell, v8:8875): Consider using aligned allocations once the
     524             :   // allocation alignment inconsistency is fixed. For now we keep using
     525             :   // unaligned access since both x64 and arm64 architectures (where pointer
     526             :   // compression is supported) allow unaligned access to doubles and full words.
     527             : #endif  // V8_COMPRESS_POINTERS
     528             :   return AllocateRawUnaligned(size_in_bytes);
     529             : #endif
     530             : }
     531             : 
     532      507248 : V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
     533             :     int size_in_bytes, AllocationAlignment alignment) {
     534      507248 :   base::MutexGuard guard(&mutex_);
     535      507457 :   return AllocateRaw(size_in_bytes, alignment);
     536             : }
     537             : 
     538      207299 : LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
     539             :                                                         AllocationResult result,
     540             :                                                         intptr_t size) {
     541      207299 :   if (result.IsRetry()) return InvalidBuffer();
     542             :   HeapObject obj;
     543             :   bool ok = result.To(&obj);
     544             :   USE(ok);
     545             :   DCHECK(ok);
     546             :   Address top = HeapObject::cast(obj)->address();
     547      413958 :   return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
     548             : }
     549             : 
     550             : 
     551             : bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
     552      206928 :   if (allocation_info_.top() == other->allocation_info_.limit()) {
     553             :     allocation_info_.set_top(other->allocation_info_.top());
     554             :     other->allocation_info_.Reset(kNullAddress, kNullAddress);
     555             :     return true;
     556             :   }
     557             :   return false;
     558             : }
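
[Editor's note, not part of the original file] TryMerge succeeds when other's buffer ends exactly where this buffer's unallocated region begins; this buffer then absorbs other's unused tail and other is reset to empty. A sketch of the pattern, with old_lab, new_lab, result, and lab_size as illustrative names:

    LocalAllocationBuffer new_lab =
        LocalAllocationBuffer::FromResult(heap, result, lab_size);
    if (new_lab.IsValid()) new_lab.TryMerge(&old_lab);  // reclaim old_lab's tail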
     559             : 
     560             : bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
     561       32615 :   if (IsValid()) {
     562             :     const Address object_address = object->address();
     563       32613 :     if ((allocation_info_.top() - object_size) == object_address) {
     564             :       allocation_info_.set_top(object_address);
     565             :       return true;
     566             :     }
     567             :   }
     568             :   return false;
     569             : }
     570             : 
     571             : }  // namespace internal
     572             : }  // namespace v8
     573             : 
     574             : #endif  // V8_HEAP_SPACES_INL_H_

Generated by: LCOV version 1.10