// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "test/cctest/heap/heap-utils.h"

#include "src/heap/factory.h"
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
#include "test/cctest/cctest.h"

namespace v8 {
namespace internal {
namespace heap {

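// Triggers a scavenge, i.e. a garbage collection of the new space only.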
void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }

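// Triggers a full mark-compact collection of the whole heap.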
void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }

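// Compacts the heap with two full GCs, finishes any pending sweeping, drops
// the old-space linear allocation area, and pins all existing old-space pages
// as never-allocate, so subsequent old-space allocations land on fresh pages.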
void SealCurrentObjects(Heap* heap) {
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  heap->old_space()->FreeLinearAllocationArea();
  for (Page* page : *heap->old_space()) {
    page->MarkNeverAllocateForTesting();
  }
}

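// Returns the number of elements a FixedArray occupying `size` bytes can
// hold, capped at the maximum regular (non-large-object) FixedArray length.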
int FixedArrayLenFromSize(int size) {
  return Min((size - FixedArray::kHeaderSize) / kTaggedSize,
             FixedArray::kMaxRegularLength);
}

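// Fills one old-space page with 128-byte FixedArrays, sizing the final array
// so that exactly `remainder` bytes of the page stay unallocated. Expects
// allocation to start at the beginning of a fresh page (checked below).
// Returns handles that keep the arrays alive.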
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
                                                                int remainder) {
  PauseAllocationObserversScope pause_observers(heap);
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  const int kArraySize = 128;
  const int kArrayLen = heap::FixedArrayLenFromSize(kArraySize);
  Handle<FixedArray> array;
  int allocated = 0;
  do {
    if (allocated + kArraySize * 2 >
        static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) {
      int size =
          kArraySize * 2 -
          ((allocated + kArraySize * 2) -
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) -
          remainder;
      int last_array_len = heap::FixedArrayLenFromSize(size);
      array = isolate->factory()->NewFixedArray(last_array_len, TENURED);
      CHECK_EQ(size, array->Size());
      allocated += array->Size() + remainder;
    } else {
      array = isolate->factory()->NewFixedArray(kArrayLen, TENURED);
      allocated += array->Size();
      CHECK_EQ(kArraySize, array->Size());
    }
    if (handles.empty()) {
      // Check that allocations started on a new page.
      CHECK_EQ(array->address(), Page::FromHeapObject(*array)->area_start());
    }
    handles.push_back(array);
  } while (allocated <
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()));
  return handles;
}

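// Allocates FixedArrays adding up to `padding_size` bytes, in the new space
// or the old space depending on `tenure`. Each object is at most
// `object_size` bytes; leftover space too small for another FixedArray is
// covered with a filler object instead. Returns handles to the padding.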
std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
                                              PretenureFlag tenure,
                                              int object_size) {
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  int allocate_memory;
  int length;
  int free_memory = padding_size;
  if (tenure == i::TENURED) {
    heap->old_space()->FreeLinearAllocationArea();
    int overall_free_memory = static_cast<int>(heap->old_space()->Available());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  } else {
    int overall_free_memory =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  }
  while (free_memory > 0) {
    if (free_memory > object_size) {
      allocate_memory = object_size;
      length = FixedArrayLenFromSize(allocate_memory);
    } else {
      allocate_memory = free_memory;
      length = FixedArrayLenFromSize(allocate_memory);
      if (length <= 0) {
        // Not enough room to create another fixed array. Let's create a
        // filler instead.
        if (free_memory > (2 * kTaggedSize)) {
          heap->CreateFillerObjectAt(
              *heap->old_space()->allocation_top_address(), free_memory,
              ClearRecordedSlots::kNo);
        }
        break;
      }
    }
    handles.push_back(isolate->factory()->NewFixedArray(length, tenure));
    CHECK((tenure == NOT_TENURED &&
           heap->new_space()->Contains(*handles.back())) ||
          (tenure == TENURED && heap->InOldSpace(*handles.back())));
    free_memory -= handles.back()->Size();
  }
  return handles;
}

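// Allocates padding in the new space so that exactly `extra_bytes` of the
// current linear allocation area remain free. Appends the padding handles to
// `out_handles` if provided.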
void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
                          std::vector<Handle<FixedArray>>* out_handles) {
  PauseAllocationObserversScope pause_observers(space->heap());
  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                         *space->allocation_top_address());
  CHECK(space_remaining >= extra_bytes);
  int new_linear_size = space_remaining - extra_bytes;
  if (new_linear_size == 0) return;
  std::vector<Handle<FixedArray>> handles =
      heap::CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
  if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
}

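// Fills the remainder of the current new-space page entirely with padding.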
void FillCurrentPage(v8::internal::NewSpace* space,
                     std::vector<Handle<FixedArray>>* out_handles) {
  heap::AllocateAllButNBytes(space, 0, out_handles);
}

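// Fills whatever is left of the current new-space page with padding objects.
// Returns false if the page was already full, true otherwise.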
bool FillUpOnePage(v8::internal::NewSpace* space,
                   std::vector<Handle<FixedArray>>* out_handles) {
  PauseAllocationObserversScope pause_observers(space->heap());
  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
                                         *space->allocation_top_address());
  if (space_remaining == 0) return false;
  std::vector<Handle<FixedArray>> handles =
      heap::CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
  if (out_handles != nullptr)
    out_handles->insert(out_handles->end(), handles.begin(), handles.end());
  return true;
}

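// Simulates a full new space: fills the current page, then keeps filling
// fresh pages until the space cannot provide another one.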
void SimulateFullSpace(v8::internal::NewSpace* space,
                       std::vector<Handle<FixedArray>>* out_handles) {
  heap::FillCurrentPage(space, out_handles);
  while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
  }
}

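// Ensures incremental marking is running (finishing any pending sweeping
// first) and, if `force_completion` is set, advances it in fixed-size steps,
// finalizing incrementally where possible, until marking is complete.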
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
  const double kStepSizeInMs = 100;
  CHECK(FLAG_incremental_marking);
  i::IncrementalMarking* marking = heap->incremental_marking();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  if (marking->IsSweeping()) {
    marking->FinalizeSweeping();
  }
  CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking() || marking->IsComplete());
  if (!force_completion) return;

  while (!marking->IsComplete()) {
    marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    i::StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
}

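// Simulates a full paged space without allocating anything: finishes pending
// sweeping, then drops the linear allocation area and resets the free list so
// the space reports no allocatable memory.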
void SimulateFullSpace(v8::internal::PagedSpace* space) {
  CodeSpaceMemoryModificationScope modification_scope(space->heap());
  i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  space->FreeLinearAllocationArea();
  space->ResetFreeList();
}

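// Makes the currently free memory in the space unusable for future
// allocations: drops the linear allocation area and marks every existing page
// as never-allocate.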
void AbandonCurrentlyFreeMemory(PagedSpace* space) {
  space->FreeLinearAllocationArea();
  for (Page* page : *space) {
    page->MarkNeverAllocateForTesting();
  }
}

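// Collects garbage in the given space, then waits for any concurrent sweeping
// to finish.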
void GcAndSweep(Heap* heap, AllocationSpace space) {
  heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
  if (heap->mark_compact_collector()->sweeping_in_progress()) {
    heap->mark_compact_collector()->EnsureSweepingCompleted();
  }
}

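// Flags the page as a forced evacuation candidate for the next mark-compact
// collection. If the linear allocation area currently points into this page,
// it is sealed with a filler object and released so the page stays iterable.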
void ForceEvacuationCandidate(Page* page) {
  CHECK(FLAG_manual_evacuation_candidates_selection);
  page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  PagedSpace* space = static_cast<PagedSpace*>(page->owner());
  Address top = space->top();
  Address limit = space->limit();
  if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
    // Create filler object to keep page iterable if it was iterable.
    int remaining = static_cast<int>(limit - top);
    space->heap()->CreateFillerObjectAt(top, remaining,
                                        ClearRecordedSlots::kNo);
    space->FreeLinearAllocationArea();
  }
}

}  // namespace heap
}  // namespace internal
}  // namespace v8
