Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "test/cctest/heap/heap-utils.h"
6 :
7 : #include "src/heap/factory.h"
8 : #include "src/heap/heap-inl.h"
9 : #include "src/heap/incremental-marking.h"
10 : #include "src/heap/mark-compact.h"
11 : #include "src/isolate.h"
12 : #include "test/cctest/cctest.h"
13 :
14 : namespace v8 {
15 : namespace internal {
16 : namespace heap {
17 :
18 260 : void SealCurrentObjects(Heap* heap) {
19 65 : CcTest::CollectAllGarbage();
20 65 : CcTest::CollectAllGarbage();
21 65 : heap->mark_compact_collector()->EnsureSweepingCompleted();
22 65 : heap->old_space()->FreeLinearAllocationArea();
23 195 : for (Page* page : *heap->old_space()) {
24 130 : page->MarkNeverAllocateForTesting();
25 : }
26 65 : }
27 :
28 10 : int FixedArrayLenFromSize(int size) {
29 666 : return (size - FixedArray::kHeaderSize) / kTaggedSize;
30 : }
31 :
// Fills exactly one old-space page with tenured 128-byte FixedArrays, leaving
// |remainder| bytes of the page's allocatable area unused, and returns handles
// to all arrays so the caller can keep them alive.
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
                                                                int remainder) {
  // Allocation observers (e.g. incremental-marking triggers) must stay quiet
  // so the page fills deterministically.
  PauseAllocationObserversScope pause_observers(heap);
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  const int kArraySize = 128;
  const int kArrayLen = heap::FixedArrayLenFromSize(kArraySize);
  Handle<FixedArray> array;
  int allocated = 0;
  do {
    // Within two array sizes of the page end: size the final array so that
    // exactly |remainder| bytes remain unallocated on the page.
    if (allocated + kArraySize * 2 >
        static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) {
      int size =
          kArraySize * 2 -
          ((allocated + kArraySize * 2) -
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) -
          remainder;
      int last_array_len = heap::FixedArrayLenFromSize(size);
      array = isolate->factory()->NewFixedArray(last_array_len, TENURED);
      CHECK_EQ(size, array->Size());
      // Count the intentionally skipped |remainder| bytes as allocated so the
      // loop condition below terminates.
      allocated += array->Size() + remainder;
    } else {
      array = isolate->factory()->NewFixedArray(kArrayLen, TENURED);
      allocated += array->Size();
      CHECK_EQ(kArraySize, array->Size());
    }
    if (handles.empty()) {
      // Check that allocations started on a new page.
      CHECK_EQ(array->address(), Page::FromHeapObject(*array)->area_start());
    }
    handles.push_back(array);
  } while (allocated <
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()));
  return handles;
}
67 :
// Allocates FixedArrays totalling |padding_size| bytes in new or old space
// (per |tenure|), in chunks of at most |object_size| bytes, and returns the
// handles. Used to consume a known amount of free space in a test.
std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
                                              PretenureFlag tenure,
                                              int object_size) {
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  int allocate_memory;
  int length;
  int free_memory = padding_size;
  if (tenure == i::TENURED) {
    // Old space: discard the linear allocation area first so Available()
    // reflects all free memory, then sanity-check the requested size fits.
    heap->old_space()->FreeLinearAllocationArea();
    int overall_free_memory = static_cast<int>(heap->old_space()->Available());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  } else {
    // New space: free memory is the gap between allocation top and limit.
    int overall_free_memory =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  }
  while (free_memory > 0) {
    if (free_memory > object_size) {
      allocate_memory = object_size;
      length = FixedArrayLenFromSize(allocate_memory);
    } else {
      allocate_memory = free_memory;
      length = FixedArrayLenFromSize(allocate_memory);
      if (length <= 0) {
        // Not enough room to create another fixed array. Let's create a filler.
        // NOTE(review): the filler is placed at the *old-space* allocation
        // top even when tenure == NOT_TENURED — looks suspicious for the
        // new-space path; confirm this branch is only meaningful for TENURED.
        if (free_memory > (2 * kTaggedSize)) {
          heap->CreateFillerObjectAt(
              *heap->old_space()->allocation_top_address(), free_memory,
              ClearRecordedSlots::kNo);
        }
        break;
      }
    }
    handles.push_back(isolate->factory()->NewFixedArray(length, tenure));
    // Each array must have landed in the space the tenure flag requested.
    CHECK((tenure == NOT_TENURED && Heap::InNewSpace(*handles.back())) ||
          (tenure == TENURED && heap->InOldSpace(*handles.back())));
    free_memory -= allocate_memory;
  }
  return handles;
}
110 :
111 81 : void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
112 : std::vector<Handle<FixedArray>>* out_handles) {
113 162 : PauseAllocationObserversScope pause_observers(space->heap());
114 : int space_remaining = static_cast<int>(*space->allocation_limit_address() -
115 81 : *space->allocation_top_address());
116 81 : CHECK(space_remaining >= extra_bytes);
117 81 : int new_linear_size = space_remaining - extra_bytes;
118 81 : if (new_linear_size == 0) return;
119 : std::vector<Handle<FixedArray>> handles =
120 81 : heap::CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
121 81 : if (out_handles != nullptr)
122 111 : out_handles->insert(out_handles->end(), handles.begin(), handles.end());
123 : }
124 :
125 20 : void FillCurrentPage(v8::internal::NewSpace* space,
126 : std::vector<Handle<FixedArray>>* out_handles) {
127 77 : heap::AllocateAllButNBytes(space, 0, out_handles);
128 20 : }
129 :
130 911 : bool FillUpOnePage(v8::internal::NewSpace* space,
131 : std::vector<Handle<FixedArray>>* out_handles) {
132 1338 : PauseAllocationObserversScope pause_observers(space->heap());
133 : int space_remaining = static_cast<int>(*space->allocation_limit_address() -
134 911 : *space->allocation_top_address());
135 911 : if (space_remaining == 0) return false;
136 : std::vector<Handle<FixedArray>> handles =
137 427 : heap::CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
138 427 : if (out_handles != nullptr)
139 365 : out_handles->insert(out_handles->end(), handles.begin(), handles.end());
140 911 : return true;
141 : }
142 :
143 57 : void SimulateFullSpace(v8::internal::NewSpace* space,
144 : std::vector<Handle<FixedArray>>* out_handles) {
145 : heap::FillCurrentPage(space, out_handles);
146 911 : while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
147 : }
148 57 : }
149 :
// Drives incremental marking for tests: ensures sweeping is done, starts
// marking if it is stopped, and — when |force_completion| is set — steps the
// marker until marking is complete (without actually performing the GC).
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
  CHECK(FLAG_incremental_marking);
  i::IncrementalMarking* marking = heap->incremental_marking();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  // Marking cannot start while the previous cycle's sweeping is in flight.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  if (marking->IsSweeping()) {
    marking->FinalizeSweeping();
  }
  CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  // At this point marking must be running (or already finished).
  CHECK(marking->IsMarking() || marking->IsComplete());
  if (!force_completion) return;

  // Step in 1MB increments until the marker reports completion, finalizing
  // the weak closure whenever the marker says it is ready.
  while (!marking->IsComplete()) {
    marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  i::StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
}
177 :
178 380 : void SimulateFullSpace(v8::internal::PagedSpace* space) {
179 380 : CodeSpaceMemoryModificationScope modification_scope(space->heap());
180 380 : i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
181 380 : if (collector->sweeping_in_progress()) {
182 27 : collector->EnsureSweepingCompleted();
183 : }
184 380 : space->FreeLinearAllocationArea();
185 380 : space->ResetFreeList();
186 380 : }
187 :
188 20 : void AbandonCurrentlyFreeMemory(PagedSpace* space) {
189 20 : space->FreeLinearAllocationArea();
190 55 : for (Page* page : *space) {
191 35 : page->MarkNeverAllocateForTesting();
192 : }
193 20 : }
194 :
195 460 : void GcAndSweep(Heap* heap, AllocationSpace space) {
196 230 : heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
197 460 : if (heap->mark_compact_collector()->sweeping_in_progress()) {
198 130 : heap->mark_compact_collector()->EnsureSweepingCompleted();
199 : }
200 230 : }
201 :
// Flags |page| as a forced evacuation candidate for the next mark-compact.
// If the owning space's linear allocation area lives on this page, the gap
// between top and limit is turned into a filler and the area is released so
// the page stays iterable and gains no further allocations.
void ForceEvacuationCandidate(Page* page) {
  // Only meaningful when the test has opted into manual candidate selection.
  CHECK(FLAG_manual_evacuation_candidates_selection);
  page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
  PagedSpace* space = static_cast<PagedSpace*>(page->owner());
  Address top = space->top();
  Address limit = space->limit();
  if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
    // Create filler object to keep page iterable if it was iterable.
    int remaining = static_cast<int>(limit - top);
    space->heap()->CreateFillerObjectAt(top, remaining,
                                        ClearRecordedSlots::kNo);
    space->FreeLinearAllocationArea();
  }
}
216 :
217 : } // namespace heap
218 : } // namespace internal
219 85011 : } // namespace v8
|