Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "test/cctest/heap/heap-utils.h"
6 :
7 : #include "src/heap/factory.h"
8 : #include "src/heap/heap-inl.h"
9 : #include "src/heap/incremental-marking.h"
10 : #include "src/heap/mark-compact.h"
11 : #include "src/isolate.h"
12 : #include "test/cctest/cctest.h"
13 :
14 : namespace v8 {
15 : namespace internal {
16 : namespace heap {
17 :
18 25 : void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }
19 :
20 25 : void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }
21 :
22 65 : void SealCurrentObjects(Heap* heap) {
23 65 : CcTest::CollectAllGarbage();
24 65 : CcTest::CollectAllGarbage();
25 65 : heap->mark_compact_collector()->EnsureSweepingCompleted();
26 65 : heap->old_space()->FreeLinearAllocationArea();
27 195 : for (Page* page : *heap->old_space()) {
28 130 : page->MarkNeverAllocateForTesting();
29 : }
30 65 : }
31 :
32 10 : int FixedArrayLenFromSize(int size) {
33 2264 : return Min((size - FixedArray::kHeaderSize) / kTaggedSize,
34 10 : FixedArray::kMaxRegularLength);
35 : }
36 :
// Fills exactly one old-space page with FixedArrays, leaving `remainder`
// bytes of the page unused, and returns handles to every array created.
// Allocation observers are paused so step callbacks do not perturb the
// byte-exact accounting below.
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
                                                                int remainder) {
  PauseAllocationObserversScope pause_observers(heap);
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  const int kArraySize = 128;
  const int kArrayLen = heap::FixedArrayLenFromSize(kArraySize);
  Handle<FixedArray> array;
  int allocated = 0;
  do {
    if (allocated + kArraySize * 2 >
        static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) {
      // Fewer than two more full arrays fit on the page: size the final
      // array so that exactly `remainder` bytes of the page stay unused.
      int size =
          kArraySize * 2 -
          ((allocated + kArraySize * 2) -
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage())) -
          remainder;
      int last_array_len = heap::FixedArrayLenFromSize(size);
      array = isolate->factory()->NewFixedArray(last_array_len,
                                                AllocationType::kOld);
      CHECK_EQ(size, array->Size());
      // Count the reserved remainder as used so the loop terminates.
      allocated += array->Size() + remainder;
    } else {
      // Common case: another full 128-byte array fits.
      array =
          isolate->factory()->NewFixedArray(kArrayLen, AllocationType::kOld);
      allocated += array->Size();
      CHECK_EQ(kArraySize, array->Size());
    }
    if (handles.empty()) {
      // Check that allocations started on a new page.
      CHECK_EQ(array->address(), Page::FromHeapObject(*array)->area_start());
    }
    handles.push_back(array);
  } while (allocated <
           static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()));
  return handles;
}
74 :
// Consumes `padding_size` bytes of the current allocation area by allocating
// FixedArrays of at most `object_size` bytes each, in the space selected by
// `allocation`. Returns handles to the arrays so callers can keep them alive.
std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
                                              AllocationType allocation,
                                              int object_size) {
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  int allocate_memory;
  int length;
  int free_memory = padding_size;
  if (allocation == i::AllocationType::kOld) {
    heap->old_space()->FreeLinearAllocationArea();
    int overall_free_memory = static_cast<int>(heap->old_space()->Available());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  } else {
    // Young allocation: measure the remaining linear allocation area directly
    // from the top/limit pointers.
    int overall_free_memory =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  }
  while (free_memory > 0) {
    if (free_memory > object_size) {
      allocate_memory = object_size;
      length = FixedArrayLenFromSize(allocate_memory);
    } else {
      allocate_memory = free_memory;
      length = FixedArrayLenFromSize(allocate_memory);
      if (length <= 0) {
        // Not enough room to create another fixed array. Let's create a filler.
        // NOTE(review): the filler is placed at the *old-space* allocation top
        // even when `allocation` is kYoung — confirm this is intentional.
        if (free_memory > (2 * kTaggedSize)) {
          heap->CreateFillerObjectAt(
              *heap->old_space()->allocation_top_address(), free_memory,
              ClearRecordedSlots::kNo);
        }
        break;
      }
    }
    handles.push_back(isolate->factory()->NewFixedArray(length, allocation));
    // The new array must have landed in the requested space.
    CHECK((allocation == AllocationType::kYoung &&
           heap->new_space()->Contains(*handles.back())) ||
          (allocation == AllocationType::kOld &&
           heap->InOldSpace(*handles.back())));
    free_memory -= handles.back()->Size();
  }
  return handles;
}
119 :
120 81 : void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
121 : std::vector<Handle<FixedArray>>* out_handles) {
122 162 : PauseAllocationObserversScope pause_observers(space->heap());
123 : int space_remaining = static_cast<int>(*space->allocation_limit_address() -
124 81 : *space->allocation_top_address());
125 81 : CHECK(space_remaining >= extra_bytes);
126 81 : int new_linear_size = space_remaining - extra_bytes;
127 81 : if (new_linear_size == 0) return;
128 : std::vector<Handle<FixedArray>> handles = heap::CreatePadding(
129 81 : space->heap(), new_linear_size, i::AllocationType::kYoung);
130 81 : if (out_handles != nullptr)
131 30 : out_handles->insert(out_handles->end(), handles.begin(), handles.end());
132 : }
133 :
134 20 : void FillCurrentPage(v8::internal::NewSpace* space,
135 : std::vector<Handle<FixedArray>>* out_handles) {
136 77 : heap::AllocateAllButNBytes(space, 0, out_handles);
137 20 : }
138 :
139 1899 : bool FillUpOnePage(v8::internal::NewSpace* space,
140 : std::vector<Handle<FixedArray>>* out_handles) {
141 3798 : PauseAllocationObserversScope pause_observers(space->heap());
142 : int space_remaining = static_cast<int>(*space->allocation_limit_address() -
143 1899 : *space->allocation_top_address());
144 1899 : if (space_remaining == 0) return false;
145 : std::vector<Handle<FixedArray>> handles = heap::CreatePadding(
146 921 : space->heap(), space_remaining, i::AllocationType::kYoung);
147 921 : if (out_handles != nullptr)
148 765 : out_handles->insert(out_handles->end(), handles.begin(), handles.end());
149 : return true;
150 : }
151 :
152 57 : void SimulateFullSpace(v8::internal::NewSpace* space,
153 : std::vector<Handle<FixedArray>>* out_handles) {
154 : heap::FillCurrentPage(space, out_handles);
155 1899 : while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
156 : }
157 57 : }
158 :
// Drives incremental marking from the heap's current state: finishes any
// in-progress sweeping, starts marking if it is stopped and, when
// `force_completion` is true, steps the marker until marking is complete
// (finalizing the weak-closure over-approximation along the way).
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
  const double kStepSizeInMs = 100;
  CHECK(FLAG_incremental_marking);
  i::IncrementalMarking* marking = heap->incremental_marking();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  // Marking cannot make progress while the collector is still sweeping.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  if (marking->IsSweeping()) {
    marking->FinalizeSweeping();
  }
  CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking() || marking->IsComplete());
  if (!force_completion) return;

  // Step until the marker reports completion; each step may make it ready
  // to finalize.
  while (!marking->IsComplete()) {
    marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    i::StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
}
187 :
188 380 : void SimulateFullSpace(v8::internal::PagedSpace* space) {
189 760 : CodeSpaceMemoryModificationScope modification_scope(space->heap());
190 : i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
191 380 : if (collector->sweeping_in_progress()) {
192 26 : collector->EnsureSweepingCompleted();
193 : }
194 380 : space->FreeLinearAllocationArea();
195 380 : space->ResetFreeList();
196 380 : }
197 :
198 20 : void AbandonCurrentlyFreeMemory(PagedSpace* space) {
199 20 : space->FreeLinearAllocationArea();
200 55 : for (Page* page : *space) {
201 35 : page->MarkNeverAllocateForTesting();
202 : }
203 20 : }
204 :
205 240 : void GcAndSweep(Heap* heap, AllocationSpace space) {
206 240 : heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
207 240 : if (heap->mark_compact_collector()->sweeping_in_progress()) {
208 133 : heap->mark_compact_collector()->EnsureSweepingCompleted();
209 : }
210 240 : }
211 :
212 6384 : void ForceEvacuationCandidate(Page* page) {
213 6384 : CHECK(FLAG_manual_evacuation_candidates_selection);
214 : page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
215 : PagedSpace* space = static_cast<PagedSpace*>(page->owner());
216 : Address top = space->top();
217 : Address limit = space->limit();
218 12758 : if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
219 : // Create filler object to keep page iterable if it was iterable.
220 6374 : int remaining = static_cast<int>(limit - top);
221 : space->heap()->CreateFillerObjectAt(top, remaining,
222 6374 : ClearRecordedSlots::kNo);
223 6374 : space->FreeLinearAllocationArea();
224 : }
225 6384 : }
226 :
227 : } // namespace heap
228 : } // namespace internal
229 79917 : } // namespace v8
|