Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "test/cctest/heap/heap-utils.h"
6 :
7 : #include "src/factory.h"
8 : #include "src/heap/heap-inl.h"
9 : #include "src/heap/incremental-marking.h"
10 : #include "src/heap/mark-compact.h"
11 : #include "src/isolate.h"
12 :
13 : namespace v8 {
14 : namespace internal {
15 : namespace heap {
16 :
17 288 : void SealCurrentObjects(Heap* heap) {
18 : heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
19 72 : GarbageCollectionReason::kTesting);
20 : heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
21 72 : GarbageCollectionReason::kTesting);
22 72 : heap->mark_compact_collector()->EnsureSweepingCompleted();
23 72 : heap->old_space()->EmptyAllocationInfo();
24 432 : for (Page* page : *heap->old_space()) {
25 144 : page->MarkNeverAllocateForTesting();
26 : }
27 72 : }
28 :
29 12 : int FixedArrayLenFromSize(int size) {
30 629 : return (size - FixedArray::kHeaderSize) / kPointerSize;
31 : }
32 :
// Fills an old-space page with tenured 128-byte FixedArrays until exactly
// |remainder| bytes of the page's allocatable area are left unused.
// Returns handles keeping the arrays alive. Assumes allocation starts at the
// beginning of a fresh page (checked on the first allocation below).
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
                                                                int remainder) {
  // Allocation observers would interfere with the byte-exact accounting done
  // in the loop below, so pause them for the duration of this function.
  PauseAllocationObserversScope pause_observers(heap);
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  const int kArraySize = 128;
  const int kArrayLen = heap::FixedArrayLenFromSize(kArraySize);
  // The fill size must evenly divide the page's allocatable area, otherwise
  // the loop's equality-based termination condition would never trigger.
  CHECK_EQ(Page::kAllocatableMemory % kArraySize, 0);
  Handle<FixedArray> array;
  for (int allocated = 0; allocated != (Page::kAllocatableMemory - remainder);
       allocated += array->Size()) {
    if (allocated == (Page::kAllocatableMemory - kArraySize)) {
      // Last slot: shrink the final array by |remainder| bytes so exactly
      // |remainder| bytes remain free on the page.
      array = isolate->factory()->NewFixedArray(
          heap::FixedArrayLenFromSize(kArraySize - remainder), TENURED);
      CHECK_EQ(kArraySize - remainder, array->Size());
    } else {
      array = isolate->factory()->NewFixedArray(kArrayLen, TENURED);
      CHECK_EQ(kArraySize, array->Size());
    }
    if (handles.empty()) {
      // Check that allocations started on a new page.
      CHECK_EQ(array->address(),
               Page::FromAddress(array->address())->area_start());
    }
    handles.push_back(array);
  }
  return handles;
}
61 :
// Allocates FixedArrays totalling |padding_size| bytes in new or old space
// (per |tenure|), in chunks of at most |object_size| bytes each. Returns
// handles keeping the padding alive. Used to consume free space so tests can
// force allocations onto fresh pages or trigger GCs deterministically.
std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
                                              PretenureFlag tenure,
                                              int object_size) {
  std::vector<Handle<FixedArray>> handles;
  Isolate* isolate = heap->isolate();
  int allocate_memory;
  int length;
  int free_memory = padding_size;
  if (tenure == i::TENURED) {
    // Close the linear allocation area so Available() reflects reality.
    heap->old_space()->EmptyAllocationInfo();
    int overall_free_memory = static_cast<int>(heap->old_space()->Available());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  } else {
    heap->new_space()->DisableInlineAllocationSteps();
    // Remaining room in the new-space linear allocation area.
    int overall_free_memory =
        static_cast<int>(*heap->new_space()->allocation_limit_address() -
                         *heap->new_space()->allocation_top_address());
    CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
  }
  while (free_memory > 0) {
    if (free_memory > object_size) {
      allocate_memory = object_size;
      length = FixedArrayLenFromSize(allocate_memory);
    } else {
      allocate_memory = free_memory;
      length = FixedArrayLenFromSize(allocate_memory);
      if (length <= 0) {
        // Not enough room to create another fixed array. Let's create a filler.
        // NOTE(review): the filler is placed at the old-space allocation top
        // even when |tenure| is NOT_TENURED — presumably this path is only
        // reached for tenured padding; confirm against callers.
        if (free_memory > (2 * kPointerSize)) {
          heap->CreateFillerObjectAt(
              *heap->old_space()->allocation_top_address(), free_memory,
              ClearRecordedSlots::kNo);
        }
        break;
      }
    }
    handles.push_back(isolate->factory()->NewFixedArray(length, tenure));
    // Verify each chunk landed in the space the tenure flag requested.
    CHECK((tenure == NOT_TENURED && heap->InNewSpace(*handles.back())) ||
          (tenure == TENURED && heap->InOldSpace(*handles.back())));
    free_memory -= allocate_memory;
  }
  return handles;
}
105 :
106 73 : void AllocateAllButNBytes(v8::internal::NewSpace* space, int extra_bytes,
107 : std::vector<Handle<FixedArray>>* out_handles) {
108 : space->DisableInlineAllocationSteps();
109 73 : int space_remaining = static_cast<int>(*space->allocation_limit_address() -
110 73 : *space->allocation_top_address());
111 73 : CHECK(space_remaining >= extra_bytes);
112 73 : int new_linear_size = space_remaining - extra_bytes;
113 73 : if (new_linear_size == 0) return;
114 : std::vector<Handle<FixedArray>> handles =
115 73 : heap::CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED);
116 73 : if (out_handles != nullptr)
117 26 : out_handles->insert(out_handles->end(), handles.begin(), handles.end());
118 : }
119 :
120 17 : void FillCurrentPage(v8::internal::NewSpace* space,
121 : std::vector<Handle<FixedArray>>* out_handles) {
122 69 : heap::AllocateAllButNBytes(space, 0, out_handles);
123 17 : }
124 :
125 766 : bool FillUpOnePage(v8::internal::NewSpace* space,
126 : std::vector<Handle<FixedArray>>* out_handles) {
127 : space->DisableInlineAllocationSteps();
128 766 : int space_remaining = static_cast<int>(*space->allocation_limit_address() -
129 766 : *space->allocation_top_address());
130 766 : if (space_remaining == 0) return false;
131 : std::vector<Handle<FixedArray>> handles =
132 357 : heap::CreatePadding(space->heap(), space_remaining, i::NOT_TENURED);
133 357 : if (out_handles != nullptr)
134 295 : out_handles->insert(out_handles->end(), handles.begin(), handles.end());
135 : return true;
136 : }
137 :
138 52 : void SimulateFullSpace(v8::internal::NewSpace* space,
139 : std::vector<Handle<FixedArray>>* out_handles) {
140 : heap::FillCurrentPage(space, out_handles);
141 766 : while (heap::FillUpOnePage(space, out_handles) || space->AddFreshPage()) {
142 : }
143 52 : }
144 :
// Drives incremental marking for tests: ensures marking is started and, when
// |force_completion| is set, steps it until the marking phase is complete.
void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
  CHECK(FLAG_incremental_marking);
  i::IncrementalMarking* marking = heap->incremental_marking();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  // Marking cannot start while sweeping from a previous cycle is pending.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  if (marking->IsSweeping()) {
    marking->FinalizeSweeping();
  }
  CHECK(marking->IsMarking() || marking->IsStopped() || marking->IsComplete());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking() || marking->IsComplete());
  if (!force_completion) return;

  // Step in 1 MB increments until marking reaches completion, finalizing
  // incrementally whenever the weak closure can be over-approximated.
  while (!marking->IsComplete()) {
    marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  i::StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
}
172 :
173 341 : void SimulateFullSpace(v8::internal::PagedSpace* space) {
174 341 : i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
175 341 : if (collector->sweeping_in_progress()) {
176 22 : collector->EnsureSweepingCompleted();
177 : }
178 341 : space->EmptyAllocationInfo();
179 : space->ResetFreeList();
180 341 : }
181 :
182 11 : void AbandonCurrentlyFreeMemory(PagedSpace* space) {
183 11 : space->EmptyAllocationInfo();
184 56 : for (Page* page : *space) {
185 17 : page->MarkNeverAllocateForTesting();
186 : }
187 11 : }
188 :
189 408 : void GcAndSweep(Heap* heap, AllocationSpace space) {
190 204 : heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
191 204 : if (heap->mark_compact_collector()->sweeping_in_progress()) {
192 114 : heap->mark_compact_collector()->EnsureSweepingCompleted();
193 : }
194 204 : }
195 :
196 4921 : void ForceEvacuationCandidate(Page* page) {
197 4921 : CHECK(FLAG_manual_evacuation_candidates_selection);
198 : page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
199 : PagedSpace* space = static_cast<PagedSpace*>(page->owner());
200 : Address top = space->top();
201 : Address limit = space->limit();
202 9822 : if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
203 : // Create filler object to keep page iterable if it was iterable.
204 4896 : int remaining = static_cast<int>(limit - top);
205 : space->heap()->CreateFillerObjectAt(top, remaining,
206 4896 : ClearRecordedSlots::kNo);
207 4896 : space->EmptyAllocationInfo();
208 : }
209 4921 : }
210 :
211 : } // namespace heap
212 : } // namespace internal
213 : } // namespace v8
|