Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_HEAP_MARK_COMPACT_INL_H_
6 : #define V8_HEAP_MARK_COMPACT_INL_H_
7 :
8 : #include "src/heap/mark-compact.h"
9 : #include "src/heap/remembered-set.h"
10 : #include "src/isolate.h"
11 :
12 : namespace v8 {
13 : namespace internal {
14 :
       : // Pushes an object that has already been marked black onto the marking
       : // deque. If the deque is full (Push fails), the object is demoted back
       : // to grey so a later overflow-recovery rescan can find it again.
15 : void MarkCompactCollector::PushBlack(HeapObject* obj) {
16 : DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
17 : obj, MarkingState::Internal(obj))));
18 844018366 : if (!marking_deque()->Push(obj)) {
19 : ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
20 1670616 : obj, MarkingState::Internal(obj));
21 : }
22 : }
23 :
       : // Same as MarkCompactCollector::PushBlack, but for the minor (young
       : // generation) collector, which uses the external marking state.
24 : void MinorMarkCompactCollector::PushBlack(HeapObject* obj) {
25 : DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
26 : obj, MarkingState::External(obj))));
27 0 : if (!marking_deque()->Push(obj)) {
28 : ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
29 0 : obj, MarkingState::External(obj));
30 : }
31 : }
32 :
       : // Inserts an already-black object at the front of the marking deque.
       : // As in PushBlack, a failed insert (deque full) demotes the object to
       : // grey so it is reprocessed during overflow recovery.
33 : void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
34 : DCHECK(ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)));
35 85194 : if (!marking_deque()->Unshift(obj)) {
36 0 : ObjectMarking::BlackToGrey(obj, MarkingState::Internal(obj));
37 : }
38 : }
39 :
       : // Marks an object if it is still unmarked: transitions it white ->
       : // black and queues it on the marking deque so its fields get visited.
       : // Objects already grey or black are left untouched.
40 : void MarkCompactCollector::MarkObject(HeapObject* obj) {
41 3350612911 : if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
42 6701226640 : obj, MarkingState::Internal(obj))) {
43 : ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
44 842346278 : obj, MarkingState::Internal(obj));
45 : PushBlack(obj);
46 : }
47 : }
48 :
       : // Minor-collector counterpart of MarkObject: white -> black transition
       : // on the external marking state, then push for field visiting.
49 : void MinorMarkCompactCollector::MarkObject(HeapObject* obj) {
50 0 : if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
51 0 : obj, MarkingState::External(obj))) {
52 : ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
53 0 : obj, MarkingState::External(obj));
54 : PushBlack(obj);
55 : }
56 : }
57 :
       : // Records the slot (a field of |object| holding |target|) in the
       : // OLD_TO_OLD remembered set of the source page, but only when |target|
       : // lives on a page selected for evacuation and slot recording is not
       : // suppressed for |object|. Recorded slots are updated after evacuation
       : // moves the target.
58 : void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
59 : Object* target) {
60 4411134876 : Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
61 4411134904 : Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
62 4422979439 : if (target_page->IsEvacuationCandidate() &&
63 : !ShouldSkipEvacuationSlotRecording(object)) {
64 : DCHECK(
65 : ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object)));
66 : RememberedSet<OLD_TO_OLD>::Insert(source_page,
67 6359734 : reinterpret_cast<Address>(slot));
68 : }
69 : }
70 :
71 :
       : // Adds a SharedFunctionInfo to the flushing-candidate list. The list is
       : // singly linked through the code object's gc_metadata field (see
       : // Set/GetNextCandidate below); a non-null next link means the candidate
       : // is already enqueued, so it is not added twice.
72 9899 : void CodeFlusher::AddCandidate(SharedFunctionInfo* shared_info) {
73 9899 : if (GetNextCandidate(shared_info) == nullptr) {
74 9899 : SetNextCandidate(shared_info, shared_function_info_candidates_head_);
75 9899 : shared_function_info_candidates_head_ = shared_info;
76 : }
77 9899 : }
78 :
79 :
       : // Adds a JSFunction to the flushing-candidate list, linked through the
       : // function's next_function_link field. An undefined link means the
       : // function is not yet enqueued; the DCHECK guards that the function
       : // still shares its code with its SharedFunctionInfo.
80 3704 : void CodeFlusher::AddCandidate(JSFunction* function) {
81 : DCHECK(function->code() == function->shared()->code());
82 7408 : if (function->next_function_link()->IsUndefined(isolate_)) {
83 3704 : SetNextCandidate(function, jsfunction_candidates_head_);
84 3704 : jsfunction_candidates_head_ = function;
85 : }
86 3704 : }
87 :
88 :
       : // Returns the raw address of the candidate's next_function_link field,
       : // so callers can treat the list link itself as a visitable slot.
89 : JSFunction** CodeFlusher::GetNextCandidateSlot(JSFunction* candidate) {
90 : return reinterpret_cast<JSFunction**>(
91 : HeapObject::RawField(candidate, JSFunction::kNextFunctionLinkOffset));
92 : }
93 :
94 :
       : // Reads the next candidate in the JSFunction list from the function's
       : // next_function_link field.
95 : JSFunction* CodeFlusher::GetNextCandidate(JSFunction* candidate) {
96 : Object* next_candidate = candidate->next_function_link();
97 : return reinterpret_cast<JSFunction*>(next_candidate);
98 : }
99 :
100 :
       : // Stores the next-candidate link in the function's next_function_link
       : // field, using the weak write barrier since the link must not keep the
       : // next candidate alive.
101 : void CodeFlusher::SetNextCandidate(JSFunction* candidate,
102 : JSFunction* next_candidate) {
103 3704 : candidate->set_next_function_link(next_candidate, UPDATE_WEAK_WRITE_BARRIER);
104 : }
105 :
106 :
       : // Unlinks a JSFunction from the candidate list by resetting its
       : // next_function_link to undefined. The caller passes the undefined
       : // sentinel in, so no write barrier is required.
107 : void CodeFlusher::ClearNextCandidate(JSFunction* candidate, Object* undefined) {
108 : DCHECK(undefined->IsUndefined(candidate->GetIsolate()));
109 3704 : candidate->set_next_function_link(undefined, SKIP_WRITE_BARRIER);
110 : }
111 :
112 :
       : // Reads the next candidate in the SharedFunctionInfo list; the link is
       : // stashed in the gc_metadata field of the candidate's code object.
113 : SharedFunctionInfo* CodeFlusher::GetNextCandidate(
114 : SharedFunctionInfo* candidate) {
115 : Object* next_candidate = candidate->code()->gc_metadata();
116 : return reinterpret_cast<SharedFunctionInfo*>(next_candidate);
117 : }
118 :
119 :
       : // Stores the next-candidate link in the gc_metadata field of the
       : // candidate's code object.
120 : void CodeFlusher::SetNextCandidate(SharedFunctionInfo* candidate,
121 : SharedFunctionInfo* next_candidate) {
122 9899 : candidate->code()->set_gc_metadata(next_candidate);
123 : }
124 :
125 :
       : // Unlinks a SharedFunctionInfo candidate by clearing its code object's
       : // gc_metadata field; NULL is not a heap reference, so no write barrier.
126 : void CodeFlusher::ClearNextCandidate(SharedFunctionInfo* candidate) {
127 9899 : candidate->code()->set_gc_metadata(NULL, SKIP_WRITE_BARRIER);
128 : }
129 :
       : // Exposes the heads of both candidate lists to a RootVisitor as
       : // kCodeFlusher roots, so GC can see (and update) these pointers.
130 68745 : void CodeFlusher::VisitListHeads(RootVisitor* visitor) {
131 : visitor->VisitRootPointer(
132 : Root::kCodeFlusher,
133 68745 : reinterpret_cast<Object**>(&jsfunction_candidates_head_));
134 : visitor->VisitRootPointer(
135 : Root::kCodeFlusher,
136 68745 : reinterpret_cast<Object**>(&shared_function_info_candidates_head_));
137 68745 : }
138 :
       : // Walks the JSFunction candidate list and, for each next-candidate slot
       : // whose value currently points into new-space from-space, re-visits the
       : // slot via StaticVisitor — presumably so the scavenger records/updates
       : // the link when the referenced object moves (TODO confirm against the
       : // StaticVisitor used by callers).
139 : template <typename StaticVisitor>
140 68745 : void CodeFlusher::IteratePointersToFromSpace() {
141 : Heap* heap = isolate_->heap();
142 68745 : JSFunction* candidate = jsfunction_candidates_head_;
143 68785 : while (candidate != nullptr) {
144 40 : JSFunction** slot = GetNextCandidateSlot(candidate);
145 80 : if (heap->InFromSpace(*slot)) {
146 0 : StaticVisitor::VisitPointer(heap, candidate,
147 : reinterpret_cast<Object**>(slot));
148 : }
149 : candidate = GetNextCandidate(candidate);
150 : }
151 68745 : }
152 :
       : // Returns the next live object on the chunk by scanning its marking
       : // bitmap cell by cell, or nullptr when the chunk is exhausted. An
       : // object's color is encoded in two consecutive mark bits starting at
       : // its address: the branch below on the second bit treats
       : // "both bits set" as black and "first bit only" as grey. The template
       : // mode T selects which colors are reported (black, grey, or all), and
       : // filler objects are filtered out entirely.
153 : template <LiveObjectIterationMode T>
154 667389964 : HeapObject* LiveObjectIterator<T>::Next() {
       : // Cache the filler maps once; they identify dead filler "objects"
       : // whose mark bits may nevertheless be set (see filtering below).
155 667389964 : Map* one_word_filler = heap()->one_pointer_filler_map();
156 667389964 : Map* two_word_filler = heap()->two_pointer_filler_map();
157 667389964 : Map* free_space_map = heap()->free_space_map();
158 1802567414 : while (!it_.Done()) {
159 : HeapObject* object = nullptr;
       : // Consume set bits in the current 32-bit bitmap cell, lowest first.
160 1209122395 : while (current_cell_ != 0) {
161 : uint32_t trailing_zeros = base::bits::CountTrailingZeros32(current_cell_);
162 741656330 : Address addr = cell_base_ + trailing_zeros * kPointerSize;
163 :
164 : // Clear the first bit of the found object..
165 741656330 : current_cell_ &= ~(1u << trailing_zeros);
166 :
       : // Locate the second mark bit; it may spill into the next cell when
       : // the first bit is the last bit of the current cell.
167 : uint32_t second_bit_index = 0;
168 741656330 : if (trailing_zeros < Bitmap::kBitIndexMask) {
169 722595422 : second_bit_index = 1u << (trailing_zeros + 1);
170 : } else {
171 : second_bit_index = 0x1;
172 : // The overlapping case; there has to exist a cell after the current
173 : // cell.
174 : // However, if there is a black area at the end of the page, and the
175 : // last word is a one word filler, we are not allowed to advance. In
176 : // that case we can return immediately.
177 3145597126 : if (it_.Done()) {
178 : DCHECK(HeapObject::FromAddress(addr)->map() ==
179 : HeapObject::FromAddress(addr)
180 : ->GetHeap()
181 : ->one_pointer_filler_map());
182 : return nullptr;
183 : }
184 : bool not_done = it_.Advance();
185 : USE(not_done);
186 : DCHECK(not_done);
187 21139939 : cell_base_ = it_.CurrentCellBase();
188 21139939 : current_cell_ = *it_.CurrentCell();
189 : }
190 :
191 : Map* map = nullptr;
192 743735361 : if (current_cell_ & second_bit_index) {
193 : // We found a black object. If the black object is within a black area,
194 : // make sure that we skip all set bits in the black area until the
195 : // object ends.
196 741127617 : HeapObject* black_object = HeapObject::FromAddress(addr);
       : // Read the map with a relaxed atomic load; a concurrent writer may
       : // be installing a new map (see the data-race note further down).
197 : map = base::NoBarrierAtomicValue<Map*>::FromAddress(addr)->Value();
198 740759027 : Address end = addr + black_object->SizeFromMap(map) - kPointerSize;
199 : // One word filler objects do not borrow the second mark bit. We have
200 : // to jump over the advancing and clearing part.
201 : // Note that we know that we are at a one word filler when
202 : // object_start + object_size - kPointerSize == object_start.
203 737057092 : if (addr != end) {
204 : DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
205 736691638 : uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
206 : unsigned int end_cell_index =
207 736691638 : end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
208 : MarkBit::CellType end_index_mask =
209 736691638 : 1u << Bitmap::IndexInCell(end_mark_bit_index);
       : // Jump the cell iterator directly to the cell containing the
       : // object's last word, skipping interior black-area cells.
210 736691638 : if (it_.Advance(end_cell_index)) {
211 187723187 : cell_base_ = it_.CurrentCellBase();
212 187723187 : current_cell_ = *it_.CurrentCell();
213 : }
214 :
215 : // Clear all bits in current_cell, including the end index.
216 736691638 : current_cell_ &= ~(end_index_mask + end_index_mask - 1);
217 : }
218 :
219 : if (T == kBlackObjects || T == kAllLiveObjects) {
220 : object = black_object;
221 : }
222 : } else if ((T == kGreyObjects || T == kAllLiveObjects)) {
       : // Only the first mark bit is set: a grey object.
223 : map = base::NoBarrierAtomicValue<Map*>::FromAddress(addr)->Value();
224 835308 : object = HeapObject::FromAddress(addr);
225 : }
226 :
227 : // We found a live object.
228 739664836 : if (object != nullptr) {
229 : // Do not use IsFiller() here. This may cause a data race for reading
230 : // out the instance type when a new map concurrently is written into
231 : // this object while iterating over the object.
232 663676474 : if (map == one_word_filler || map == two_word_filler ||
233 : map == free_space_map) {
234 : // There are two reasons why we can get black or grey fillers:
235 : // 1) Black areas together with slack tracking may result in black one
236 : // word filler objects.
237 : // 2) Left trimming may leave black or grey fillers behind because we
238 : // do not clear the old location of the object start.
239 : // We filter these objects out in the iterator.
240 : object = nullptr;
241 : } else {
242 : break;
243 : }
244 : }
245 : }
246 :
       : // Current cell exhausted: move on to the next cell, if any remain.
247 1132576942 : if (current_cell_ == 0) {
248 1063765957 : if (!it_.Done() && it_.Advance()) {
249 531420053 : cell_base_ = it_.CurrentCellBase();
250 531420053 : current_cell_ = *it_.CurrentCell();
251 : }
252 : }
253 1132576942 : if (object != nullptr) return object;
254 : }
255 : return nullptr;
256 : }
257 :
258 : } // namespace internal
259 : } // namespace v8
260 :
261 : #endif // V8_HEAP_MARK_COMPACT_INL_H_
|