// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_MARK_COMPACT_INL_H_
#define V8_HEAP_MARK_COMPACT_INL_H_

#include "src/heap/mark-compact.h"
#include "src/heap/remembered-set.h"
#include "src/isolate.h"

namespace v8 {
namespace internal {

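// Note on the marking helpers below: PushBlack/UnshiftBlack expect an object
// that has already been marked black and try to place it on the marking
// deque. If the deque is full, the object is demoted back to grey via
// BlackToGrey so that a later deque-overflow scan can rediscover it.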
void MarkCompactCollector::PushBlack(HeapObject* obj) {
  DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
      obj, MarkingState::Internal(obj))));
  if (!marking_deque()->Push(obj)) {
    ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
        obj, MarkingState::Internal(obj));
  }
}

void MinorMarkCompactCollector::PushBlack(HeapObject* obj) {
  DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
      obj, MarkingState::External(obj))));
  if (!marking_deque()->Push(obj)) {
    ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
        obj, MarkingState::External(obj));
  }
}

void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
  DCHECK(ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)));
  if (!marking_deque()->Unshift(obj)) {
    ObjectMarking::BlackToGrey(obj, MarkingState::Internal(obj));
  }
}

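// MarkObject performs the white-to-black transition for a previously
// unmarked object and queues it via PushBlack. Objects that are already grey
// or black are left untouched.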
void MarkCompactCollector::MarkObject(HeapObject* obj) {
  if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
          obj, MarkingState::Internal(obj))) {
    ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
        obj, MarkingState::Internal(obj));
    PushBlack(obj);
  }
}

void MinorMarkCompactCollector::MarkObject(HeapObject* obj) {
  if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
          obj, MarkingState::External(obj))) {
    ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
        obj, MarkingState::External(obj));
    PushBlack(obj);
  }
}

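// RecordSlot remembers a slot inside |object| that points at |target| when
// the target's page is an evacuation candidate. The slot is inserted into
// the OLD_TO_OLD remembered set of the source page so it can be updated
// once the target has been evacuated.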
void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
                                      Object* target) {
  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
  Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
  if (target_page->IsEvacuationCandidate() &&
      !ShouldSkipEvacuationSlotRecording(object)) {
    DCHECK(
        ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object)));
    RememberedSet<OLD_TO_OLD>::Insert(source_page,
                                      reinterpret_cast<Address>(slot));
  }
}


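// The CodeFlusher maintains two intrusive, singly linked candidate lists:
// JSFunctions are linked through their next_function_link field, and
// SharedFunctionInfos through the gc_metadata slot of their code object.
// AddCandidate only links an object that is not already on its list.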
void CodeFlusher::AddCandidate(SharedFunctionInfo* shared_info) {
  if (GetNextCandidate(shared_info) == nullptr) {
    SetNextCandidate(shared_info, shared_function_info_candidates_head_);
    shared_function_info_candidates_head_ = shared_info;
  }
}


void CodeFlusher::AddCandidate(JSFunction* function) {
  DCHECK(function->code() == function->shared()->code());
  if (function->next_function_link()->IsUndefined(isolate_)) {
    SetNextCandidate(function, jsfunction_candidates_head_);
    jsfunction_candidates_head_ = function;
  }
}


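// Accessors for the JSFunction candidate list, which is threaded through the
// kNextFunctionLinkOffset field of each candidate.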
JSFunction** CodeFlusher::GetNextCandidateSlot(JSFunction* candidate) {
  return reinterpret_cast<JSFunction**>(
      HeapObject::RawField(candidate, JSFunction::kNextFunctionLinkOffset));
}


JSFunction* CodeFlusher::GetNextCandidate(JSFunction* candidate) {
  Object* next_candidate = candidate->next_function_link();
  return reinterpret_cast<JSFunction*>(next_candidate);
}


void CodeFlusher::SetNextCandidate(JSFunction* candidate,
                                   JSFunction* next_candidate) {
  candidate->set_next_function_link(next_candidate, UPDATE_WEAK_WRITE_BARRIER);
}


void CodeFlusher::ClearNextCandidate(JSFunction* candidate, Object* undefined) {
  DCHECK(undefined->IsUndefined(candidate->GetIsolate()));
  candidate->set_next_function_link(undefined, SKIP_WRITE_BARRIER);
}


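// Accessors for the SharedFunctionInfo candidate list, which reuses the
// gc_metadata slot of each candidate's code object as the link field.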
SharedFunctionInfo* CodeFlusher::GetNextCandidate(
    SharedFunctionInfo* candidate) {
  Object* next_candidate = candidate->code()->gc_metadata();
  return reinterpret_cast<SharedFunctionInfo*>(next_candidate);
}


void CodeFlusher::SetNextCandidate(SharedFunctionInfo* candidate,
                                   SharedFunctionInfo* next_candidate) {
  candidate->code()->set_gc_metadata(next_candidate);
}


void CodeFlusher::ClearNextCandidate(SharedFunctionInfo* candidate) {
  candidate->code()->set_gc_metadata(NULL, SKIP_WRITE_BARRIER);
}

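// The two list heads are exposed to a RootVisitor as Root::kCodeFlusher
// roots, so that passes which visit roots (for example when updating
// pointers) also see the candidate lists.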
void CodeFlusher::VisitListHeads(RootVisitor* visitor) {
  visitor->VisitRootPointer(
      Root::kCodeFlusher,
      reinterpret_cast<Object**>(&jsfunction_candidates_head_));
  visitor->VisitRootPointer(
      Root::kCodeFlusher,
      reinterpret_cast<Object**>(&shared_function_info_candidates_head_));
}

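// Walks the JSFunction candidate list and hands every next_function_link
// slot that still points into from-space to StaticVisitor::VisitPointer, so
// the link can be updated when the referenced function moves out of the
// young generation.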
template <typename StaticVisitor>
void CodeFlusher::IteratePointersToFromSpace() {
  Heap* heap = isolate_->heap();
  JSFunction* candidate = jsfunction_candidates_head_;
  while (candidate != nullptr) {
    JSFunction** slot = GetNextCandidateSlot(candidate);
    if (heap->InFromSpace(*slot)) {
      StaticVisitor::VisitPointer(heap, candidate,
                                  reinterpret_cast<Object**>(slot));
    }
    candidate = GetNextCandidate(candidate);
  }
}

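// LiveObjectIterator<T>::Next() scans the chunk's marking bitmap cell by
// cell. The lowest set bit of the current cell is interpreted as an object
// start; the bit immediately after it distinguishes black objects (bit set)
// from grey ones. For black objects the iterator also skips all mark bits
// covering the object's body, since black areas have every bit in their
// range set. One- and two-word fillers and free-space objects are filtered
// out because slack tracking and left trimming can leave marked fillers
// behind. A rough usage sketch (the constructor arguments shown are an
// assumption, not something this header defines):
//
//   LiveObjectIterator<kBlackObjects> it(chunk, marking_state);
//   while (HeapObject* object = it.Next()) {
//     // process |object|
//   }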
template <LiveObjectIterationMode T>
HeapObject* LiveObjectIterator<T>::Next() {
  Map* one_word_filler = heap()->one_pointer_filler_map();
  Map* two_word_filler = heap()->two_pointer_filler_map();
  Map* free_space_map = heap()->free_space_map();
  while (!it_.Done()) {
    HeapObject* object = nullptr;
    while (current_cell_ != 0) {
      uint32_t trailing_zeros = base::bits::CountTrailingZeros32(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kPointerSize;

      // Clear the first bit of the found object.
      current_cell_ &= ~(1u << trailing_zeros);

      uint32_t second_bit_index = 0;
      if (trailing_zeros < Bitmap::kBitIndexMask) {
        second_bit_index = 1u << (trailing_zeros + 1);
      } else {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (it_.Done()) {
          DCHECK(HeapObject::FromAddress(addr)->map() ==
                 HeapObject::FromAddress(addr)
                     ->GetHeap()
                     ->one_pointer_filler_map());
          return nullptr;
        }
        bool not_done = it_.Advance();
        USE(not_done);
        DCHECK(not_done);
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }

      Map* map = nullptr;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject* black_object = HeapObject::FromAddress(addr);
        map = base::NoBarrierAtomicValue<Map*>::FromAddress(addr)->Value();
        Address end = addr + black_object->SizeFromMap(map) - kPointerSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kPointerSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (T == kBlackObjects || T == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((T == kGreyObjects || T == kAllLiveObjects)) {
        map = base::NoBarrierAtomicValue<Map*>::FromAddress(addr)->Value();
        object = HeapObject::FromAddress(addr);
      }

      // We found a live object.
      if (object != nullptr) {
        // Do not use IsFiller() here: reading out the instance type could
        // race with a new map being concurrently written into this object
        // while we iterate over it.
        if (map == one_word_filler || map == two_word_filler ||
            map == free_space_map) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black
          // one word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because
          // we do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = nullptr;
        } else {
          break;
        }
      }
    }

    if (current_cell_ == 0) {
      if (!it_.Done() && it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (object != nullptr) return object;
  }
  return nullptr;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_MARK_COMPACT_INL_H_