Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_HEAP_MARK_COMPACT_INL_H_
6 : #define V8_HEAP_MARK_COMPACT_INL_H_
7 :
8 : #include "src/base/bits.h"
9 : #include "src/heap/mark-compact.h"
10 : #include "src/heap/objects-visiting-inl.h"
11 : #include "src/heap/remembered-set.h"
12 :
13 : namespace v8 {
14 : namespace internal {
15 :
// Constructs a marking visitor bound to |collector|'s heap. The visitor
// dispatches per-object-type marking logic during mark-compact GC;
// |marking_state| supplies the mark-bit operations (atomic or not,
// depending on the template instantiation).
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::MarkingVisitor(MarkCompactCollector* collector,
                                             MarkingState* marking_state)
    : heap_(collector->heap()),
      collector_(collector),
      marking_state_(marking_state) {}
24 :
25 : template <FixedArrayVisitationMode fixed_array_mode,
26 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
27 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
28 : MarkingState>::VisitAllocationSite(Map* map,
29 : AllocationSite* object) {
30 114348 : int size = AllocationSite::BodyDescriptorWeak::SizeOf(map, object);
31 114348 : AllocationSite::BodyDescriptorWeak::IterateBody(object, size, this);
32 : return size;
33 : }
34 :
35 : template <FixedArrayVisitationMode fixed_array_mode,
36 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
37 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
38 : MarkingState>::VisitBytecodeArray(Map* map,
39 : BytecodeArray* array) {
40 475305 : int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
41 475305 : BytecodeArray::BodyDescriptor::IterateBody(array, size, this);
42 475305 : array->MakeOlder();
43 : return size;
44 : }
45 :
46 : template <FixedArrayVisitationMode fixed_array_mode,
47 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
48 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
49 : MarkingState>::VisitFixedArray(Map* map,
50 : FixedArray* object) {
51 : return (fixed_array_mode == FixedArrayVisitationMode::kRegular)
52 : ? Parent::VisitFixedArray(map, object)
53 : : VisitFixedArrayIncremental(map, object);
54 : }
55 :
56 : template <FixedArrayVisitationMode fixed_array_mode,
57 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
58 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
59 : MarkingState>::VisitJSApiObject(Map* map, JSObject* object) {
60 84034 : if (heap_->local_embedder_heap_tracer()->InUse()) {
61 : DCHECK(object->IsJSObject());
62 0 : heap_->TracePossibleWrapper(object);
63 : }
64 84034 : int size = JSObject::BodyDescriptor::SizeOf(map, object);
65 84034 : JSObject::BodyDescriptor::IterateBody(object, size, this);
66 : return size;
67 : }
68 :
69 : template <FixedArrayVisitationMode fixed_array_mode,
70 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
71 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
72 : MarkingState>::VisitJSFunction(Map* map,
73 : JSFunction* object) {
74 37154229 : int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
75 37154229 : JSFunction::BodyDescriptorWeak::IterateBody(object, size, this);
76 : return size;
77 : }
78 :
// Visits a JSWeakCollection (e.g. WeakMap/WeakSet). The backing hash table
// holds weak mappings, so it is not pushed onto the marking worklist;
// instead the collection is enqueued on a heap-global list for
// post-processing and the table itself is marked without push.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitJSWeakCollection(Map* map, JSWeakCollection* weak_collection) {
  // Enqueue weak collection in linked list of encountered weak collections.
  // next() == undefined means the collection is not yet on the list.
  if (weak_collection->next() == heap_->undefined_value()) {
    weak_collection->set_next(heap_->encountered_weak_collections());
    heap_->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  int size = JSWeakCollection::BodyDescriptorWeak::SizeOf(map, weak_collection);
  JSWeakCollection::BodyDescriptorWeak::IterateBody(weak_collection, size,
                                                    this);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return size;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot =
      HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  collector_->RecordSlot(weak_collection, slot, obj);
  MarkObjectWithoutPush(weak_collection, obj);
  return size;
}
106 :
107 : template <FixedArrayVisitationMode fixed_array_mode,
108 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
109 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
110 : MarkingState>::VisitMap(Map* map, Map* object) {
111 : // When map collection is enabled we have to mark through map's transitions
112 : // and back pointers in a special way to make these links weak.
113 36766602 : if (object->CanTransition()) {
114 : MarkMapContents(object);
115 : } else {
116 : VisitPointers(object,
117 : HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
118 6368910 : HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
119 : }
120 33833601 : return Map::BodyDescriptor::SizeOf(map, object);
121 : }
122 :
123 : template <FixedArrayVisitationMode fixed_array_mode,
124 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
125 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
126 : MarkingState>::VisitNativeContext(Map* map,
127 : Context* context) {
128 16203 : int size = Context::BodyDescriptorWeak::SizeOf(map, context);
129 16203 : Context::BodyDescriptorWeak::IterateBody(context, size, this);
130 : return size;
131 : }
132 :
133 : template <FixedArrayVisitationMode fixed_array_mode,
134 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
135 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
136 : MarkingState>::VisitTransitionArray(Map* map,
137 : TransitionArray* array) {
138 264672 : int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
139 264672 : TransitionArray::BodyDescriptor::IterateBody(array, size, this);
140 264672 : collector_->AddTransitionArray(array);
141 : return size;
142 : }
143 :
144 : template <FixedArrayVisitationMode fixed_array_mode,
145 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
146 : int MarkingVisitor<fixed_array_mode, retaining_path_mode,
147 17787293 : MarkingState>::VisitWeakCell(Map* map, WeakCell* weak_cell) {
148 : // Enqueue weak cell in linked list of encountered weak collections.
149 : // We can ignore weak cells with cleared values because they will always
150 : // contain smi zero.
151 22241187 : if (!weak_cell->cleared()) {
152 21075590 : HeapObject* value = HeapObject::cast(weak_cell->value());
153 42151176 : if (marking_state()->IsBlackOrGrey(value)) {
154 : // Weak cells with live values are directly processed here to reduce
155 : // the processing time of weak cells during the main GC pause.
156 18235206 : Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
157 18235206 : collector_->RecordSlot(weak_cell, slot, *slot);
158 : } else {
159 : // If we do not know about liveness of values of weak cells, we have to
160 : // process them when we know the liveness of the whole transitive
161 : // closure.
162 2840382 : collector_->AddWeakCell(weak_cell);
163 : }
164 : }
165 21887773 : return WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
166 : }
167 :
168 : template <FixedArrayVisitationMode fixed_array_mode,
169 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
170 0 : void MarkingVisitor<fixed_array_mode, retaining_path_mode,
171 : MarkingState>::VisitPointer(HeapObject* host, Object** p) {
172 4636970090 : if (!(*p)->IsHeapObject()) return;
173 : HeapObject* target_object = HeapObject::cast(*p);
174 : collector_->RecordSlot(host, p, target_object);
175 : MarkObject(host, target_object);
176 : }
177 :
178 : template <FixedArrayVisitationMode fixed_array_mode,
179 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
180 0 : void MarkingVisitor<fixed_array_mode, retaining_path_mode,
181 : MarkingState>::VisitPointers(HeapObject* host,
182 : Object** start, Object** end) {
183 2312412899 : for (Object** p = start; p < end; p++) {
184 : VisitPointer(host, p);
185 : }
186 0 : }
187 :
188 : template <FixedArrayVisitationMode fixed_array_mode,
189 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
190 0 : void MarkingVisitor<fixed_array_mode, retaining_path_mode,
191 : MarkingState>::VisitEmbeddedPointer(Code* host,
192 7902066 : RelocInfo* rinfo) {
193 : DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
194 : HeapObject* object = HeapObject::cast(rinfo->target_object());
195 7902066 : collector_->RecordRelocSlot(host, rinfo, object);
196 7902066 : if (!host->IsWeakObject(object)) {
197 : MarkObject(host, object);
198 : }
199 0 : }
200 :
201 : template <FixedArrayVisitationMode fixed_array_mode,
202 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
203 0 : void MarkingVisitor<fixed_array_mode, retaining_path_mode,
204 : MarkingState>::VisitCodeTarget(Code* host,
205 195948261 : RelocInfo* rinfo) {
206 : DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
207 195948369 : Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
208 195948387 : collector_->RecordRelocSlot(host, rinfo, target);
209 : MarkObject(host, target);
210 0 : }
211 :
212 : template <FixedArrayVisitationMode fixed_array_mode,
213 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
214 : bool MarkingVisitor<fixed_array_mode, retaining_path_mode,
215 : MarkingState>::MarkObjectWithoutPush(HeapObject* host,
216 : HeapObject* object) {
217 45660614 : if (marking_state()->WhiteToBlack(object)) {
218 6615217 : if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
219 6615217 : V8_UNLIKELY(FLAG_track_retaining_path)) {
220 0 : heap_->AddRetainer(host, object);
221 : }
222 : return true;
223 : }
224 : return false;
225 : }
226 :
227 : template <FixedArrayVisitationMode fixed_array_mode,
228 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
229 : void MarkingVisitor<fixed_array_mode, retaining_path_mode,
230 : MarkingState>::MarkObject(HeapObject* host,
231 615033428 : HeapObject* object) {
232 3535880296 : if (marking_state()->WhiteToGrey(object)) {
233 146884293 : collector_->marking_worklist()->Push(object);
234 126385684 : if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
235 126385684 : V8_UNLIKELY(FLAG_track_retaining_path)) {
236 0 : heap_->AddRetainer(host, object);
237 : }
238 : }
239 : }
240 :
241 : template <FixedArrayVisitationMode fixed_array_mode,
242 : TraceRetainingPathMode retaining_path_mode, typename MarkingState>
243 : int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
244 5866 : VisitFixedArrayIncremental(Map* map, FixedArray* object) {
245 2502587 : MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
246 4903554 : int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
247 4903554 : if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
248 : DCHECK(!FLAG_use_marking_progress_bar ||
249 : chunk->owner()->identity() == LO_SPACE);
250 : // When using a progress bar for large fixed arrays, scan only a chunk of
251 : // the array and try to push it onto the marking deque again until it is
252 : // fully scanned. Fall back to scanning it through to the end in case this
253 : // fails because of a full deque.
254 : int start_offset =
255 2990 : Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
256 2990 : if (start_offset < object_size) {
257 : // Ensure that the object is either grey or black before pushing it
258 : // into marking worklist.
259 2933 : marking_state()->WhiteToGrey(object);
260 2933 : if (FLAG_concurrent_marking) {
261 1623 : marking_worklist()->PushBailout(object);
262 : } else {
263 1310 : marking_worklist()->Push(object);
264 : }
265 : DCHECK(marking_state()->IsGrey(object) ||
266 : marking_state()->IsBlack(object));
267 :
268 : int end_offset =
269 2933 : Min(object_size, start_offset + kProgressBarScanningChunk);
270 : int already_scanned_offset = start_offset;
271 : VisitPointers(object, HeapObject::RawField(object, start_offset),
272 2933 : HeapObject::RawField(object, end_offset));
273 : start_offset = end_offset;
274 : end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
275 2933 : chunk->set_progress_bar(start_offset);
276 2933 : if (start_offset < object_size) {
277 2881 : heap_->incremental_marking()->NotifyIncompleteScanOfObject(
278 2881 : object_size - (start_offset - already_scanned_offset));
279 : }
280 : }
281 : } else {
282 4900564 : FixedArray::BodyDescriptor::IterateBody(object, object_size, this);
283 : }
284 : return object_size;
285 : }
286 :
// Marks the contents of |map|. Descriptor arrays can be shared between
// maps, so for non-prototype maps only the header (once, on first marking)
// and the descriptors this map owns are visited; see the comment below for
// the prototype-map case.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::MarkMapContents(Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields of
  // this map are visited. Prototype maps don't keep track of transitions, so
  // just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    // MarkObjectWithoutPush returns true only on the first (white-to-black)
    // marking of the array; only then is the header visited.
    if (MarkObjectWithoutPush(map, descriptors) && descriptors->length() > 0) {
      VisitPointers(descriptors, descriptors->GetFirstElementAddress(),
                    descriptors->GetDescriptorEndSlot(0));
    }
    // Visit only the descriptor slots owned by this map.
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      VisitPointers(descriptors, descriptors->GetDescriptorStartSlot(start),
                    descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  VisitPointers(map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
                HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
317 :
318 : void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
319 206635597 : if (atomic_marking_state()->WhiteToGrey(obj)) {
320 2590498 : marking_worklist()->Push(obj);
321 2590498 : if (V8_UNLIKELY(FLAG_track_retaining_path)) {
322 0 : heap_->AddRetainer(host, obj);
323 : }
324 : }
325 : }
326 :
327 : void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
328 243172703 : if (atomic_marking_state()->WhiteToGrey(obj)) {
329 147613783 : marking_worklist()->Push(obj);
330 147612866 : if (V8_UNLIKELY(FLAG_track_retaining_path)) {
331 0 : heap_->AddRetainingRoot(root, obj);
332 : }
333 : }
334 : }
335 :
336 : void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
337 0 : if (atomic_marking_state()->WhiteToGrey(obj)) {
338 0 : marking_worklist()->Push(obj);
339 0 : if (V8_UNLIKELY(FLAG_track_retaining_path)) {
340 0 : heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
341 : }
342 : }
343 : }
344 :
345 : void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
346 : Object* target) {
347 3940761859 : Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
348 3941433964 : Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
349 3952672877 : if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
350 9902877 : !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
351 : RememberedSet<OLD_TO_OLD>::Insert(source_page,
352 5867018 : reinterpret_cast<Address>(slot));
353 : }
354 : }
355 :
// Positions the iterator at the first live object at or after |start| on
// |chunk|, walking mark bits in |bitmap|. The filler maps are cached up
// front so fillers can be filtered without reading instance types.
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
                                          Address start)
    : chunk_(chunk),
      one_word_filler_map_(chunk->heap()->one_pointer_filler_map()),
      two_word_filler_map_(chunk->heap()->two_pointer_filler_map()),
      free_space_map_(chunk->heap()->free_space_map()),
      it_(chunk, bitmap) {
  // Skip ahead to the bitmap cell containing |start|'s mark bit.
  it_.Advance(Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
  if (!it_.Done()) {
    cell_base_ = it_.CurrentCellBase();
    current_cell_ = *it_.CurrentCell();
    AdvanceToNextValidObject();
  } else {
    // No marked cells at or after |start|: this is an end iterator.
    current_object_ = nullptr;
  }
}
374 :
// Pre-increment: advances to the next live, non-filler object.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator& LiveObjectRange<mode>::iterator::
    operator++() {
  AdvanceToNextValidObject();
  return *this;
}
381 :
382 : template <LiveObjectIterationMode mode>
383 : typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::iterator::
384 : operator++(int) {
385 : iterator retval = *this;
386 : ++(*this);
387 : return retval;
388 : }
389 :
// Core of the live-object iterator: scans the mark bitmap cell by cell for
// the next live object of the requested |mode| (black, grey, or both),
// skipping filler objects. On success, sets current_object_/current_size_;
// when the chunk is exhausted, sets current_object_ to nullptr.
// NOTE(review): V8 encodes a black object as two consecutive set mark
// bits and a grey object as one; the second-bit tests below rely on that
// encoding.
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
  while (!it_.Done()) {
    HeapObject* object = nullptr;
    int size = 0;
    // Consume set bits in the current cell until a non-filler object is
    // found or the cell is empty.
    while (current_cell_ != 0) {
      uint32_t trailing_zeros = base::bits::CountTrailingZeros32(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kPointerSize;

      // Clear the first bit of the found object..
      current_cell_ &= ~(1u << trailing_zeros);

      uint32_t second_bit_index = 1u << (trailing_zeros + 1);
      if (trailing_zeros >= Bitmap::kBitIndexMask) {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (!it_.Advance()) {
          DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
          current_object_ = nullptr;
          return;
        }
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }

      Map* map = nullptr;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject* black_object = HeapObject::FromAddress(addr);
        // Relaxed load: the map may be written concurrently; see the
        // data-race note on the filler check below.
        map =
            base::AsAtomicPointer::Relaxed_Load(reinterpret_cast<Map**>(addr));
        size = black_object->SizeFromMap(map);
        Address end = addr + size - kPointerSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kPointerSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (mode == kBlackObjects || mode == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
        map =
            base::AsAtomicPointer::Relaxed_Load(reinterpret_cast<Map**>(addr));
        object = HeapObject::FromAddress(addr);
        size = object->SizeFromMap(map);
      }

      // We found a live object.
      if (object != nullptr) {
        // Do not use IsFiller() here. This may cause a data race for reading
        // out the instance type when a new map concurrently is written into
        // this object while iterating over the object.
        if (map == one_word_filler_map_ || map == two_word_filler_map_ ||
            map == free_space_map_) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black one
          // word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because we
          // do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = nullptr;
        } else {
          break;
        }
      }
    }

    // Current cell exhausted: move to the next cell with set bits.
    if (current_cell_ == 0) {
      if (it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (object != nullptr) {
      current_object_ = object;
      current_size_ = size;
      return;
    }
  }
  // No more live objects on this chunk.
  current_object_ = nullptr;
}
493 :
// Returns an iterator positioned at the first live object in the range.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::begin() {
  return iterator(chunk_, bitmap_, start_);
}
498 :
// Returns the past-the-end iterator for the range.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
  return iterator(chunk_, bitmap_, end_);
}
503 :
504 : } // namespace internal
505 : } // namespace v8
506 :
507 : #endif // V8_HEAP_MARK_COMPACT_INL_H_
|