Line data Source code
1 : // Copyright 2015 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_HEAP_SCAVENGER_H_
6 : #define V8_HEAP_SCAVENGER_H_
7 :
8 : #include "src/base/platform/condition-variable.h"
9 : #include "src/heap/local-allocator.h"
10 : #include "src/heap/objects-visiting.h"
11 : #include "src/heap/slot-set.h"
12 : #include "src/heap/worklist.h"
13 :
14 : namespace v8 {
15 : namespace internal {
16 :
17 : class OneshotBarrier;
18 :
19 88812 : class Scavenger {
20 : public:
21 : static const int kCopiedListSegmentSize = 256;  // Entries per CopiedList worklist segment.
22 : static const int kPromotionListSegmentSize = 256;  // Entries per PromotionList worklist segment.
23 :
24 : using ObjectAndSize = std::pair<HeapObject*, int>;
25 : using CopiedList = Worklist<ObjectAndSize, kCopiedListSegmentSize>;  // Objects copied within new space, pending processing.
26 : using PromotionList = Worklist<ObjectAndSize, kPromotionListSegmentSize>;  // Objects promoted to old space, pending processing.
27 :
28 : Scavenger(Heap* heap, bool is_logging, CopiedList* copied_list,
29 : PromotionList* promotion_list, int task_id);
30 :
31 : // Entry point for scavenging an old generation page. For scavenging single
32 : // objects see RootScavengeVisitor and ScavengeVisitor below.
33 : void ScavengePage(MemoryChunk* page);
34 :
35 : // Processes remaining work (=objects) after single objects have been
36 : // manually scavenged using ScavengeObject or CheckAndScavengeObject.
37 : void Process(OneshotBarrier* barrier = nullptr);
38 :
39 : // Finalize the Scavenger. Needs to be called from the main thread.
40 : void Finalize();
41 :
42 : size_t bytes_copied() const { return copied_size_; }  // Bytes copied within new space so far.
43 : size_t bytes_promoted() const { return promoted_size_; }  // Bytes promoted to old space so far.
44 :
45 : void AnnounceLockedPage(MemoryChunk* chunk) {  // Forwards the page-lock notification to the per-task allocator.
46 145630 : allocator_.AnnounceLockedPage(chunk);
47 : }
48 :
49 : private:
50 : // Number of objects to process before interrupting for potentially waking
51 : // up other tasks.
52 : static const int kInterruptThreshold = 128;
53 : static const int kInitialLocalPretenuringFeedbackCapacity = 256;  // Initial capacity of local_pretenuring_feedback_.
54 :
55 211152251 : inline Heap* heap() { return heap_; }
56 :
57 : inline void PageMemoryFence(Object* object);  // NOTE(review): presumably a memory fence for pages accessed concurrently -- confirm in the .cc.
58 :
59 : // Potentially scavenges an object referenced from |slot_address| if it is
60 : // indeed a HeapObject and resides in from space.
61 : inline SlotCallbackResult CheckAndScavengeObject(Heap* heap,
62 : Address slot_address);
63 :
64 : // Scavenges an object |object| referenced from slot |p|. |object| is required
65 : // to be in from space.
66 : inline void ScavengeObject(HeapObject** p, HeapObject* object);
67 :
68 : // Copies |source| to |target| and sets the forwarding pointer in |source|.
69 : V8_INLINE bool MigrateObject(Map* map, HeapObject* source, HeapObject* target,
70 : int size);
71 :
72 : V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
73 : HeapObject* object, int object_size);
74 :
75 : V8_INLINE bool PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
76 : int object_size);
77 :
78 : V8_INLINE void EvacuateObject(HeapObject** slot, Map* map,
79 : HeapObject* source);
80 :
81 : // Different cases for object evacuation.
82 :
83 : V8_INLINE void EvacuateObjectDefault(Map* map, HeapObject** slot,
84 : HeapObject* object, int object_size);
85 :
86 : V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
87 : JSFunction* object, int object_size);
88 :
89 : inline void EvacuateThinString(Map* map, HeapObject** slot,
90 : ThinString* object, int object_size);
91 :
92 : inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
93 : ConsString* object, int object_size);
94 :
95 : void IterateAndScavengePromotedObject(HeapObject* target, int size);  // Visits slots of an object already promoted to old space.
96 :
97 : void RecordCopiedObject(HeapObject* obj);
98 :
99 : static inline bool ContainsOnlyData(VisitorId visitor_id);  // True when objects of this visitor id hold no pointer fields -- presumably used to skip iteration; confirm in the .cc.
100 :
101 : Heap* const heap_;  // Backing heap; set at construction, never reassigned.
102 : PromotionList::View promotion_list_;  // Per-task view into the shared promotion worklist.
103 : CopiedList::View copied_list_;  // Per-task view into the shared copied worklist.
104 : Heap::PretenuringFeedbackMap local_pretenuring_feedback_;  // Task-local pretenuring feedback; presumably merged into the heap in Finalize() -- confirm.
105 : size_t copied_size_;  // Accumulated bytes copied, exposed via bytes_copied().
106 : size_t promoted_size_;  // Accumulated bytes promoted, exposed via bytes_promoted().
107 : LocalAllocator allocator_;  // Per-task allocator; see AnnounceLockedPage().
108 : const bool is_logging_;  // Supplied by the caller at construction.
109 : const bool is_incremental_marking_;  // Presumably a snapshot of heap state taken at construction -- TODO confirm.
110 : const bool is_compacting_;  // Presumably a snapshot of heap state taken at construction -- TODO confirm.
111 :
112 : friend class IterateAndScavengePromotedObjectsVisitor;
113 : friend class RootScavengeVisitor;
114 : friend class ScavengeVisitor;
115 : };
116 :
117 : // Helper class for turning the scavenger into a root visitor (it derives
118 : // from RootVisitor) that filters out non-HeapObjects and objects which do
119 : // not reside in new space before handing them to |scavenger_|.
119 29652 : class RootScavengeVisitor final : public RootVisitor {
120 : public:
121 : RootScavengeVisitor(Heap* heap, Scavenger* scavenger)
122 29652 : : heap_(heap), scavenger_(scavenger) {}
123 :
124 : void VisitRootPointer(Root root, Object** p) final;
125 : void VisitRootPointers(Root root, Object** start, Object** end) final;
126 :
127 : private:
128 : void ScavengePointer(Object** p);  // Scavenges a single root slot.
129 :
130 : Heap* const heap_;  // Not owned.
131 : Scavenger* const scavenger_;  // Not owned.
132 : };
133 :
134 0 : class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {  // Object visitor over new-space objects; visited slots are presumably passed to |scavenger_| -- see the .cc.
135 : public:
136 : ScavengeVisitor(Heap* heap, Scavenger* scavenger)
137 140374 : : heap_(heap), scavenger_(scavenger) {}
138 :
139 : V8_INLINE void VisitPointers(HeapObject* host, Object** start,
140 : Object** end) final;
141 :
142 : private:
143 : Heap* const heap_;  // Not owned.
144 : Scavenger* const scavenger_;  // Not owned.
145 : };
146 :
147 : } // namespace internal
148 : } // namespace v8
149 :
150 : #endif // V8_HEAP_SCAVENGER_H_
|