Line data Source code
1 : // Copyright 2015 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/profiler/sampling-heap-profiler.h"
6 :
7 : #include <stdint.h>
8 : #include <memory>
9 : #include "src/api.h"
10 : #include "src/base/ieee754.h"
11 : #include "src/base/utils/random-number-generator.h"
12 : #include "src/frames-inl.h"
13 : #include "src/heap/heap.h"
14 : #include "src/isolate.h"
15 : #include "src/profiler/strings-storage.h"
16 :
17 : namespace v8 {
18 : namespace internal {
19 :
20 : // We sample with a Poisson process, with constant average sampling interval.
21 : // This follows the exponential probability distribution with parameter
22 : // λ = 1/rate where rate is the average number of bytes between samples.
23 : //
24 : // Let u be a uniformly distributed random number between 0 and 1, then
25 : // next_sample = (- ln u) / λ
26 40663 : intptr_t SamplingAllocationObserver::GetNextSampleInterval(uint64_t rate) {
27 40663 : if (FLAG_sampling_heap_profiler_suppress_randomness) {
28 40663 : return static_cast<intptr_t>(rate);
29 : }
30 0 : double u = random_->NextDouble();
31 0 : double next = (-base::ieee754::log(u)) * rate;
32 : return next < kPointerSize
33 : ? kPointerSize
34 0 : : (next > INT_MAX ? INT_MAX : static_cast<intptr_t>(next));
35 : }
36 :
// Samples were collected according to a Poisson process. Since we have not
// recorded all allocations, we must approximate the shape of the underlying
// space of allocations based on the samples we have collected. Given that
// we sample at rate R, the probability that an allocation of size S will be
// sampled is 1-exp(-S/R). This function uses the above probability to
// approximate the true number of allocations with size *size* given that
// *count* samples were observed.
44 993 : v8::AllocationProfile::Allocation SamplingHeapProfiler::ScaleSample(
45 : size_t size, unsigned int count) {
46 993 : double scale = 1.0 / (1.0 - std::exp(-static_cast<double>(size) / rate_));
47 : // Round count instead of truncating.
48 993 : return {size, static_cast<unsigned int>(count * scale + 0.5)};
49 : }
50 :
// Constructs the profiler and attaches allocation observers to every heap
// space so that SampleObject is invoked (via the observers) roughly every
// |rate| allocated bytes. |stack_depth| bounds the number of JS frames
// captured per sample; |names| must outlive this profiler.
SamplingHeapProfiler::SamplingHeapProfiler(
    Heap* heap, StringsStorage* names, uint64_t rate, int stack_depth,
    v8::HeapProfiler::SamplingFlags flags)
    : isolate_(heap->isolate()),
      heap_(heap),
      // Two observers with the same rate: one dedicated to new space, one
      // shared by all other spaces (they are registered separately below).
      new_space_observer_(new SamplingAllocationObserver(
          heap_, static_cast<intptr_t>(rate), rate, this,
          heap->isolate()->random_number_generator())),
      other_spaces_observer_(new SamplingAllocationObserver(
          heap_, static_cast<intptr_t>(rate), rate, this,
          heap->isolate()->random_number_generator())),
      names_(names),
      profile_root_(nullptr, "(root)", v8::UnboundScript::kNoScriptId, 0),
      samples_(),
      stack_depth_(stack_depth),
      rate_(rate),
      flags_(flags) {
  // A zero rate would both disable sampling and divide by zero in
  // ScaleSample.
  CHECK_GT(rate_, 0u);
  heap->new_space()->AddAllocationObserver(new_space_observer_.get());
  AllSpaces spaces(heap);
  // Attach the shared observer to every space except new space, which
  // already has its own. The destructor removes them symmetrically.
  for (Space* space = spaces.next(); space != nullptr; space = spaces.next()) {
    if (space != heap->new_space()) {
      space->AddAllocationObserver(other_spaces_observer_.get());
    }
  }
}
77 :
78 :
79 72 : SamplingHeapProfiler::~SamplingHeapProfiler() {
80 252 : heap_->new_space()->RemoveAllocationObserver(new_space_observer_.get());
81 36 : AllSpaces spaces(heap_);
82 216 : for (Space* space = spaces.next(); space != nullptr; space = spaces.next()) {
83 360 : if (space != heap_->new_space()) {
84 288 : space->RemoveAllocationObserver(other_spaces_observer_.get());
85 : }
86 : }
87 :
88 25347 : for (auto sample : samples_) {
89 25275 : delete sample;
90 : }
91 : std::set<Sample*> empty;
92 : samples_.swap(empty);
93 36 : }
94 :
95 :
// Records one sampled allocation: captures the current JS stack, bumps the
// per-size allocation count on the resulting tree node, and registers a
// weak handle on the object so OnWeakCallback can undo the bookkeeping when
// the object dies. |soon_object| is the address about to be handed out for
// an allocation of |size| bytes.
void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
  DisallowHeapAllocation no_allocation;

  HandleScope scope(isolate_);
  HeapObject* heap_object = HeapObject::FromAddress(soon_object);
  Handle<Object> obj(heap_object, isolate_);

  // Mark the new block as FreeSpace to make sure the heap is iterable while we
  // are taking the sample.
  heap()->CreateFillerObjectAt(soon_object, static_cast<int>(size),
                               ClearRecordedSlots::kNo);

  Local<v8::Value> loc = v8::Utils::ToLocal(obj);

  AllocationNode* node = AddStack();
  node->allocations_[size]++;
  // The Sample owns a weak global handle to the object; OnWeakCallback
  // receives the Sample back as the weak-callback parameter.
  Sample* sample = new Sample(size, node, loc, this);
  samples_.insert(sample);
  sample->global.SetWeak(sample, OnWeakCallback, WeakCallbackType::kParameter);
  sample->global.MarkIndependent();
}
117 :
// Weak-handle callback invoked when a sampled object has been garbage
// collected. Reverses the bookkeeping done in SampleObject: decrements the
// per-size allocation count on the owning node, prunes now-empty branches
// of the allocation tree, and deletes the Sample.
void SamplingHeapProfiler::OnWeakCallback(
    const WeakCallbackInfo<Sample>& data) {
  Sample* sample = data.GetParameter();
  AllocationNode* node = sample->owner;
  DCHECK(node->allocations_[sample->size] > 0);
  node->allocations_[sample->size]--;
  if (node->allocations_[sample->size] == 0) {
    node->allocations_.erase(sample->size);
    // Walk toward the root deleting nodes that carry no allocations and no
    // children. Stop at a pinned parent: pinned nodes are currently being
    // translated into an AllocationProfile and must stay alive.
    while (node->allocations_.empty() && node->children_.empty() &&
           node->parent_ && !node->parent_->pinned_) {
      AllocationNode* parent = node->parent_;
      AllocationNode::FunctionId id = AllocationNode::function_id(
          node->script_id_, node->script_position_, node->name_);
      parent->children_.erase(id);
      delete node;
      node = parent;
    }
  }
  sample->profiler->samples_.erase(sample);
  delete sample;
}
139 :
140 : SamplingHeapProfiler::AllocationNode*
141 78326 : SamplingHeapProfiler::AllocationNode::FindOrAddChildNode(const char* name,
142 : int script_id,
143 : int start_position) {
144 78326 : FunctionId id = function_id(script_id, start_position, name);
145 : auto it = children_.find(id);
146 78326 : if (it != children_.end()) {
147 : DCHECK(strcmp(it->second->name_, name) == 0);
148 78060 : return it->second;
149 : }
150 266 : auto child = new AllocationNode(this, name, script_id, start_position);
151 532 : children_.insert(std::make_pair(id, child));
152 266 : return child;
153 : }
154 :
// Captures the current JavaScript stack (up to stack_depth_ frames) and
// returns the corresponding leaf node of the allocation tree, creating any
// missing nodes along the path from profile_root_.
SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
  AllocationNode* node = &profile_root_;

  std::vector<SharedFunctionInfo*> stack;
  JavaScriptFrameIterator it(isolate_);
  int frames_captured = 0;
  while (!it.done() && frames_captured < stack_depth_) {
    JavaScriptFrame* frame = it.frame();
    SharedFunctionInfo* shared = frame->function()->shared();
    stack.push_back(shared);

    frames_captured++;
    it.Advance();
  }

  if (frames_captured == 0) {
    // No JS frames on the stack: attribute the allocation to the current VM
    // state rather than a script location.
    const char* name = nullptr;
    switch (isolate_->current_vm_state()) {
      case GC:
        name = "(GC)";
        break;
      case COMPILER:
        name = "(COMPILER)";
        break;
      case OTHER:
        name = "(V8 API)";
        break;
      case EXTERNAL:
        name = "(EXTERNAL)";
        break;
      case IDLE:
        name = "(IDLE)";
        break;
      case JS:
        name = "(JS)";
        break;
    }
    // NOTE(review): if current_vm_state() ever returns a tag not covered
    // above, |name| stays nullptr here — verify the enum is exhaustive.
    return node->FindOrAddChildNode(name, v8::UnboundScript::kNoScriptId, 0);
  }

  // We need to process the stack in reverse order as the top of the stack is
  // the first element in the list.
  for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
    SharedFunctionInfo* shared = *it;
    const char* name = this->names()->GetFunctionName(shared->DebugName());
    int script_id = v8::UnboundScript::kNoScriptId;
    if (shared->script()->IsScript()) {
      Script* script = Script::cast(shared->script());
      script_id = script->id();
    }
    node = node->FindOrAddChildNode(name, script_id, shared->start_position());
  }
  return node;
}
209 :
210 266 : v8::AllocationProfile::Node* SamplingHeapProfiler::TranslateAllocationNode(
211 : AllocationProfile* profile, SamplingHeapProfiler::AllocationNode* node,
212 : const std::map<int, Handle<Script>>& scripts) {
213 : // By pinning the node we make sure its children won't get disposed if
214 : // a GC kicks in during the tree retrieval.
215 266 : node->pinned_ = true;
216 : Local<v8::String> script_name =
217 266 : ToApiHandle<v8::String>(isolate_->factory()->InternalizeUtf8String(""));
218 : int line = v8::AllocationProfile::kNoLineNumberInfo;
219 : int column = v8::AllocationProfile::kNoColumnNumberInfo;
220 : std::vector<v8::AllocationProfile::Allocation> allocations;
221 266 : allocations.reserve(node->allocations_.size());
222 424 : if (node->script_id_ != v8::UnboundScript::kNoScriptId &&
223 158 : scripts.find(node->script_id_) != scripts.end()) {
224 : // Cannot use std::map<T>::at because it is not available on android.
225 : auto non_const_scripts =
226 : const_cast<std::map<int, Handle<Script>>&>(scripts);
227 158 : Handle<Script> script = non_const_scripts[node->script_id_];
228 158 : if (!script.is_null()) {
229 158 : if (script->name()->IsName()) {
230 : Name* name = Name::cast(script->name());
231 : script_name = ToApiHandle<v8::String>(
232 32 : isolate_->factory()->InternalizeUtf8String(names_->GetName(name)));
233 : }
234 158 : line = 1 + Script::GetLineNumber(script, node->script_position_);
235 158 : column = 1 + Script::GetColumnNumber(script, node->script_position_);
236 : }
237 : }
238 1525 : for (auto alloc : node->allocations_) {
239 1986 : allocations.push_back(ScaleSample(alloc.first, alloc.second));
240 : }
241 :
242 266 : profile->nodes().push_back(v8::AllocationProfile::Node(
243 : {ToApiHandle<v8::String>(
244 266 : isolate_->factory()->InternalizeUtf8String(node->name_)),
245 : script_name, node->script_id_, node->script_position_, line, column,
246 798 : std::vector<v8::AllocationProfile::Node*>(), allocations}));
247 266 : v8::AllocationProfile::Node* current = &profile->nodes().back();
248 : // The children map may have nodes inserted into it during translation
249 : // because the translation may allocate strings on the JS heap that have
250 : // the potential to be sampled. That's ok since map iterators are not
251 : // invalidated upon std::map insertion.
252 768 : for (auto it : node->children_) {
253 : current->children.push_back(
254 472 : TranslateAllocationNode(profile, it.second, scripts));
255 : }
256 266 : node->pinned_ = false;
257 266 : return current;
258 : }
259 :
260 30 : v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
261 30 : if (flags_ & v8::HeapProfiler::kSamplingForceGC) {
262 : isolate_->heap()->CollectAllGarbage(
263 0 : Heap::kNoGCFlags, GarbageCollectionReason::kSamplingProfiler);
264 : }
265 : // To resolve positions to line/column numbers, we will need to look up
266 : // scripts. Build a map to allow fast mapping from script id to script.
267 : std::map<int, Handle<Script>> scripts;
268 : {
269 30 : Script::Iterator iterator(isolate_);
270 630 : while (Script* script = iterator.Next()) {
271 1200 : scripts[script->id()] = handle(script);
272 600 : }
273 : }
274 30 : auto profile = new v8::internal::AllocationProfile();
275 30 : TranslateAllocationNode(profile, &profile_root_, scripts);
276 30 : return profile;
277 : }
278 :
279 :
280 : } // namespace internal
281 : } // namespace v8
|