// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/profiler/profile-generator.h"

#include "src/objects/shared-function-info-inl.h"
#include "src/profiler/cpu-profiler.h"
#include "src/profiler/profile-generator-inl.h"
#include "src/tracing/trace-event.h"
#include "src/tracing/traced-value.h"

namespace v8 {
namespace internal {

void SourcePositionTable::SetPosition(int pc_offset, int line,
                                      int inlining_id) {
  DCHECK_GE(pc_offset, 0);
  DCHECK_GT(line, 0);  // The 1-based number of the source line.
  // Check that we are inserting in ascending order, so that the vector remains
  // sorted.
  DCHECK(pc_offsets_to_lines_.empty() ||
         pc_offsets_to_lines_.back().pc_offset < pc_offset);
  if (pc_offsets_to_lines_.empty() ||
      pc_offsets_to_lines_.back().line_number != line ||
      pc_offsets_to_lines_.back().inlining_id != inlining_id) {
    pc_offsets_to_lines_.push_back({pc_offset, line, inlining_id});
  }
}

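// The lookups below binary-search the sorted pc_offsets_to_lines_ vector with
// std::lower_bound and then step back one entry (unless already at the
// front), so a query resolves to the tuple just before the first entry that
// is >= the probe. Illustrative example: with tuples recorded at pc offsets
// 0 and 10, a query at offset 5 resolves to the tuple at 0, and a query at
// 15 resolves to the tuple at 10.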
int SourcePositionTable::GetSourceLineNumber(int pc_offset) const {
  if (pc_offsets_to_lines_.empty()) {
    return v8::CpuProfileNode::kNoLineNumberInfo;
  }
  auto it = std::lower_bound(
      pc_offsets_to_lines_.begin(), pc_offsets_to_lines_.end(),
      SourcePositionTuple{pc_offset, 0, SourcePosition::kNotInlined});
  if (it != pc_offsets_to_lines_.begin()) --it;
  return it->line_number;
}

int SourcePositionTable::GetInliningId(int pc_offset) const {
  if (pc_offsets_to_lines_.empty()) {
    return SourcePosition::kNotInlined;
  }
  auto it = std::lower_bound(
      pc_offsets_to_lines_.begin(), pc_offsets_to_lines_.end(),
      SourcePositionTuple{pc_offset, 0, SourcePosition::kNotInlined});
  if (it != pc_offsets_to_lines_.begin()) --it;
  return it->inlining_id;
}

void SourcePositionTable::print() const {
  base::OS::Print(" - source position table at %p\n", this);
  for (const SourcePositionTuple& pos_info : pc_offsets_to_lines_) {
    base::OS::Print(" %d --> line_number: %d inlining_id: %d\n",
                    pos_info.pc_offset, pos_info.line_number,
                    pos_info.inlining_id);
  }
}

const char* const CodeEntry::kWasmResourceNamePrefix = "wasm ";
const char* const CodeEntry::kEmptyResourceName = "";
const char* const CodeEntry::kEmptyBailoutReason = "";
const char* const CodeEntry::kNoDeoptReason = "";

const char* const CodeEntry::kProgramEntryName = "(program)";
const char* const CodeEntry::kIdleEntryName = "(idle)";
const char* const CodeEntry::kGarbageCollectorEntryName = "(garbage collector)";
const char* const CodeEntry::kUnresolvedFunctionName = "(unresolved function)";
const char* const CodeEntry::kRootEntryName = "(root)";

base::LazyDynamicInstance<CodeEntry, CodeEntry::ProgramEntryCreateTrait>::type
    CodeEntry::kProgramEntry = LAZY_DYNAMIC_INSTANCE_INITIALIZER;

base::LazyDynamicInstance<CodeEntry, CodeEntry::IdleEntryCreateTrait>::type
    CodeEntry::kIdleEntry = LAZY_DYNAMIC_INSTANCE_INITIALIZER;

base::LazyDynamicInstance<CodeEntry, CodeEntry::GCEntryCreateTrait>::type
    CodeEntry::kGCEntry = LAZY_DYNAMIC_INSTANCE_INITIALIZER;

base::LazyDynamicInstance<CodeEntry,
                          CodeEntry::UnresolvedEntryCreateTrait>::type
    CodeEntry::kUnresolvedEntry = LAZY_DYNAMIC_INSTANCE_INITIALIZER;

base::LazyDynamicInstance<CodeEntry, CodeEntry::RootEntryCreateTrait>::type
    CodeEntry::kRootEntry = LAZY_DYNAMIC_INSTANCE_INITIALIZER;

CodeEntry* CodeEntry::ProgramEntryCreateTrait::Create() {
  return new CodeEntry(CodeEventListener::FUNCTION_TAG,
                       CodeEntry::kProgramEntryName);
}

CodeEntry* CodeEntry::IdleEntryCreateTrait::Create() {
  return new CodeEntry(CodeEventListener::FUNCTION_TAG,
                       CodeEntry::kIdleEntryName);
}

CodeEntry* CodeEntry::GCEntryCreateTrait::Create() {
  return new CodeEntry(CodeEventListener::BUILTIN_TAG,
                       CodeEntry::kGarbageCollectorEntryName);
}

CodeEntry* CodeEntry::UnresolvedEntryCreateTrait::Create() {
  return new CodeEntry(CodeEventListener::FUNCTION_TAG,
                       CodeEntry::kUnresolvedFunctionName);
}

CodeEntry* CodeEntry::RootEntryCreateTrait::Create() {
  return new CodeEntry(CodeEventListener::FUNCTION_TAG,
                       CodeEntry::kRootEntryName);
}

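// Hash used when CodeEntry objects are stored in unordered containers (e.g.
// the set of inlined entries). Entries backed by a script hash by script id
// and position; script-less entries fall back to the name and resource-name
// pointers plus the line number (the name pointers come from the profiler's
// string storage, so pointer identity is expected to imply string equality).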
uint32_t CodeEntry::GetHash() const {
  uint32_t hash = ComputeUnseededHash(tag());
  if (script_id_ != v8::UnboundScript::kNoScriptId) {
    hash ^= ComputeUnseededHash(static_cast<uint32_t>(script_id_));
    hash ^= ComputeUnseededHash(static_cast<uint32_t>(position_));
  } else {
    hash ^= ComputeUnseededHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)));
    hash ^= ComputeUnseededHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)));
    hash ^= ComputeUnseededHash(line_number_);
  }
  return hash;
}

bool CodeEntry::IsSameFunctionAs(const CodeEntry* entry) const {
  if (this == entry) return true;
  if (script_id_ != v8::UnboundScript::kNoScriptId) {
    return script_id_ == entry->script_id_ && position_ == entry->position_;
  }
  return name_ == entry->name_ && resource_name_ == entry->resource_name_ &&
         line_number_ == entry->line_number_;
}


void CodeEntry::SetBuiltinId(Builtins::Name id) {
  bit_field_ = TagField::update(bit_field_, CodeEventListener::BUILTIN_TAG);
  bit_field_ = BuiltinIdField::update(bit_field_, id);
}


int CodeEntry::GetSourceLine(int pc_offset) const {
  if (line_info_) return line_info_->GetSourceLineNumber(pc_offset);
  return v8::CpuProfileNode::kNoLineNumberInfo;
}

void CodeEntry::SetInlineStacks(
    std::unordered_set<std::unique_ptr<CodeEntry>, Hasher, Equals>
        inline_entries,
    std::unordered_map<int, std::vector<CodeEntryAndLineNumber>>
        inline_stacks) {
  EnsureRareData()->inline_entries_ = std::move(inline_entries);
  rare_data_->inline_stacks_ = std::move(inline_stacks);
}

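// Returns the stack of inlined frames recorded for the given pc offset, or
// nullptr when the offset does not fall inside inlined code. The inlining id
// looked up in line_info_ keys into the inline_stacks_ map populated by
// SetInlineStacks() above.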
const std::vector<CodeEntryAndLineNumber>* CodeEntry::GetInlineStack(
    int pc_offset) const {
  if (!line_info_) return nullptr;

  int inlining_id = line_info_->GetInliningId(pc_offset);
  if (inlining_id == SourcePosition::kNotInlined) return nullptr;
  DCHECK(rare_data_);

  auto it = rare_data_->inline_stacks_.find(inlining_id);
  return it != rare_data_->inline_stacks_.end() ? &it->second : nullptr;
}

void CodeEntry::set_deopt_info(
    const char* deopt_reason, int deopt_id,
    std::vector<CpuProfileDeoptFrame> inlined_frames) {
  DCHECK(!has_deopt_info());
  RareData* rare_data = EnsureRareData();
  rare_data->deopt_reason_ = deopt_reason;
  rare_data->deopt_id_ = deopt_id;
  rare_data->deopt_inlined_frames_ = std::move(inlined_frames);
}

void CodeEntry::FillFunctionInfo(SharedFunctionInfo shared) {
  if (!shared->script()->IsScript()) return;
  Script script = Script::cast(shared->script());
  set_script_id(script->id());
  set_position(shared->StartPosition());
  if (shared->optimization_disabled()) {
    set_bailout_reason(GetBailoutReason(shared->disable_optimization_reason()));
  }
}

CpuProfileDeoptInfo CodeEntry::GetDeoptInfo() {
  DCHECK(has_deopt_info());

  CpuProfileDeoptInfo info;
  info.deopt_reason = rare_data_->deopt_reason_;
  DCHECK_NE(kNoDeoptimizationId, rare_data_->deopt_id_);
  if (rare_data_->deopt_inlined_frames_.empty()) {
    info.stack.push_back(CpuProfileDeoptFrame(
        {script_id_, static_cast<size_t>(std::max(0, position()))}));
  } else {
    info.stack = rare_data_->deopt_inlined_frames_;
  }
  return info;
}

CodeEntry::RareData* CodeEntry::EnsureRareData() {
  if (!rare_data_) {
    rare_data_.reset(new RareData());
  }
  return rare_data_.get();
}

void CodeEntry::print() const {
  base::OS::Print("CodeEntry: at %p\n", this);

  base::OS::Print(" - name: %s\n", name_);
  base::OS::Print(" - resource_name: %s\n", resource_name_);
  base::OS::Print(" - line_number: %d\n", line_number_);
  base::OS::Print(" - column_number: %d\n", column_number_);
  base::OS::Print(" - script_id: %d\n", script_id_);
  base::OS::Print(" - position: %d\n", position_);
  base::OS::Print(" - instruction_start: %p\n",
                  reinterpret_cast<void*>(instruction_start_));

  if (line_info_) {
    line_info_->print();
  }

  if (rare_data_) {
    base::OS::Print(" - deopt_reason: %s\n", rare_data_->deopt_reason_);
    base::OS::Print(" - bailout_reason: %s\n", rare_data_->bailout_reason_);
    base::OS::Print(" - deopt_id: %d\n", rare_data_->deopt_id_);

    if (!rare_data_->inline_stacks_.empty()) {
      base::OS::Print(" - inline stacks:\n");
      for (auto it = rare_data_->inline_stacks_.begin();
           it != rare_data_->inline_stacks_.end(); it++) {
        base::OS::Print(" inlining_id: [%d]\n", it->first);
        for (const auto& e : it->second) {
          base::OS::Print(" %s --> %d\n", e.code_entry->name(),
                          e.line_number);
        }
      }
    } else {
      base::OS::Print(" - inline stacks: (empty)\n");
    }

    if (!rare_data_->deopt_inlined_frames_.empty()) {
      base::OS::Print(" - deopt inlined frames:\n");
      for (const CpuProfileDeoptFrame& frame :
           rare_data_->deopt_inlined_frames_) {
        base::OS::Print("script_id: %d position: %zu\n", frame.script_id,
                        frame.position);
      }
    } else {
      base::OS::Print(" - deopt inlined frames: (empty)\n");
    }
  }
  base::OS::Print("\n");
}

void ProfileNode::CollectDeoptInfo(CodeEntry* entry) {
  deopt_infos_.push_back(entry->GetDeoptInfo());
  entry->clear_deopt_info();
}

ProfileNode* ProfileNode::FindChild(CodeEntry* entry, int line_number) {
  auto map_entry = children_.find({entry, line_number});
  return map_entry != children_.end() ? map_entry->second : nullptr;
}

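// Children are keyed by (CodeEntry*, line number). With
// ProfilingMode::kCallerLineNumbers the same callee invoked from different
// lines of its caller gets distinct child nodes; otherwise line_number is
// kNoLineNumberInfo and the key degenerates to the CodeEntry alone.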
ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry, int line_number) {
  auto map_entry = children_.find({entry, line_number});
  if (map_entry == children_.end()) {
    ProfileNode* node = new ProfileNode(tree_, entry, this, line_number);
    children_[{entry, line_number}] = node;
    children_list_.push_back(node);
    return node;
  } else {
    return map_entry->second;
  }
}


void ProfileNode::IncrementLineTicks(int src_line) {
  if (src_line == v8::CpuProfileNode::kNoLineNumberInfo) return;
  // Increment a hit counter of a certain source line.
  // Add a new source line if not found.
  auto map_entry = line_ticks_.find(src_line);
  if (map_entry == line_ticks_.end()) {
    line_ticks_[src_line] = 1;
  } else {
    line_ticks_[src_line]++;
  }
}


bool ProfileNode::GetLineTicks(v8::CpuProfileNode::LineTick* entries,
                               unsigned int length) const {
  if (entries == nullptr || length == 0) return false;

  unsigned line_count = static_cast<unsigned>(line_ticks_.size());

  if (line_count == 0) return true;
  if (length < line_count) return false;

  v8::CpuProfileNode::LineTick* entry = entries;

  for (auto p = line_ticks_.begin(); p != line_ticks_.end(); p++, entry++) {
    entry->line = p->first;
    entry->hit_count = p->second;
  }

  return true;
}


void ProfileNode::Print(int indent) {
  int line_number = line_number_ != 0 ? line_number_ : entry_->line_number();
  base::OS::Print("%5u %*s %s:%d %d %d #%d", self_ticks_, indent, "",
                  entry_->name(), line_number, source_type(),
                  entry_->script_id(), id());
  if (entry_->resource_name()[0] != '\0')
    base::OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
  base::OS::Print("\n");
  for (size_t i = 0; i < deopt_infos_.size(); ++i) {
    CpuProfileDeoptInfo& info = deopt_infos_[i];
    base::OS::Print("%*s;;; deopted at script_id: %d position: %" PRIuS
                    " with reason '%s'.\n",
                    indent + 10, "", info.stack[0].script_id,
                    info.stack[0].position, info.deopt_reason);
    for (size_t index = 1; index < info.stack.size(); ++index) {
      base::OS::Print("%*s;;; Inline point: script_id %d position: %" PRIuS
                      ".\n",
                      indent + 10, "", info.stack[index].script_id,
                      info.stack[index].position);
    }
  }
  const char* bailout_reason = entry_->bailout_reason();
  if (bailout_reason != GetBailoutReason(BailoutReason::kNoReason) &&
      bailout_reason != CodeEntry::kEmptyBailoutReason) {
    base::OS::Print("%*s bailed out due to '%s'\n", indent + 10, "",
                    bailout_reason);
  }
  for (auto child : children_) {
    child.second->Print(indent + 2);
  }
}


class DeleteNodesCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    delete node;
  }

  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
};

ProfileTree::ProfileTree(Isolate* isolate)
    : next_node_id_(1),
      root_(new ProfileNode(this, CodeEntry::root_entry(), nullptr)),
      isolate_(isolate),
      next_function_id_(1) {}

ProfileTree::~ProfileTree() {
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}


unsigned ProfileTree::GetFunctionId(const ProfileNode* node) {
  CodeEntry* code_entry = node->entry();
  auto map_entry = function_ids_.find(code_entry);
  if (map_entry == function_ids_.end()) {
    return function_ids_[code_entry] = next_function_id_++;
  }
  return function_ids_[code_entry];
}

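// Adds a sampled stack to the tree, walking the path from its outermost
// caller (path.rbegin()) down to the innermost frame and creating child
// nodes on demand. Returns the node for the innermost frame so the caller
// can attribute ticks and deopt info to it.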
ProfileNode* ProfileTree::AddPathFromEnd(const std::vector<CodeEntry*>& path,
                                         int src_line, bool update_stats) {
  ProfileNode* node = root_;
  CodeEntry* last_entry = nullptr;
  for (auto it = path.rbegin(); it != path.rend(); ++it) {
    if (*it == nullptr) continue;
    last_entry = *it;
    node = node->FindOrAddChild(*it, v8::CpuProfileNode::kNoLineNumberInfo);
  }
  if (last_entry && last_entry->has_deopt_info()) {
    node->CollectDeoptInfo(last_entry);
  }
  if (update_stats) {
    node->IncrementSelfTicks();
    if (src_line != v8::CpuProfileNode::kNoLineNumberInfo) {
      node->IncrementLineTicks(src_line);
    }
  }
  return node;
}

ProfileNode* ProfileTree::AddPathFromEnd(const ProfileStackTrace& path,
                                         int src_line, bool update_stats,
                                         ProfilingMode mode) {
  ProfileNode* node = root_;
  CodeEntry* last_entry = nullptr;
  int parent_line_number = v8::CpuProfileNode::kNoLineNumberInfo;
  for (auto it = path.rbegin(); it != path.rend(); ++it) {
    if ((*it).code_entry == nullptr) continue;
    last_entry = (*it).code_entry;
    node = node->FindOrAddChild((*it).code_entry, parent_line_number);
    parent_line_number = mode == ProfilingMode::kCallerLineNumbers
                             ? (*it).line_number
                             : v8::CpuProfileNode::kNoLineNumberInfo;
  }
  if (last_entry && last_entry->has_deopt_info()) {
    node->CollectDeoptInfo(last_entry);
  }
  if (update_stats) {
    node->IncrementSelfTicks();
    if (src_line != v8::CpuProfileNode::kNoLineNumberInfo) {
      node->IncrementLineTicks(src_line);
    }
  }
  return node;
}


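// Cursor used by TraverseDepthFirst() below: it remembers a node and the
// index of the next child to visit, so the traversal can keep its own
// explicit stack instead of recursing.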
class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  V8_INLINE ProfileNode* current_child() {
    return node->children()->at(child_idx_);
  }
  V8_INLINE bool has_current_child() {
    return child_idx_ < static_cast<int>(node->children()->size());
  }
  V8_INLINE void next_child() { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;
};


// Non-recursive implementation of a depth-first post-order tree traversal.
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  std::vector<Position> stack;
  stack.emplace_back(root_);
  while (stack.size() > 0) {
    Position& current = stack.back();
    if (current.has_current_child()) {
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.emplace_back(current.current_child());
    } else {
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.size() > 1) {
        Position& parent = stack[stack.size() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();
      }
      // Remove child from the stack.
      stack.pop_back();
    }
  }
}

using v8::tracing::TracedValue;

std::atomic<uint32_t> CpuProfile::last_id_;

CpuProfile::CpuProfile(CpuProfiler* profiler, const char* title,
                       bool record_samples, ProfilingMode mode)
    : title_(title),
      record_samples_(record_samples),
      mode_(mode),
      start_time_(base::TimeTicks::HighResolutionNow()),
      top_down_(profiler->isolate()),
      profiler_(profiler),
      streaming_next_sample_(0),
      id_(++last_id_) {
  auto value = TracedValue::Create();
  value->SetDouble("startTime",
                   (start_time_ - base::TimeTicks()).InMicroseconds());
  TRACE_EVENT_SAMPLE_WITH_ID1(TRACE_DISABLED_BY_DEFAULT("v8.cpu_profiler"),
                              "Profile", id_, "data", std::move(value));
}

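// Records one symbolized sample in this profile's call tree and, when sample
// recording is enabled, in the flat samples_ list. Pending nodes and samples
// are streamed to the tracing system in batches via
// StreamPendingTraceEvents(); the constants below control how many
// unstreamed samples or pending nodes may accumulate before a flush.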
void CpuProfile::AddPath(base::TimeTicks timestamp,
                         const ProfileStackTrace& path, int src_line,
                         bool update_stats) {
  ProfileNode* top_frame_node =
      top_down_.AddPathFromEnd(path, src_line, update_stats, mode_);

  if (record_samples_ && !timestamp.IsNull()) {
    samples_.push_back({top_frame_node, timestamp, src_line});
  }

  const int kSamplesFlushCount = 100;
  const int kNodesFlushCount = 10;
  if (samples_.size() - streaming_next_sample_ >= kSamplesFlushCount ||
      top_down_.pending_nodes_count() >= kNodesFlushCount) {
    StreamPendingTraceEvents();
  }
}

namespace {

void BuildNodeValue(const ProfileNode* node, TracedValue* value) {
  const CodeEntry* entry = node->entry();
  value->BeginDictionary("callFrame");
  value->SetString("functionName", entry->name());
  if (*entry->resource_name()) {
    value->SetString("url", entry->resource_name());
  }
  value->SetInteger("scriptId", entry->script_id());
  if (entry->line_number()) {
    value->SetInteger("lineNumber", entry->line_number() - 1);
  }
  if (entry->column_number()) {
    value->SetInteger("columnNumber", entry->column_number() - 1);
  }
  value->EndDictionary();
  value->SetInteger("id", node->id());
  if (node->parent()) {
    value->SetInteger("parent", node->parent()->id());
  }
  const char* deopt_reason = entry->bailout_reason();
  if (deopt_reason && deopt_reason[0] && strcmp(deopt_reason, "no reason")) {
    value->SetString("deoptReason", deopt_reason);
  }
}

}  // namespace

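// Emits everything not yet streamed as a "ProfileChunk" trace event: newly
// created nodes, the node ids of new samples, the time deltas between
// consecutive samples, and (when any sample carries line info) the
// per-sample line numbers. streaming_next_sample_ marks how far samples_
// has already been streamed.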
void CpuProfile::StreamPendingTraceEvents() {
  std::vector<const ProfileNode*> pending_nodes = top_down_.TakePendingNodes();
  if (pending_nodes.empty() && samples_.empty()) return;
  auto value = TracedValue::Create();

  if (!pending_nodes.empty() || streaming_next_sample_ != samples_.size()) {
    value->BeginDictionary("cpuProfile");
    if (!pending_nodes.empty()) {
      value->BeginArray("nodes");
      for (auto node : pending_nodes) {
        value->BeginDictionary();
        BuildNodeValue(node, value.get());
        value->EndDictionary();
      }
      value->EndArray();
    }
    if (streaming_next_sample_ != samples_.size()) {
      value->BeginArray("samples");
      for (size_t i = streaming_next_sample_; i < samples_.size(); ++i) {
        value->AppendInteger(samples_[i].node->id());
      }
      value->EndArray();
    }
    value->EndDictionary();
  }
  if (streaming_next_sample_ != samples_.size()) {
    value->BeginArray("timeDeltas");
    base::TimeTicks lastTimestamp =
        streaming_next_sample_ ? samples_[streaming_next_sample_ - 1].timestamp
                               : start_time();
    for (size_t i = streaming_next_sample_; i < samples_.size(); ++i) {
      value->AppendInteger(static_cast<int>(
          (samples_[i].timestamp - lastTimestamp).InMicroseconds()));
      lastTimestamp = samples_[i].timestamp;
    }
    value->EndArray();
    bool has_non_zero_lines =
        std::any_of(samples_.begin() + streaming_next_sample_, samples_.end(),
                    [](const SampleInfo& sample) { return sample.line != 0; });
    if (has_non_zero_lines) {
      value->BeginArray("lines");
      for (size_t i = streaming_next_sample_; i < samples_.size(); ++i) {
        value->AppendInteger(samples_[i].line);
      }
      value->EndArray();
    }
    streaming_next_sample_ = samples_.size();
  }

  TRACE_EVENT_SAMPLE_WITH_ID1(TRACE_DISABLED_BY_DEFAULT("v8.cpu_profiler"),
                              "ProfileChunk", id_, "data", std::move(value));
}

void CpuProfile::FinishProfile() {
  end_time_ = base::TimeTicks::HighResolutionNow();
  StreamPendingTraceEvents();
  auto value = TracedValue::Create();
  value->SetDouble("endTime", (end_time_ - base::TimeTicks()).InMicroseconds());
  TRACE_EVENT_SAMPLE_WITH_ID1(TRACE_DISABLED_BY_DEFAULT("v8.cpu_profiler"),
                              "ProfileChunk", id_, "data", std::move(value));
}

void CpuProfile::Print() {
  base::OS::Print("[Top down]:\n");
  top_down_.Print();
}

CodeMap::CodeMap() = default;

CodeMap::~CodeMap() {
  // First clean the free list as it's otherwise impossible to tell
  // the slot type.
  unsigned free_slot = free_list_head_;
  while (free_slot != kNoFreeSlot) {
    unsigned next_slot = code_entries_[free_slot].next_free_slot;
    code_entries_[free_slot].entry = nullptr;
    free_slot = next_slot;
  }
  for (auto slot : code_entries_) delete slot.entry;
}

void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  ClearCodesInRange(addr, addr + size);
  unsigned index = AddCodeEntry(addr, entry);
  code_map_.emplace(addr, CodeEntryMapInfo{index, size});
  DCHECK(entry->instruction_start() == kNullAddress ||
         addr == entry->instruction_start());
}

void CodeMap::ClearCodesInRange(Address start, Address end) {
  auto left = code_map_.upper_bound(start);
  if (left != code_map_.begin()) {
    --left;
    if (left->first + left->second.size <= start) ++left;
  }
  auto right = left;
  for (; right != code_map_.end() && right->first < end; ++right) {
    if (!entry(right->second.index)->used()) {
      DeleteCodeEntry(right->second.index);
    }
  }
  code_map_.erase(left, right);
}

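// Maps an arbitrary instruction address to the CodeEntry whose
// [start, start + size) range contains it, or nullptr when the address falls
// outside every registered code object. upper_bound finds the first entry
// starting after addr, so the candidate is the entry just before it.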
CodeEntry* CodeMap::FindEntry(Address addr) {
  auto it = code_map_.upper_bound(addr);
  if (it == code_map_.begin()) return nullptr;
  --it;
  Address start_address = it->first;
  Address end_address = start_address + it->second.size;
  CodeEntry* ret = addr < end_address ? entry(it->second.index) : nullptr;
  if (ret && ret->instruction_start() != kNullAddress) {
    DCHECK_EQ(start_address, ret->instruction_start());
    DCHECK(addr >= start_address && addr < end_address);
  }
  return ret;
}

void CodeMap::MoveCode(Address from, Address to) {
  if (from == to) return;
  auto it = code_map_.find(from);
  if (it == code_map_.end()) return;
  CodeEntryMapInfo info = it->second;
  code_map_.erase(it);
  DCHECK(from + info.size <= to || to + info.size <= from);
  ClearCodesInRange(to, to + info.size);
  code_map_.emplace(to, info);

  CodeEntry* entry = code_entries_[info.index].entry;
  entry->set_instruction_start(to);
}

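// code_entries_ slots double as a singly linked free list: a live slot holds
// a CodeEntry*, a dead slot holds the index of the next free slot, and
// free_list_head_ points at the first free slot. AddCodeEntry reuses a free
// slot when one is available; DeleteCodeEntry pushes the slot back onto the
// list.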
unsigned CodeMap::AddCodeEntry(Address start, CodeEntry* entry) {
  if (free_list_head_ == kNoFreeSlot) {
    code_entries_.push_back(CodeEntrySlotInfo{entry});
    return static_cast<unsigned>(code_entries_.size()) - 1;
  }
  unsigned index = free_list_head_;
  free_list_head_ = code_entries_[index].next_free_slot;
  code_entries_[index].entry = entry;
  return index;
}

void CodeMap::DeleteCodeEntry(unsigned index) {
  delete code_entries_[index].entry;
  code_entries_[index].next_free_slot = free_list_head_;
  free_list_head_ = index;
}

void CodeMap::Print() {
  for (const auto& pair : code_map_) {
    base::OS::Print("%p %5d %s\n", reinterpret_cast<void*>(pair.first),
                    pair.second.size, entry(pair.second.index)->name());
  }
}

CpuProfilesCollection::CpuProfilesCollection(Isolate* isolate)
    : profiler_(nullptr), current_profiles_semaphore_(1) {}

bool CpuProfilesCollection::StartProfiling(const char* title,
                                           bool record_samples,
                                           ProfilingMode mode) {
  current_profiles_semaphore_.Wait();
  if (static_cast<int>(current_profiles_.size()) >= kMaxSimultaneousProfiles) {
    current_profiles_semaphore_.Signal();
    return false;
  }
  for (const std::unique_ptr<CpuProfile>& profile : current_profiles_) {
    if (strcmp(profile->title(), title) == 0) {
      // Ignore attempts to start a profile with the same title...
      current_profiles_semaphore_.Signal();
      // ... though return true to force it to collect a sample.
      return true;
    }
  }
  current_profiles_.emplace_back(
      new CpuProfile(profiler_, title, record_samples, mode));
  current_profiles_semaphore_.Signal();
  return true;
}


CpuProfile* CpuProfilesCollection::StopProfiling(const char* title) {
  const int title_len = StrLength(title);
  CpuProfile* profile = nullptr;
  current_profiles_semaphore_.Wait();

  auto it =
      std::find_if(current_profiles_.rbegin(), current_profiles_.rend(),
                   [&](const std::unique_ptr<CpuProfile>& p) {
                     return title_len == 0 || strcmp(p->title(), title) == 0;
                   });

  if (it != current_profiles_.rend()) {
    (*it)->FinishProfile();
    profile = it->get();
    finished_profiles_.push_back(std::move(*it));
    // Convert reverse iterator to matching forward iterator.
    current_profiles_.erase(--(it.base()));
  }

  current_profiles_semaphore_.Signal();
  return profile;
}


bool CpuProfilesCollection::IsLastProfile(const char* title) {
  // Called from VM thread, and only it can mutate the list,
  // so no locking is needed here.
  if (current_profiles_.size() != 1) return false;
  return StrLength(title) == 0
      || strcmp(current_profiles_[0]->title(), title) == 0;
}


void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
  // Called from VM thread for a completed profile.
  auto pos =
      std::find_if(finished_profiles_.begin(), finished_profiles_.end(),
                   [&](const std::unique_ptr<CpuProfile>& finished_profile) {
                     return finished_profile.get() == profile;
                   });
  DCHECK(pos != finished_profiles_.end());
  finished_profiles_.erase(pos);
}

void CpuProfilesCollection::AddPathToCurrentProfiles(
    base::TimeTicks timestamp, const ProfileStackTrace& path, int src_line,
    bool update_stats) {
  // As starting / stopping profiles is rare relative to this method,
  // we don't bother minimizing the time the lock is held, e.g. by copying
  // the contents of the list to a local vector.
  current_profiles_semaphore_.Wait();
  for (const std::unique_ptr<CpuProfile>& profile : current_profiles_) {
    profile->AddPath(timestamp, path, src_line, update_stats);
  }
  current_profiles_semaphore_.Signal();
}

ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
    : profiles_(profiles) {}

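// Symbolizes a raw TickSample into a ProfileStackTrace (resolving each
// address to a CodeEntry via the code map, expanding inlined frames, and
// picking a source line for tick attribution), then adds the resulting path
// to every currently running profile.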
void ProfileGenerator::RecordTickSample(const TickSample& sample) {
  ProfileStackTrace stack_trace;
  // Conservatively reserve space for stack frames + pc + function + vm-state.
  // There could in fact be more of them because of inlined entries.
  stack_trace.reserve(sample.frames_count + 3);

  // The ProfileNode knows nothing about all versions of generated code for
  // the same JS function. The line number information associated with
  // the latest version of generated code is used to find a source line number
  // for a JS function. Then, the detected source line is passed to
  // ProfileNode to increase the tick count for this source line.
  const int no_line_info = v8::CpuProfileNode::kNoLineNumberInfo;
  int src_line = no_line_info;
  bool src_line_not_found = true;

  if (sample.pc != nullptr) {
    if (sample.has_external_callback && sample.state == EXTERNAL) {
      // Don't use PC when in external callback code, as it can point
      // inside a callback's code, and we will erroneously report
      // that a callback calls itself.
      stack_trace.push_back(
          {FindEntry(reinterpret_cast<Address>(sample.external_callback_entry)),
           no_line_info});
    } else {
      Address attributed_pc = reinterpret_cast<Address>(sample.pc);
      CodeEntry* pc_entry = FindEntry(attributed_pc);
      // If there is no pc_entry, we're likely in native code. Find out if the
      // top of the stack (the return address) was pointing inside a JS
      // function, meaning that we have encountered a frameless invocation.
      if (!pc_entry && !sample.has_external_callback) {
        attributed_pc = reinterpret_cast<Address>(sample.tos);
        pc_entry = FindEntry(attributed_pc);
      }
      // If the pc is in the function's code before the stack frame has been
      // set up, or after the frame was destroyed, SafeStackFrameIterator
      // incorrectly thinks that ebp contains the return address of the
      // current function and skips the caller's frame. Check for this case
      // and just skip such samples.
      if (pc_entry) {
        int pc_offset =
            static_cast<int>(attributed_pc - pc_entry->instruction_start());
        // TODO(petermarshall): pc_offset can still be negative in some cases.
        src_line = pc_entry->GetSourceLine(pc_offset);
        if (src_line == v8::CpuProfileNode::kNoLineNumberInfo) {
          src_line = pc_entry->line_number();
        }
        src_line_not_found = false;
        stack_trace.push_back({pc_entry, src_line});

        if (pc_entry->builtin_id() == Builtins::kFunctionPrototypeApply ||
            pc_entry->builtin_id() == Builtins::kFunctionPrototypeCall) {
          // When the current function is either the Function.prototype.apply
          // or Function.prototype.call builtin, the top frame is either the
          // frame of the calling JS function or an internal frame.
          // In the latter case we know the caller for sure, but in the former
          // case we don't, so we simply replace the frame with an
          // 'unresolved' entry.
          if (!sample.has_external_callback) {
            stack_trace.push_back(
                {CodeEntry::unresolved_entry(), no_line_info});
          }
        }
      }
    }

    for (unsigned i = 0; i < sample.frames_count; ++i) {
      Address stack_pos = reinterpret_cast<Address>(sample.stack[i]);
      CodeEntry* entry = FindEntry(stack_pos);
      int line_number = no_line_info;
      if (entry) {
        // Find out if the entry has an inlining stack associated.
        int pc_offset =
            static_cast<int>(stack_pos - entry->instruction_start());
        // TODO(petermarshall): pc_offset can still be negative in some cases.
        const std::vector<CodeEntryAndLineNumber>* inline_stack =
            entry->GetInlineStack(pc_offset);
        if (inline_stack) {
          int most_inlined_frame_line_number = entry->GetSourceLine(pc_offset);
          stack_trace.insert(stack_trace.end(), inline_stack->begin(),
                             inline_stack->end());
          // This is a bit of a messy hack. The element of inline_stack for the
          // most-inlined frame (the function at the end of the chain of calls)
          // carries the wrong line number. The actual line number for that
          // frame is stored in the SourcePositionTable of entry, so we patch
          // it up here.
          // TODO(petermarshall): Remove this and use a tree with a node per
          // inlining_id.
          DCHECK(!inline_stack->empty());
          size_t index = stack_trace.size() - inline_stack->size();
          stack_trace[index].line_number = most_inlined_frame_line_number;
        }
        // Skip unresolved frames (e.g. internal frame) and get source line of
        // the first JS caller.
        if (src_line_not_found) {
          src_line = entry->GetSourceLine(pc_offset);
          if (src_line == v8::CpuProfileNode::kNoLineNumberInfo) {
            src_line = entry->line_number();
          }
          src_line_not_found = false;
        }
        line_number = entry->GetSourceLine(pc_offset);

        // The inline stack contains the top-level function, i.e. the same
        // function as entry. We don't want to add it twice. The one from the
        // inline stack has the correct line number for this particular
        // inlining, so we use it instead of pushing entry to stack_trace.
        if (inline_stack) continue;
      }
      stack_trace.push_back({entry, line_number});
    }
  }

  if (FLAG_prof_browser_mode) {
    bool no_symbolized_entries = true;
    for (auto e : stack_trace) {
      if (e.code_entry != nullptr) {
        no_symbolized_entries = false;
        break;
      }
    }
    // If no frames were symbolized, put the VM state entry in.
    if (no_symbolized_entries) {
      stack_trace.push_back({EntryForVMState(sample.state), no_line_info});
    }
  }

  profiles_->AddPathToCurrentProfiles(sample.timestamp, stack_trace, src_line,
                                      sample.update_stats);
}

CodeEntry* ProfileGenerator::EntryForVMState(StateTag tag) {
  switch (tag) {
    case GC:
      return CodeEntry::gc_entry();
    case JS:
    case PARSER:
    case COMPILER:
    case BYTECODE_COMPILER:
    // DOM event handlers are reported as OTHER / EXTERNAL entries.
    // To avoid confusing people, let's put all these entries into
    // one bucket.
    case OTHER:
    case EXTERNAL:
      return CodeEntry::program_entry();
    case IDLE:
      return CodeEntry::idle_entry();
  }
  UNREACHABLE();
}

}  // namespace internal
}  // namespace v8
|