Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/debug/debug-coverage.h"
6 :
7 : #include "src/ast/ast.h"
8 : #include "src/base/hashmap.h"
9 : #include "src/debug/debug.h"
10 : #include "src/deoptimizer.h"
11 : #include "src/frames-inl.h"
12 : #include "src/isolate.h"
13 : #include "src/objects.h"
14 : #include "src/objects/debug-objects-inl.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 :
19 : class SharedToCounterMap
20 : : public base::TemplateHashMapImpl<SharedFunctionInfo*, uint32_t,
21 : base::KeyEqualityMatcher<void*>,
22 : base::DefaultAllocationPolicy> {
23 : public:
24 : typedef base::TemplateHashMapEntry<SharedFunctionInfo*, uint32_t> Entry;
25 10766 : inline void Add(SharedFunctionInfo* key, uint32_t count) {
26 21532 : Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
27 10766 : uint32_t old_count = entry->value;
28 10766 : if (UINT32_MAX - count < old_count) {
29 0 : entry->value = UINT32_MAX;
30 : } else {
31 10766 : entry->value = old_count + count;
32 : }
33 10766 : }
34 :
35 23175 : inline uint32_t Get(SharedFunctionInfo* key) {
36 23175 : Entry* entry = Lookup(key, Hash(key));
37 23175 : if (entry == nullptr) return 0;
38 9892 : return entry->value;
39 : }
40 :
41 : private:
42 : static uint32_t Hash(SharedFunctionInfo* key) {
43 33941 : return static_cast<uint32_t>(reinterpret_cast<intptr_t>(key));
44 : }
45 :
46 : DisallowHeapAllocation no_gc;
47 : };
48 :
49 : namespace {
50 : int StartPosition(SharedFunctionInfo* info) {
51 : int start = info->function_token_position();
52 178619 : if (start == kNoSourcePosition) start = info->start_position();
53 : return start;
54 : }
55 :
56 77722 : bool CompareSharedFunctionInfo(SharedFunctionInfo* a, SharedFunctionInfo* b) {
57 : int a_start = StartPosition(a);
58 : int b_start = StartPosition(b);
59 79288 : if (a_start == b_start) return a->end_position() > b->end_position();
60 76156 : return a_start < b_start;
61 : }
62 :
63 111052 : bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
64 : DCHECK_NE(kNoSourcePosition, a.start);
65 : DCHECK_NE(kNoSourcePosition, b.start);
66 111052 : if (a.start == b.start) return a.end > b.end;
67 104116 : return a.start < b.start;
68 : }
69 :
70 7940 : std::vector<CoverageBlock> GetSortedBlockData(Isolate* isolate,
71 : SharedFunctionInfo* shared) {
72 : DCHECK(shared->HasCoverageInfo());
73 :
74 : CoverageInfo* coverage_info =
75 7940 : CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());
76 :
77 : std::vector<CoverageBlock> result;
78 7940 : if (coverage_info->SlotCount() == 0) return result;
79 :
80 38308 : for (int i = 0; i < coverage_info->SlotCount(); i++) {
81 38308 : const int start_pos = coverage_info->StartSourcePosition(i);
82 38308 : const int until_pos = coverage_info->EndSourcePosition(i);
83 38308 : const int count = coverage_info->BlockCount(i);
84 :
85 : DCHECK_NE(kNoSourcePosition, start_pos);
86 38308 : result.emplace_back(start_pos, until_pos, count);
87 : }
88 :
89 : // Sort according to the block nesting structure.
90 3740 : std::sort(result.begin(), result.end(), CompareCoverageBlock);
91 :
92 : return result;
93 : }
94 :
95 : // A utility class to simplify logic for performing passes over block coverage
96 : // ranges. Provides access to the implicit tree structure of ranges (i.e. access
97 : // to parent and sibling blocks), and supports efficient in-place editing and
98 : // deletion. The underlying backing store is the array of CoverageBlocks stored
99 : // on the CoverageFunction.
100 : class CoverageBlockIterator final {
101 : public:
102 : explicit CoverageBlockIterator(CoverageFunction* function)
103 : : function_(function),
104 : ended_(false),
105 : delete_current_(false),
106 : read_index_(-1),
107 79490 : write_index_(-1) {
108 : DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
109 : CompareCoverageBlock));
110 : }
111 :
112 39745 : ~CoverageBlockIterator() {  // Flushes pending writes/deletions before leaving scope.
113 39745 : Finalize();
114 : DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
115 : CompareCoverageBlock));
116 39745 : }
117 :
118 : bool HasNext() const {
119 492296 : return read_index_ + 1 < static_cast<int>(function_->blocks.size());
120 : }
121 :
122 355906 : bool Next() {  // Advances to the next block, maintaining the nesting stack.
123 184347 : if (!HasNext()) {
124 75750 : if (!ended_) MaybeWriteCurrent();  // Flush the final block exactly once.
125 75750 : ended_ = true;
126 75750 : return false;
127 : }
128 :
129 : // If a block has been deleted, subsequent iteration moves trailing blocks
130 : // to their updated position within the array.
131 108597 : MaybeWriteCurrent();
132 :
133 108597 : if (read_index_ == -1) {
134 : // Initialize the nesting stack with the function range.
135 : nesting_stack_.emplace_back(function_->start, function_->end,
136 184886 : function_->count);
137 95150 : } else if (!delete_current_) {
138 62962 : nesting_stack_.emplace_back(GetBlock());  // Previous block may enclose the next one.
139 : }
140 :
141 108597 : delete_current_ = false;
142 108597 : read_index_++;
143 :
144 : DCHECK(IsActive());
145 :
146 : CoverageBlock& block = GetBlock();
147 361814 : while (nesting_stack_.size() > 1 &&
148 81778 : nesting_stack_.back().end <= block.start) {
149 : nesting_stack_.pop_back();  // Pop ancestors that end before the new block starts.
150 : }
151 :
152 : DCHECK_IMPLIES(block.start >= function_->end,
153 : block.end == kNoSourcePosition);
154 : DCHECK_NE(block.start, kNoSourcePosition);
155 : DCHECK_LE(block.end, GetParent().end);
156 :
157 : return true;
158 : }
159 :
160 : CoverageBlock& GetBlock() {
161 : DCHECK(IsActive());
162 276416 : return function_->blocks[read_index_];
163 : }
164 :
165 : CoverageBlock& GetNextBlock() {
166 : DCHECK(IsActive());
167 : DCHECK(HasNext());
168 68906 : return function_->blocks[read_index_ + 1];
169 : }
170 :
171 : CoverageBlock& GetParent() {  // Innermost enclosing range (possibly the function range).
172 : DCHECK(IsActive());
173 : return nesting_stack_.back();
174 : }
175 :
176 23493 : bool HasSiblingOrChild() {  // True if another block starts before the parent range ends.
177 : DCHECK(IsActive());
178 61839 : return HasNext() && GetNextBlock().start < GetParent().end;
179 : }
180 :
181 : CoverageBlock& GetSiblingOrChild() {
182 : DCHECK(HasSiblingOrChild());
183 : DCHECK(IsActive());
184 : return GetNextBlock();
185 : }
186 :
187 : // A range is considered to be at top level if its parent range is the
188 : // function range.
189 7484 : bool IsTopLevel() const { return nesting_stack_.size() == 1; }
190 :
191 : void DeleteBlock() {  // Removal happens lazily on the next Next()/Finalize().
192 : DCHECK(!delete_current_);
193 : DCHECK(IsActive());
194 35300 : delete_current_ = true;
195 : }
196 :
197 : private:
198 148342 : void MaybeWriteCurrent() {  // Compacts the current block into its final slot unless deleted.
199 296684 : if (delete_current_) return;
200 113042 : if (read_index_ >= 0 && write_index_ != read_index_) {
201 45510 : function_->blocks[write_index_] = function_->blocks[read_index_];
202 : }
203 113042 : write_index_++;
204 : }
205 :
206 39745 : void Finalize() {
207 39745 : while (Next()) {
208 : // Just iterate to the end.
209 : }
210 39745 : function_->blocks.resize(write_index_);  // Trim now-unused trailing slots.
211 39745 : }
212 :
213 : bool IsActive() const { return read_index_ >= 0 && !ended_; }  // Positioned on a valid block.
214 :
215 : CoverageFunction* function_;
216 : std::vector<CoverageBlock> nesting_stack_;  // Ancestor ranges; front is the function range.
217 : bool ended_;  // True once iteration has run past the last block.
218 : bool delete_current_;  // Marks the current block for removal on advance.
219 : int read_index_;  // Index of the current block within function_->blocks.
220 : int write_index_;  // Compaction cursor; trails read_index_ after deletions.
221 : };
222 :
223 : bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
224 34568 : return lhs.start == rhs.start && lhs.end == rhs.end;
225 : }
226 :
227 7940 : void MergeDuplicateSingletons(CoverageFunction* function) {
228 : CoverageBlockIterator iter(function);
229 :
230 80816 : while (iter.Next() && iter.HasNext()) {
231 34568 : CoverageBlock& block = iter.GetBlock();
232 : CoverageBlock& next_block = iter.GetNextBlock();
233 :
234 : // Identical ranges should only occur through singleton ranges. Consider the
235 : // ranges for `for (.) break;`: continuation ranges for both the `break` and
236 : // `for` statements begin after the trailing semicolon.
237 : // Such ranges are merged and keep the maximal execution count.
238 34568 : if (!HaveSameSourceRange(block, next_block)) continue;
239 :
240 : DCHECK_EQ(kNoSourcePosition, block.end); // Singleton range.
241 12408 : next_block.count = std::max(block.count, next_block.count);
242 : iter.DeleteBlock();
243 7940 : }
244 7940 : }
245 :
246 : // Rewrite position singletons (produced by unconditional control flow
247 : // like return statements, and by continuation counters) into source
248 : // ranges that end at the next sibling range or the end of the parent
249 : // range, whichever comes first.
250 7940 : void RewritePositionSingletonsToRanges(CoverageFunction* function) {
251 : CoverageBlockIterator iter(function);
252 :
253 40044 : while (iter.Next()) {
254 32104 : CoverageBlock& block = iter.GetBlock();
255 : CoverageBlock& parent = iter.GetParent();
256 :
257 32104 : if (block.start >= function->end) {
258 : DCHECK_EQ(block.end, kNoSourcePosition);
259 : iter.DeleteBlock();
260 32104 : } else if (block.end == kNoSourcePosition) {
261 : // The current block ends at the next sibling block (if it exists) or the
262 : // end of the parent block otherwise.
263 17396 : if (iter.HasSiblingOrChild()) {
264 9912 : block.end = iter.GetSiblingOrChild().start;
265 7484 : } else if (iter.IsTopLevel()) {
266 : // See https://crbug.com/v8/6661. Functions are special-cased because
267 : // we never want the closing brace to be uncovered. This is mainly to
268 : // avoid a noisy UI.
269 3692 : block.end = parent.end - 1;
270 : } else {
271 3792 : block.end = parent.end;
272 : }
273 : }
274 7940 : }
275 7940 : }
276 :
277 7940 : void MergeNestedAndConsecutiveRanges(CoverageFunction* function) {
278 : CoverageBlockIterator iter(function);
279 :
280 40044 : while (iter.Next()) {
281 32104 : CoverageBlock& block = iter.GetBlock();
282 : CoverageBlock& parent = iter.GetParent();
283 :
284 32104 : if (parent.count == block.count) {
285 : iter.DeleteBlock();
286 6097 : } else if (iter.HasSiblingOrChild()) {
287 : CoverageBlock& sibling = iter.GetSiblingOrChild();
288 5253 : if (sibling.start == block.end && sibling.count == block.count) {
289 : // Best-effort: this pass may miss mergeable siblings in the presence of
290 : // child blocks.
291 3089 : sibling.start = block.start;
292 : iter.DeleteBlock();
293 : }
294 : }
295 7940 : }
296 7940 : }
297 :
298 7940 : void FilterUncoveredRanges(CoverageFunction* function) {
299 : CoverageBlockIterator iter(function);
300 :
301 10948 : while (iter.Next()) {
302 3008 : CoverageBlock& block = iter.GetBlock();
303 : CoverageBlock& parent = iter.GetParent();
304 3008 : if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
305 7940 : }
306 7940 : }
307 :
308 7940 : void FilterEmptyRanges(CoverageFunction* function) {
309 : CoverageBlockIterator iter(function);
310 :
311 10948 : while (iter.Next()) {
312 3008 : CoverageBlock& block = iter.GetBlock();
313 3008 : if (block.start == block.end) iter.DeleteBlock();
314 7940 : }
315 7940 : }
316 :
317 45 : void ClampToBinary(CoverageFunction* function) {
318 : CoverageBlockIterator iter(function);
319 :
320 110 : while (iter.Next()) {
321 65 : CoverageBlock& block = iter.GetBlock();
322 65 : if (block.count > 0) block.count = 1;
323 45 : }
324 45 : }
325 :
326 7940 : void ResetAllBlockCounts(SharedFunctionInfo* shared) {
327 : DCHECK(shared->HasCoverageInfo());
328 :
329 : CoverageInfo* coverage_info =
330 7940 : CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());
331 :
332 46248 : for (int i = 0; i < coverage_info->SlotCount(); i++) {
333 38308 : coverage_info->ResetBlockCount(i);
334 : }
335 7940 : }
336 :
337 : bool IsBlockMode(debug::Coverage::Mode mode) {
338 23175 : switch (mode) {
339 : case debug::Coverage::kBlockBinary:
340 : case debug::Coverage::kBlockCount:
341 : return true;
342 : default:
343 : return false;
344 : }
345 : }
346 :
347 7940 : void CollectBlockCoverage(Isolate* isolate, CoverageFunction* function,
348 : SharedFunctionInfo* info,
349 : debug::Coverage::Mode mode) {
350 : DCHECK(IsBlockMode(mode));
351 :
352 7940 : function->has_block_coverage = true;
353 15880 : function->blocks = GetSortedBlockData(isolate, info);
354 :
355 : // If in binary mode, only report counts of 0/1.
356 7940 : if (mode == debug::Coverage::kBlockBinary) ClampToBinary(function);
357 :
358 : // Remove duplicate singleton ranges, keeping the max count.
359 7940 : MergeDuplicateSingletons(function);
360 :
361 : // Rewrite all singletons (created e.g. by continuations and unconditional
362 : // control flow) to ranges.
363 7940 : RewritePositionSingletonsToRanges(function);
364 :
365 : // Merge nested and consecutive ranges with identical counts.
366 7940 : MergeNestedAndConsecutiveRanges(function);
367 :
368 : // Filter out ranges with count == 0 unless the immediate parent range has
369 : // a count != 0.
370 7940 : FilterUncoveredRanges(function);
371 :
372 : // Filter out ranges of zero length.
373 7940 : FilterEmptyRanges(function);
374 :
375 :
376 : // Reset all counters on the DebugInfo to zero.
377 7940 : ResetAllBlockCounts(info);
378 7940 : }
379 : } // anonymous namespace
380 :
381 347 : std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
382 : DCHECK(!isolate->is_best_effort_code_coverage());
383 : std::unique_ptr<Coverage> result =
384 347 : Collect(isolate, isolate->code_coverage_mode());
385 1041 : if (!isolate->is_collecting_type_profile() &&
386 332 : (isolate->is_precise_binary_code_coverage() ||
387 : isolate->is_block_binary_code_coverage())) {
388 : // We do not have to hold onto feedback vectors for invocations we already
389 : // reported. So we can reset the list.
390 60 : isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
391 : }
392 347 : return result;
393 : }
394 :
395 96 : std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
396 96 : return Collect(isolate, v8::debug::Coverage::kBestEffort);
397 : }
398 :
399 443 : std::unique_ptr<Coverage> Coverage::Collect(
400 443 : Isolate* isolate, v8::debug::Coverage::Mode collectionMode) {
401 : SharedToCounterMap counter_map;  // Invocation counts per SharedFunctionInfo.
402 :
403 : const bool reset_count = collectionMode != v8::debug::Coverage::kBestEffort;
404 :
405 443 : switch (isolate->code_coverage_mode()) {
406 : case v8::debug::Coverage::kBlockBinary:
407 : case v8::debug::Coverage::kBlockCount:
408 : case v8::debug::Coverage::kPreciseBinary:
409 : case v8::debug::Coverage::kPreciseCount: {
410 : // Feedback vectors are already listed to prevent losing them to GC.
411 : DCHECK(isolate->factory()
412 : ->feedback_vectors_for_profiling_tools()
413 : ->IsArrayList());
414 : Handle<ArrayList> list = Handle<ArrayList>::cast(
415 : isolate->factory()->feedback_vectors_for_profiling_tools());
416 11036 : for (int i = 0; i < list->Length(); i++) {
417 : FeedbackVector* vector = FeedbackVector::cast(list->Get(i));
418 : SharedFunctionInfo* shared = vector->shared_function_info();
419 : DCHECK(shared->IsSubjectToDebugging());
420 10262 : uint32_t count = static_cast<uint32_t>(vector->invocation_count());
421 10262 : if (reset_count) vector->clear_invocation_count();  // Precise modes report deltas.
422 10262 : counter_map.Add(shared, count);
423 : }
424 : break;
425 : }
426 : case v8::debug::Coverage::kBestEffort: {
427 : DCHECK(!isolate->factory()
428 : ->feedback_vectors_for_profiling_tools()
429 : ->IsArrayList());
430 : DCHECK_EQ(v8::debug::Coverage::kBestEffort, collectionMode);
431 56 : HeapIterator heap_iterator(isolate->heap());  // No list kept; scan the whole heap.
432 532473 : while (HeapObject* current_obj = heap_iterator.next()) {
433 532417 : if (!current_obj->IsFeedbackVector()) continue;
434 : FeedbackVector* vector = FeedbackVector::cast(current_obj);
435 : SharedFunctionInfo* shared = vector->shared_function_info();
436 6964 : if (!shared->IsSubjectToDebugging()) continue;
437 504 : uint32_t count = static_cast<uint32_t>(vector->invocation_count());
438 504 : counter_map.Add(shared, count);
439 : }
440 56 : break;
441 : }
442 : }
443 :
444 : // Iterate shared function infos of every script and build a mapping
445 : // between source ranges and invocation counts.
446 443 : std::unique_ptr<Coverage> result(new Coverage());
447 443 : Script::Iterator scripts(isolate);
448 9796 : while (Script* script = scripts.Next()) {
449 14899 : if (!script->IsUserJavaScript()) continue;
450 :
451 : // Create and add new script data.
452 : Handle<Script> script_handle(script, isolate);
453 3807 : result->emplace_back(script_handle);
454 8292 : std::vector<CoverageFunction>* functions = &result->back().functions;
455 :
456 : std::vector<SharedFunctionInfo*> sorted;
457 :
458 : {
459 : // Sort functions by start position, from outer to inner functions.
460 3807 : SharedFunctionInfo::ScriptIterator infos(script_handle);
461 26982 : while (SharedFunctionInfo* info = infos.Next()) {
462 23175 : sorted.push_back(info);
463 : }
464 26982 : std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
465 : }
466 :
467 : // Stack to track nested functions, referring function by index.
468 : std::vector<size_t> nesting;
469 :
470 : // Use sorted list to reconstruct function nesting.
471 30789 : for (SharedFunctionInfo* info : sorted) {
472 : int start = StartPosition(info);
473 : int end = info->end_position();
474 23175 : uint32_t count = counter_map.Get(info);
475 : // Find the correct outer function based on start position.
476 55826 : while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
477 : nesting.pop_back();
478 : }
479 23175 : if (count != 0) {
480 2523 : switch (collectionMode) {
481 : case v8::debug::Coverage::kBlockCount:
482 : case v8::debug::Coverage::kPreciseCount:
483 : break;
484 : case v8::debug::Coverage::kBlockBinary:
485 : case v8::debug::Coverage::kPreciseBinary:
486 60 : count = info->has_reported_binary_coverage() ? 0 : 1;  // Report each function once.
487 60 : info->set_has_reported_binary_coverage(true);
488 60 : break;
489 : case v8::debug::Coverage::kBestEffort:
490 : count = 1;
491 600 : break;
492 : }
493 : }
494 :
495 23175 : Handle<String> name(info->DebugName(), isolate);
496 : CoverageFunction function(start, end, count, name);
497 :
498 23175 : if (IsBlockMode(collectionMode) && info->HasCoverageInfo()) {
499 7940 : CollectBlockCoverage(isolate, &function, info, collectionMode);
500 : }
501 :
502 : // Only include a function range if itself or its parent function is
503 : // covered, or if it contains non-trivial block coverage.
504 23175 : bool is_covered = (count != 0);
505 : bool parent_is_covered =
506 25885 : (!nesting.empty() && functions->at(nesting.back()).count != 0);
507 : bool has_block_coverage = !function.blocks.empty();
508 23175 : if (is_covered || parent_is_covered || has_block_coverage) {
509 8970 : nesting.push_back(functions->size());  // Record index so children find their parent.
510 4485 : functions->emplace_back(function);
511 : }
512 : }
513 :
514 : // Remove entries for scripts that have no coverage.
515 3807 : if (functions->empty()) result->pop_back();
516 : }
517 443 : return result;
518 : }
519 :
520 696 : void Coverage::SelectMode(Isolate* isolate, debug::Coverage::Mode mode) {
521 410 : switch (mode) {
522 : case debug::Coverage::kBestEffort:
523 : // Note that DevTools switches back to best-effort coverage once the
524 : // recording is stopped. Since we delete coverage infos at that point, any
525 : // following coverage recording (without reloads) will be at function
526 : // granularity.
527 286 : isolate->debug()->RemoveAllCoverageInfos();
528 286 : if (!isolate->is_collecting_type_profile()) {
529 : isolate->SetFeedbackVectorsForProfilingTools(
530 276 : isolate->heap()->undefined_value());
531 : }
532 : break;
533 : case debug::Coverage::kBlockBinary:
534 : case debug::Coverage::kBlockCount:
535 : case debug::Coverage::kPreciseBinary:
536 : case debug::Coverage::kPreciseCount: {
537 : HandleScope scope(isolate);
538 : // Remove all optimized function. Optimized and inlined functions do not
539 : // increment invocation count.
540 124 : Deoptimizer::DeoptimizeAll(isolate);
541 124 : if (isolate->factory()
542 : ->feedback_vectors_for_profiling_tools()
543 : ->IsUndefined(isolate)) {
544 124 : isolate->InitializeVectorListFromHeap();
545 : }
546 : break;
547 : }
548 : }
549 : isolate->set_code_coverage_mode(mode);
550 410 : }
551 :
552 : } // namespace internal
553 : } // namespace v8
|