Line data Source code
1 : // Copyright 2017 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/debug/debug-coverage.h"
6 :
7 : #include "src/ast/ast.h"
8 : #include "src/base/hashmap.h"
9 : #include "src/debug/debug.h"
10 : #include "src/deoptimizer.h"
11 : #include "src/frames-inl.h"
12 : #include "src/isolate.h"
13 : #include "src/objects.h"
14 : #include "src/objects/debug-objects-inl.h"
15 :
16 : namespace v8 {
17 : namespace internal {
18 :
19 447 : class SharedToCounterMap
20 : : public base::TemplateHashMapImpl<SharedFunctionInfo, uint32_t,
21 : base::KeyEqualityMatcher<Object>,
22 : base::DefaultAllocationPolicy> {
23 : public:
24 : using Entry = base::TemplateHashMapEntry<SharedFunctionInfo, uint32_t>;
25 15495 : inline void Add(SharedFunctionInfo key, uint32_t count) {
26 15495 : Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
27 15495 : uint32_t old_count = entry->value;
28 15495 : if (UINT32_MAX - count < old_count) {
29 0 : entry->value = UINT32_MAX;
30 : } else {
31 15495 : entry->value = old_count + count;
32 : }
33 15495 : }
34 :
35 28698 : inline uint32_t Get(SharedFunctionInfo key) {
36 : Entry* entry = Lookup(key, Hash(key));
37 28698 : if (entry == nullptr) return 0;
38 15001 : return entry->value;
39 : }
40 :
41 : private:
42 : static uint32_t Hash(SharedFunctionInfo key) {
43 44193 : return static_cast<uint32_t>(key.ptr());
44 : }
45 :
46 : DisallowHeapAllocation no_gc;
47 : };
48 :
49 : namespace {
50 203624 : int StartPosition(SharedFunctionInfo info) {
51 : int start = info->function_token_position();
52 203624 : if (start == kNoSourcePosition) start = info->StartPosition();
53 203624 : return start;
54 : }
55 :
56 87463 : bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
57 87463 : int a_start = StartPosition(a);
58 87463 : int b_start = StartPosition(b);
59 87463 : if (a_start == b_start) return a->EndPosition() > b->EndPosition();
60 85003 : return a_start < b_start;
61 : }
62 :
63 145568 : bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
64 : DCHECK_NE(kNoSourcePosition, a.start);
65 : DCHECK_NE(kNoSourcePosition, b.start);
66 145568 : if (a.start == b.start) return a.end > b.end;
67 144952 : return a.start < b.start;
68 : }
69 :
70 : void SortBlockData(std::vector<CoverageBlock>& v) {
71 : // Sort according to the block nesting structure.
72 : std::sort(v.begin(), v.end(), CompareCoverageBlock);
73 : }
74 :
75 12912 : std::vector<CoverageBlock> GetSortedBlockData(SharedFunctionInfo shared) {
76 : DCHECK(shared->HasCoverageInfo());
77 :
78 : CoverageInfo coverage_info =
79 12912 : CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());
80 :
81 : std::vector<CoverageBlock> result;
82 12912 : if (coverage_info->SlotCount() == 0) return result;
83 :
84 99044 : for (int i = 0; i < coverage_info->SlotCount(); i++) {
85 46264 : const int start_pos = coverage_info->StartSourcePosition(i);
86 46264 : const int until_pos = coverage_info->EndSourcePosition(i);
87 46264 : const int count = coverage_info->BlockCount(i);
88 :
89 : DCHECK_NE(kNoSourcePosition, start_pos);
90 46264 : result.emplace_back(start_pos, until_pos, count);
91 : }
92 :
93 : SortBlockData(result);
94 :
95 : return result;
96 : }
97 :
// A utility class to simplify logic for performing passes over block coverage
// ranges. Provides access to the implicit tree structure of ranges (i.e. access
// to parent and sibling blocks), and supports efficient in-place editing and
// deletion. The underlying backing store is the array of CoverageBlocks stored
// on the CoverageFunction.
//
// Usage pattern: construct over a sorted block list, call Next() in a loop,
// inspect/mutate GetBlock() and relatives, optionally DeleteBlock(). Deleted
// blocks are compacted out lazily as iteration advances; the destructor
// finishes any remaining compaction and shrinks the vector.
class CoverageBlockIterator final {
 public:
  explicit CoverageBlockIterator(CoverageFunction* function)
      : function_(function),
        ended_(false),
        delete_current_(false),
        read_index_(-1),
        write_index_(-1) {
    // All passes assume nesting order; see CompareCoverageBlock.
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  ~CoverageBlockIterator() {
    // Complete any pending compaction so deletions actually take effect,
    // even if the caller stopped iterating early.
    Finalize();
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  bool HasNext() const {
    return read_index_ + 1 < static_cast<int>(function_->blocks.size());
  }

  // Advances to the next block, maintaining the nesting stack and compacting
  // over previously deleted blocks. Returns false once iteration is done.
  bool Next() {
    if (!HasNext()) {
      // Flush the final block's write (or deletion) exactly once.
      if (!ended_) MaybeWriteCurrent();
      ended_ = true;
      return false;
    }

    // If a block has been deleted, subsequent iteration moves trailing blocks
    // to their updated position within the array.
    MaybeWriteCurrent();

    if (read_index_ == -1) {
      // Initialize the nesting stack with the function range.
      nesting_stack_.emplace_back(function_->start, function_->end,
                                  function_->count);
    } else if (!delete_current_) {
      // The block we just left becomes a potential ancestor of what follows.
      nesting_stack_.emplace_back(GetBlock());
    }

    delete_current_ = false;
    read_index_++;

    DCHECK(IsActive());

    // Pop stack entries that end at or before the new block's start — they
    // cannot contain it. The function-range sentinel (index 0) always stays.
    CoverageBlock& block = GetBlock();
    while (nesting_stack_.size() > 1 &&
           nesting_stack_.back().end <= block.start) {
      nesting_stack_.pop_back();
    }

    DCHECK_IMPLIES(block.start >= function_->end,
                   block.end == kNoSourcePosition);
    DCHECK_NE(block.start, kNoSourcePosition);
    DCHECK_LE(block.end, GetParent().end);

    return true;
  }

  CoverageBlock& GetBlock() {
    DCHECK(IsActive());
    return function_->blocks[read_index_];
  }

  CoverageBlock& GetNextBlock() {
    DCHECK(IsActive());
    DCHECK(HasNext());
    return function_->blocks[read_index_ + 1];
  }

  CoverageBlock& GetPreviousBlock() {
    DCHECK(IsActive());
    DCHECK_GT(read_index_, 0);
    return function_->blocks[read_index_ - 1];
  }

  // The innermost range enclosing the current block (possibly the implicit
  // function range pushed by the first Next()).
  CoverageBlock& GetParent() {
    DCHECK(IsActive());
    return nesting_stack_.back();
  }

  // True iff the following block starts within the current parent range,
  // i.e. it is a sibling or child of the current block.
  bool HasSiblingOrChild() {
    DCHECK(IsActive());
    return HasNext() && GetNextBlock().start < GetParent().end;
  }

  CoverageBlock& GetSiblingOrChild() {
    DCHECK(HasSiblingOrChild());
    DCHECK(IsActive());
    return GetNextBlock();
  }

  // A range is considered to be at top level if its parent range is the
  // function range.
  bool IsTopLevel() const { return nesting_stack_.size() == 1; }

  // Marks the current block for removal; the slot is reclaimed on the next
  // Next()/Finalize(). At most one deletion per block.
  void DeleteBlock() {
    DCHECK(!delete_current_);
    DCHECK(IsActive());
    delete_current_ = true;
  }

 private:
  // Copies the current block down over the gap left by earlier deletions
  // (no-op until a deletion has occurred), or skips it if it was deleted.
  void MaybeWriteCurrent() {
    if (delete_current_) return;
    if (read_index_ >= 0 && write_index_ != read_index_) {
      function_->blocks[write_index_] = function_->blocks[read_index_];
    }
    write_index_++;
  }

  // Drains remaining iteration and truncates the vector to the surviving
  // (compacted) blocks.
  void Finalize() {
    while (Next()) {
      // Just iterate to the end.
    }
    function_->blocks.resize(write_index_);
  }

  bool IsActive() const { return read_index_ >= 0 && !ended_; }

  CoverageFunction* function_;
  std::vector<CoverageBlock> nesting_stack_;  // Ancestors of current block.
  bool ended_;
  bool delete_current_;  // Current block marked for deletion.
  int read_index_;       // Index of the block Next() last yielded.
  int write_index_;      // Compaction target; trails read_index_ on deletes.
};
231 :
232 : bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
233 16176 : return lhs.start == rhs.start && lhs.end == rhs.end;
234 : }
235 :
236 12912 : void MergeDuplicateRanges(CoverageFunction* function) {
237 12912 : CoverageBlockIterator iter(function);
238 :
239 51780 : while (iter.Next() && iter.HasNext()) {
240 : CoverageBlock& block = iter.GetBlock();
241 : CoverageBlock& next_block = iter.GetNextBlock();
242 :
243 16176 : if (!HaveSameSourceRange(block, next_block)) continue;
244 :
245 : DCHECK_NE(kNoSourcePosition, block.end); // Non-singleton range.
246 0 : next_block.count = std::max(block.count, next_block.count);
247 : iter.DeleteBlock();
248 : }
249 12912 : }
250 :
// Rewrite position singletons (produced by unconditional control flow
// like return statements, and by continuation counters) into source
// ranges that end at the next sibling range or the end of the parent
// range, whichever comes first.
void RewritePositionSingletonsToRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (block.start >= function->end) {
      // Singleton past the function's end (e.g. a trailing continuation
      // counter) — it cannot be turned into a meaningful range; drop it.
      DCHECK_EQ(block.end, kNoSourcePosition);
      iter.DeleteBlock();
    } else if (block.end == kNoSourcePosition) {
      // The current block ends at the next sibling block (if it exists) or the
      // end of the parent block otherwise.
      if (iter.HasSiblingOrChild()) {
        block.end = iter.GetSiblingOrChild().start;
      } else if (iter.IsTopLevel()) {
        // See https://crbug.com/v8/6661. Functions are special-cased because
        // we never want the closing brace to be uncovered. This is mainly to
        // avoid a noisy UI.
        block.end = parent.end - 1;
      } else {
        block.end = parent.end;
      }
    }
  }
}
281 :
282 25824 : void MergeConsecutiveRanges(CoverageFunction* function) {
283 25824 : CoverageBlockIterator iter(function);
284 :
285 75740 : while (iter.Next()) {
286 : CoverageBlock& block = iter.GetBlock();
287 :
288 49916 : if (iter.HasSiblingOrChild()) {
289 : CoverageBlock& sibling = iter.GetSiblingOrChild();
290 37480 : if (sibling.start == block.end && sibling.count == block.count) {
291 : // Best-effort: this pass may miss mergeable siblings in the presence of
292 : // child blocks.
293 23708 : sibling.start = block.start;
294 : iter.DeleteBlock();
295 : }
296 : }
297 : }
298 25824 : }
299 :
300 12912 : void MergeNestedRanges(CoverageFunction* function) {
301 12912 : CoverageBlockIterator iter(function);
302 :
303 35604 : while (iter.Next()) {
304 : CoverageBlock& block = iter.GetBlock();
305 : CoverageBlock& parent = iter.GetParent();
306 :
307 22692 : if (parent.count == block.count) {
308 : // Transformation may not be valid if sibling blocks exist with a
309 : // differing count.
310 : iter.DeleteBlock();
311 : }
312 : }
313 12912 : }
314 :
// Removes singleton blocks whose start position aliases the start of the
// immediately-preceding full range. Such singletons must be discarded before
// RewritePositionSingletonsToRanges, which would otherwise expand them into
// ranges shadowing the aliased block (see https://crbug.com/v8/8237).
void FilterAliasedSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  iter.Next();  // Advance once since we reference the previous block later.

  while (iter.Next()) {
    CoverageBlock& previous_block = iter.GetPreviousBlock();
    CoverageBlock& block = iter.GetBlock();

    bool is_singleton = block.end == kNoSourcePosition;
    bool aliases_start = block.start == previous_block.start;

    if (is_singleton && aliases_start) {
      // The previous block must have a full range since duplicate singletons
      // have already been merged.
      DCHECK_NE(previous_block.end, kNoSourcePosition);
      // Likewise, the next block must have another start position since
      // singletons are sorted to the end.
      DCHECK_IMPLIES(iter.HasNext(), iter.GetNextBlock().start != block.start);
      iter.DeleteBlock();
    }
  }
}
338 :
339 12912 : void FilterUncoveredRanges(CoverageFunction* function) {
340 12912 : CoverageBlockIterator iter(function);
341 :
342 16428 : while (iter.Next()) {
343 : CoverageBlock& block = iter.GetBlock();
344 : CoverageBlock& parent = iter.GetParent();
345 3516 : if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
346 : }
347 12912 : }
348 :
349 12912 : void FilterEmptyRanges(CoverageFunction* function) {
350 12912 : CoverageBlockIterator iter(function);
351 :
352 16428 : while (iter.Next()) {
353 : CoverageBlock& block = iter.GetBlock();
354 3516 : if (block.start == block.end) iter.DeleteBlock();
355 : }
356 12912 : }
357 :
358 36 : void ClampToBinary(CoverageFunction* function) {
359 36 : CoverageBlockIterator iter(function);
360 :
361 60 : while (iter.Next()) {
362 : CoverageBlock& block = iter.GetBlock();
363 24 : if (block.count > 0) block.count = 1;
364 : }
365 36 : }
366 :
367 12912 : void ResetAllBlockCounts(SharedFunctionInfo shared) {
368 : DCHECK(shared->HasCoverageInfo());
369 :
370 : CoverageInfo coverage_info =
371 12912 : CoverageInfo::cast(shared->GetDebugInfo()->coverage_info());
372 :
373 105440 : for (int i = 0; i < coverage_info->SlotCount(); i++) {
374 46264 : coverage_info->ResetBlockCount(i);
375 : }
376 12912 : }
377 :
378 : bool IsBlockMode(debug::CoverageMode mode) {
379 28698 : switch (mode) {
380 : case debug::CoverageMode::kBlockBinary:
381 : case debug::CoverageMode::kBlockCount:
382 : return true;
383 : default:
384 : return false;
385 : }
386 : }
387 :
388 : bool IsBinaryMode(debug::CoverageMode mode) {
389 963996 : switch (mode) {
390 : case debug::CoverageMode::kBlockBinary:
391 : case debug::CoverageMode::kPreciseBinary:
392 : return true;
393 : default:
394 : return false;
395 : }
396 : }
397 :
// Populates |function->blocks| from |info|'s raw block counters and runs the
// normalization pipeline over them. The pass order below is load-bearing;
// see the inline comments and the linked bugs before reordering.
void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
                          debug::CoverageMode mode) {
  DCHECK(IsBlockMode(mode));

  function->has_block_coverage = true;
  function->blocks = GetSortedBlockData(info);

  // If in binary mode, only report counts of 0/1.
  if (mode == debug::CoverageMode::kBlockBinary) ClampToBinary(function);

  // Remove singleton ranges with the same start position as a full range and
  // throw away their counts.
  // Singleton ranges are only intended to split existing full ranges and should
  // never expand into a full range. Consider 'if (cond) { ... } else { ... }'
  // as a problematic example; if the then-block produces a continuation
  // singleton, it would incorrectly expand into the else range.
  // For more context, see https://crbug.com/v8/8237.
  FilterAliasedSingletons(function);

  // Rewrite all singletons (created e.g. by continuations and unconditional
  // control flow) to ranges.
  RewritePositionSingletonsToRanges(function);

  // Merge nested and consecutive ranges with identical counts.
  // Note that it's necessary to merge duplicate ranges prior to merging nested
  // changes in order to avoid invalid transformations. See crbug.com/827530.
  MergeConsecutiveRanges(function);

  SortBlockData(function->blocks);
  MergeDuplicateRanges(function);
  MergeNestedRanges(function);

  // A second consecutive-merge pass: the nested merge above may have exposed
  // new adjacent same-count siblings.
  MergeConsecutiveRanges(function);

  // Filter out ranges with count == 0 unless the immediate parent range has
  // a count != 0.
  FilterUncoveredRanges(function);

  // Filter out ranges of zero length.
  FilterEmptyRanges(function);

  // Reset all counters on the DebugInfo to zero.
  ResetAllBlockCounts(info);
}
442 : } // anonymous namespace
443 :
444 371 : std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
445 : DCHECK(!isolate->is_best_effort_code_coverage());
446 : std::unique_ptr<Coverage> result =
447 371 : Collect(isolate, isolate->code_coverage_mode());
448 742 : if (!isolate->is_collecting_type_profile() &&
449 359 : (isolate->is_precise_binary_code_coverage() ||
450 : isolate->is_block_binary_code_coverage())) {
451 : // We do not have to hold onto feedback vectors for invocations we already
452 : // reported. So we can reset the list.
453 48 : isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
454 : }
455 371 : return result;
456 : }
457 :
458 76 : std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
459 76 : return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
460 : }
461 :
// Builds the full coverage report for |isolate| under |collectionMode|:
// gathers invocation counts from feedback vectors, then walks every user
// script's SharedFunctionInfos to reconstruct per-function (and, in block
// modes, per-block) coverage.
std::unique_ptr<Coverage> Coverage::Collect(
    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
  // Accumulates invocation counts per SharedFunctionInfo.
  SharedToCounterMap counter_map;

  // Counts are consumed (reset) unless this is a one-off best-effort run.
  const bool reset_count =
      collectionMode != v8::debug::CoverageMode::kBestEffort;

  switch (isolate->code_coverage_mode()) {
    case v8::debug::CoverageMode::kBlockBinary:
    case v8::debug::CoverageMode::kBlockCount:
    case v8::debug::CoverageMode::kPreciseBinary:
    case v8::debug::CoverageMode::kPreciseCount: {
      // Feedback vectors are already listed to prevent losing them to GC.
      DCHECK(isolate->factory()
                 ->feedback_vectors_for_profiling_tools()
                 ->IsArrayList());
      Handle<ArrayList> list = Handle<ArrayList>::cast(
          isolate->factory()->feedback_vectors_for_profiling_tools());
      for (int i = 0; i < list->Length(); i++) {
        FeedbackVector vector = FeedbackVector::cast(list->Get(i));
        SharedFunctionInfo shared = vector->shared_function_info();
        DCHECK(shared->IsSubjectToDebugging());
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        if (reset_count) vector->clear_invocation_count();
        counter_map.Add(shared, count);
      }
      break;
    }
    case v8::debug::CoverageMode::kBestEffort: {
      DCHECK(!isolate->factory()
                  ->feedback_vectors_for_profiling_tools()
                  ->IsArrayList());
      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
      // No rooted list of vectors exists in best-effort mode, so scan the
      // whole heap for live FeedbackVectors instead.
      HeapIterator heap_iterator(isolate->heap());
      for (HeapObject current_obj = heap_iterator.next();
           !current_obj.is_null(); current_obj = heap_iterator.next()) {
        if (!current_obj->IsFeedbackVector()) continue;
        FeedbackVector vector = FeedbackVector::cast(current_obj);
        SharedFunctionInfo shared = vector->shared_function_info();
        if (!shared->IsSubjectToDebugging()) continue;
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        counter_map.Add(shared, count);
      }
      break;
    }
  }

  // Iterate shared function infos of every script and build a mapping
  // between source ranges and invocation counts.
  std::unique_ptr<Coverage> result(new Coverage());
  Script::Iterator scripts(isolate);
  for (Script script = scripts.Next(); !script.is_null();
       script = scripts.Next()) {
    if (!script->IsUserJavaScript()) continue;

    // Create and add new script data.
    Handle<Script> script_handle(script, isolate);
    result->emplace_back(script_handle);
    std::vector<CoverageFunction>* functions = &result->back().functions;

    std::vector<SharedFunctionInfo> sorted;

    {
      // Sort functions by start position, from outer to inner functions.
      SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
      for (SharedFunctionInfo info = infos.Next(); !info.is_null();
           info = infos.Next()) {
        sorted.push_back(info);
      }
      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
    }

    // Stack to track nested functions, referring function by index.
    std::vector<size_t> nesting;

    // Use sorted list to reconstruct function nesting.
    for (SharedFunctionInfo info : sorted) {
      int start = StartPosition(info);
      int end = info->EndPosition();
      uint32_t count = counter_map.Get(info);
      // Find the correct outer function based on start position.
      while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
        nesting.pop_back();
      }
      if (count != 0) {
        switch (collectionMode) {
          case v8::debug::CoverageMode::kBlockCount:
          case v8::debug::CoverageMode::kPreciseCount:
            break;
          case v8::debug::CoverageMode::kBlockBinary:
          case v8::debug::CoverageMode::kPreciseBinary:
            // Report a binary hit at most once per function; subsequent
            // collections see it as already reported.
            count = info->has_reported_binary_coverage() ? 0 : 1;
            info->set_has_reported_binary_coverage(true);
            break;
          case v8::debug::CoverageMode::kBestEffort:
            count = 1;
            break;
        }
      }

      Handle<String> name(info->DebugName(), isolate);
      CoverageFunction function(start, end, count, name);

      if (IsBlockMode(collectionMode) && info->HasCoverageInfo()) {
        CollectBlockCoverage(&function, info, collectionMode);
      }

      // Only include a function range if itself or its parent function is
      // covered, or if it contains non-trivial block coverage.
      bool is_covered = (count != 0);
      bool parent_is_covered =
          (!nesting.empty() && functions->at(nesting.back()).count != 0);
      bool has_block_coverage = !function.blocks.empty();
      if (is_covered || parent_is_covered || has_block_coverage) {
        nesting.push_back(functions->size());
        functions->emplace_back(function);
      }
    }

    // Remove entries for scripts that have no coverage.
    if (functions->empty()) result->pop_back();
  }
  return result;
}
586 :
// Switches the isolate's coverage mode, performing the state transitions
// each mode requires (dropping or rooting feedback vectors, deoptimizing,
// clearing stale counters).
void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBestEffort:
      // Note that DevTools switches back to best-effort coverage once the
      // recording is stopped. Since we delete coverage infos at that point, any
      // following coverage recording (without reloads) will be at function
      // granularity.
      isolate->debug()->RemoveAllCoverageInfos();
      if (!isolate->is_collecting_type_profile()) {
        // Drop the rooted vector list; type profiling still needs it.
        isolate->SetFeedbackVectorsForProfilingTools(
            ReadOnlyRoots(isolate).undefined_value());
      }
      break;
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
    case debug::CoverageMode::kPreciseBinary:
    case debug::CoverageMode::kPreciseCount: {
      HandleScope scope(isolate);

      // Remove all optimized function. Optimized and inlined functions do not
      // increment invocation count.
      Deoptimizer::DeoptimizeAll(isolate);

      // Root all feedback vectors to avoid early collection.
      isolate->MaybeInitializeVectorListFromHeap();

      // Walk the heap once to reset per-object coverage state left over from
      // earlier recordings.
      HeapIterator heap_iterator(isolate->heap());
      for (HeapObject o = heap_iterator.next(); !o.is_null();
           o = heap_iterator.next()) {
        if (IsBinaryMode(mode) && o->IsSharedFunctionInfo()) {
          // If collecting binary coverage, reset
          // SFI::has_reported_binary_coverage to avoid optimizing / inlining
          // functions before they have reported coverage.
          SharedFunctionInfo shared = SharedFunctionInfo::cast(o);
          shared->set_has_reported_binary_coverage(false);
        } else if (o->IsFeedbackVector()) {
          // In any case, clear any collected invocation counts.
          FeedbackVector::cast(o)->clear_invocation_count();
        }
      }

      break;
    }
  }
  isolate->set_code_coverage_mode(mode);
}
633 :
634 : } // namespace internal
635 122036 : } // namespace v8
|