LCOV - code coverage report
Current view: top level - src - runtime-profiler.cc (source / functions)
Test:         app.info
Date:         2019-01-20

             Hit   Total   Coverage
Lines:        62      95     65.3 %
Functions:    10      13     76.9 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/runtime-profiler.h"
       6             : 
       7             : #include "src/assembler.h"
       8             : #include "src/base/platform/platform.h"
       9             : #include "src/bootstrapper.h"
      10             : #include "src/compilation-cache.h"
      11             : #include "src/compiler.h"
      12             : #include "src/execution.h"
      13             : #include "src/frames-inl.h"
      14             : #include "src/global-handles.h"
      15             : #include "src/interpreter/interpreter.h"
      16             : 
      17             : namespace v8 {
      18             : namespace internal {
      19             : 
      20             : // Number of times a function has to be seen on the stack before it is
      21             : // optimized.
      22             : static const int kProfilerTicksBeforeOptimization = 2;
      23             : 
      24             : // The number of ticks required for optimizing a function increases with
      25             : // the size of the bytecode. This is in addition to the
      26             : // kProfilerTicksBeforeOptimization required for any function.
      27             : static const int kBytecodeSizeAllowancePerTick = 1200;
      28             : 
       29             : // Maximum size in bytes of bytecode for a function to allow OSR.
      30             : static const int kOSRBytecodeSizeAllowanceBase = 180;
      31             : 
      32             : static const int kOSRBytecodeSizeAllowancePerTick = 48;
      33             : 
       34             : // Maximum size in bytes of bytecode for a function to be optimized
      35             : // the very first time it is seen on the stack.
      36             : static const int kMaxBytecodeSizeForEarlyOpt = 90;
      37             : 
      38             : // Certain functions are simply too big to be worth optimizing.
      39             : static const int kMaxBytecodeSizeForOpt = 60 * KB;
      40             : 
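As a quick self-contained illustration of how the tick constants above combine (editor's sketch; the 2400-byte function size is hypothetical, not taken from this report):

#include <cstdio>

int main() {
  // Values as defined in runtime-profiler.cc above.
  const int kProfilerTicksBeforeOptimization = 2;
  const int kBytecodeSizeAllowancePerTick = 1200;

  const int bytecode_length = 2400;  // hypothetical function size
  // Larger functions must be seen on the stack for proportionally more ticks.
  const int ticks_needed =
      kProfilerTicksBeforeOptimization +
      bytecode_length / kBytecodeSizeAllowancePerTick;  // 2 + 2400/1200 = 4
  std::printf("a %d-byte function needs %d profiler ticks\n", bytecode_length,
              ticks_needed);
  return 0;
}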
      41             : #define OPTIMIZATION_REASON_LIST(V)                            \
      42             :   V(DoNotOptimize, "do not optimize")                          \
      43             :   V(HotAndStable, "hot and stable")                            \
      44             :   V(SmallFunction, "small function")
      45             : 
      46             : enum class OptimizationReason : uint8_t {
      47             : #define OPTIMIZATION_REASON_CONSTANTS(Constant, message) k##Constant,
      48             :   OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS)
      49             : #undef OPTIMIZATION_REASON_CONSTANTS
      50             : };
      51             : 
      52           0 : char const* OptimizationReasonToString(OptimizationReason reason) {
      53             :   static char const* reasons[] = {
      54             : #define OPTIMIZATION_REASON_TEXTS(Constant, message) message,
      55             :       OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS)
      56             : #undef OPTIMIZATION_REASON_TEXTS
      57             :   };
      58       28839 :   size_t const index = static_cast<size_t>(reason);
      59             :   DCHECK_LT(index, arraysize(reasons));
      60       28839 :   return reasons[index];
      61             : }
      62             : 
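OPTIMIZATION_REASON_LIST is an X-macro: the single list above is expanded once into the enum constants and once into the parallel string table, which keeps the two in sync by construction. Roughly what the preprocessor produces (editor's sketch, not verbatim output):

#include <cstdint>

// From OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS):
enum class OptimizationReason : uint8_t {
  kDoNotOptimize,
  kHotAndStable,
  kSmallFunction,
};

// From OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS), inside
// OptimizationReasonToString():
static char const* reasons[] = {
    "do not optimize",
    "hot and stable",
    "small function",
};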
      63           0 : std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
      64           0 :   return os << OptimizationReasonToString(reason);
      65             : }
      66             : 
      67       62883 : RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
      68             :     : isolate_(isolate),
      69       62883 :       any_ic_changed_(false) {
      70       62883 : }
      71             : 
      72           0 : static void GetICCounts(JSFunction function, int* ic_with_type_info_count,
      73             :                         int* ic_generic_count, int* ic_total_count,
      74             :                         int* type_info_percentage, int* generic_percentage) {
      75           0 :   FeedbackVector vector = function->feedback_vector();
      76             :   vector->ComputeCounts(ic_with_type_info_count, ic_generic_count,
      77           0 :                         ic_total_count);
      78             : 
      79           0 :   if (*ic_total_count > 0) {
      80           0 :     *type_info_percentage = 100 * *ic_with_type_info_count / *ic_total_count;
      81           0 :     *generic_percentage = 100 * *ic_generic_count / *ic_total_count;
      82             :   } else {
      83           0 :     *type_info_percentage = 100;  // Compared against lower bound.
      84           0 :     *generic_percentage = 0;      // Compared against upper bound.
      85             :   }
      86           0 : }
      87             : 
      88       28839 : static void TraceRecompile(JSFunction function, const char* reason,
      89             :                            const char* type) {
      90       28839 :   if (FLAG_trace_opt) {
      91           0 :     PrintF("[marking ");
      92           0 :     function->ShortPrint();
      93           0 :     PrintF(" for %s recompilation, reason: %s", type, reason);
      94           0 :     if (FLAG_type_info_threshold > 0) {
      95             :       int typeinfo, generic, total, type_percentage, generic_percentage;
      96             :       GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
      97           0 :                   &generic_percentage);
      98             :       PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
      99           0 :              type_percentage);
     100           0 :       PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
     101             :     }
     102           0 :     PrintF("]\n");
     103             :   }
     104       28839 : }
     105             : 
     106       28839 : void RuntimeProfiler::Optimize(JSFunction function, OptimizationReason reason) {
     107             :   DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
     108       28839 :   TraceRecompile(function, OptimizationReasonToString(reason), "optimized");
     109       28839 :   function->MarkForOptimization(ConcurrencyMode::kConcurrent);
     110       28839 : }
     111             : 
     112       36484 : void RuntimeProfiler::AttemptOnStackReplacement(InterpretedFrame* frame,
     113             :                                                 int loop_nesting_levels) {
     114       36484 :   JSFunction function = frame->function();
     115       36484 :   SharedFunctionInfo shared = function->shared();
     116       36484 :   if (!FLAG_use_osr || !shared->IsUserJavaScript()) {
     117        4778 :     return;
     118             :   }
     119             : 
     120             :   // If the code is not optimizable, don't try OSR.
     121       34992 :   if (shared->optimization_disabled()) return;
     122             : 
     123             :   // We're using on-stack replacement: Store new loop nesting level in
     124             :   // BytecodeArray header so that certain back edges in any interpreter frame
     125             :   // for this bytecode will trigger on-stack replacement for that frame.
     126       31706 :   if (FLAG_trace_osr) {
     127           0 :     PrintF("[OSR - arming back edges in ");
     128           0 :     function->PrintName();
     129           0 :     PrintF("]\n");
     130             :   }
     131             : 
     132             :   DCHECK_EQ(StackFrame::INTERPRETED, frame->type());
     133       63412 :   int level = frame->GetBytecodeArray()->osr_loop_nesting_level();
     134             :   frame->GetBytecodeArray()->set_osr_loop_nesting_level(
     135       95118 :       Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
     136             : }
     137             : 
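Arming OSR here only records a loop nesting level in the BytecodeArray header; the interpreter consults that marker on loop back edges and triggers the actual on-stack replacement there. A minimal sketch of the saturating update performed above (standalone; the marker maximum is an assumed illustrative value, the real one comes from AbstractCode::kMaxLoopNestingMarker):

#include <algorithm>

// Each call raises the armed nesting level by loop_nesting_levels and clamps
// it at the marker maximum, so repeated profiler ticks arm progressively more
// of the function's loops.
int NextOsrLoopNestingLevel(int current_level, int loop_nesting_levels) {
  const int kMaxLoopNestingMarker = 6;  // assumed value, for illustration only
  return std::min(current_level + loop_nesting_levels, kMaxLoopNestingMarker);
}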
     138      152917 : void RuntimeProfiler::MaybeOptimize(JSFunction function,
     139             :                                     InterpretedFrame* frame) {
     140      152917 :   if (function->IsInOptimizationQueue()) {
     141        2653 :     if (FLAG_trace_opt_verbose) {
     142           0 :       PrintF("[function ");
     143           0 :       function->PrintName();
     144           0 :       PrintF(" is already in optimization queue]\n");
     145             :     }
     146             :     return;
     147             :   }
     148             : 
     149      150264 :   if (FLAG_always_osr) {
     150           0 :     AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     151             :     // Fall through and do a normal optimized compile as well.
     152      150264 :   } else if (MaybeOSR(function, frame)) {
     153             :     return;
     154             :   }
     155             : 
     156      108713 :   if (function->shared()->optimization_disabled()) return;
     157             : 
     158             :   OptimizationReason reason =
     159       67507 :       ShouldOptimize(function, function->shared()->GetBytecodeArray());
     160             : 
     161       67507 :   if (reason != OptimizationReason::kDoNotOptimize) {
     162       28839 :     Optimize(function, reason);
     163             :   }
     164             : }
     165             : 
     166      150264 : bool RuntimeProfiler::MaybeOSR(JSFunction function, InterpretedFrame* frame) {
     167      300528 :   int ticks = function->feedback_vector()->profiler_ticks();
     168             :   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
     169             :   // than kMaxToplevelSourceSize.
     170             : 
     171      449178 :   if (function->IsMarkedForOptimization() ||
     172      263410 :       function->IsMarkedForConcurrentOptimization() ||
     173      113146 :       function->HasOptimizedCode()) {
     174             :     // Attempt OSR if we are still running interpreted code even though the
     175             :     // the function has long been marked or even already been optimized.
     176             :     int64_t allowance =
     177             :         kOSRBytecodeSizeAllowanceBase +
     178       41551 :         static_cast<int64_t>(ticks) * kOSRBytecodeSizeAllowancePerTick;
     179       83102 :     if (function->shared()->GetBytecodeArray()->length() <= allowance) {
     180       29894 :       AttemptOnStackReplacement(frame);
     181             :     }
     182             :     return true;
     183             :   }
     184             :   return false;
     185             : }
     186             : 
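Because the allowance in MaybeOSR grows by kOSRBytecodeSizeAllowancePerTick on every profiler tick, a function that is already marked for optimization but still running interpreted code eventually qualifies for OSR regardless of its size. A small standalone sketch of when that happens, using the constants defined above (editor's illustration):

#include <cstdint>

// Smallest tick count at which a marked-but-still-interpreted function of the
// given bytecode length passes the OSR allowance check in MaybeOSR.
int64_t TicksUntilOsrEligible(int64_t bytecode_length) {
  const int64_t kOSRBytecodeSizeAllowanceBase = 180;
  const int64_t kOSRBytecodeSizeAllowancePerTick = 48;
  if (bytecode_length <= kOSRBytecodeSizeAllowanceBase) return 0;
  // Ceiling of (length - base) / per_tick.
  return (bytecode_length - kOSRBytecodeSizeAllowanceBase +
          kOSRBytecodeSizeAllowancePerTick - 1) /
         kOSRBytecodeSizeAllowancePerTick;
}
// e.g. TicksUntilOsrEligible(1200) == 22, since 180 + 22 * 48 = 1236 >= 1200.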
     187       67507 : OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction function,
     188             :                                                    BytecodeArray bytecode) {
     189      135014 :   int ticks = function->feedback_vector()->profiler_ticks();
     190       67507 :   if (bytecode->length() > kMaxBytecodeSizeForOpt) {
     191             :     return OptimizationReason::kDoNotOptimize;
     192             :   }
     193             : 
     194             :   int ticks_for_optimization =
     195             :       kProfilerTicksBeforeOptimization +
     196       65312 :       (bytecode->length() / kBytecodeSizeAllowancePerTick);
     197       65312 :   if (ticks >= ticks_for_optimization) {
     198             :     return OptimizationReason::kHotAndStable;
     199       82653 :   } else if (!any_ic_changed_ &&
     200             :              bytecode->length() < kMaxBytecodeSizeForEarlyOpt) {
     201             :     // If no IC was patched since the last tick and this function is very
     202             :     // small, optimistically optimize it now.
     203             :     return OptimizationReason::kSmallFunction;
     204       36473 :   } else if (FLAG_trace_opt_verbose) {
     205           0 :     PrintF("[not yet optimizing ");
     206           0 :     function->PrintName();
     207             :     PrintF(", not enough ticks: %d/%d and ", ticks,
     208           0 :            kProfilerTicksBeforeOptimization);
     209           0 :     if (any_ic_changed_) {
     210           0 :       PrintF("ICs changed]\n");
     211             :     } else {
     212             :       PrintF(" too large for small function optimization: %d/%d]\n",
     213           0 :              bytecode->length(), kMaxBytecodeSizeForEarlyOpt);
     214             :     }
     215             :   }
     216             :   return OptimizationReason::kDoNotOptimize;
     217             : }
     218             : 
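To spell out the precedence in ShouldOptimize with concrete (hypothetical) numbers: any function above 60 KB of bytecode is rejected outright; a 3000-byte function becomes kHotAndStable once ticks >= 2 + 3000 / 1200 = 4 (integer division); and an 80-byte function that has not yet reached its threshold is still returned as kSmallFunction, but only if no inline cache changed since the previous profiler round (any_ic_changed_ is reset at the end of MarkCandidatesForOptimization below).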
     219      247791 : void RuntimeProfiler::MarkCandidatesForOptimization() {
     220      247791 :   HandleScope scope(isolate_);
     221             : 
     222      495582 :   if (!isolate_->use_optimizer()) return;
     223             : 
     224             :   DisallowHeapAllocation no_gc;
     225             : 
     226             :   // Run through the JavaScript frames and collect them. If we already
      227             :   // have a sample of the function, we mark it for optimization
     228             :   // (eagerly or lazily).
     229             :   int frame_count = 0;
     230      161561 :   int frame_count_limit = FLAG_frame_count;
     231      484678 :   for (JavaScriptFrameIterator it(isolate_);
     232      484678 :        frame_count++ < frame_count_limit && !it.done();
     233      161556 :        it.Advance()) {
     234             :     JavaScriptFrame* frame = it.frame();
     235      331751 :     if (!frame->is_interpreted()) continue;
     236             : 
     237      152917 :     JSFunction function = frame->function();
     238             :     DCHECK(function->shared()->is_compiled());
     239      305834 :     if (!function->shared()->IsInterpreted()) continue;
     240             : 
     241      152917 :     if (!function->has_feedback_vector()) continue;
     242             : 
     243      152917 :     MaybeOptimize(function, InterpretedFrame::cast(frame));
     244             : 
     245             :     // TODO(leszeks): Move this increment to before the maybe optimize checks,
     246             :     // and update the tests to assume the increment has already happened.
     247      305834 :     int ticks = function->feedback_vector()->profiler_ticks();
     248      152917 :     if (ticks < Smi::kMaxValue) {
     249      305834 :       function->feedback_vector()->set_profiler_ticks(ticks + 1);
     250             :     }
     251             :   }
     252      161561 :   any_ic_changed_ = false;
     253             : }
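A consequence of the ordering called out in the TODO above: MaybeOptimize, and hence MaybeOSR and ShouldOptimize, always observe the profiler tick count from before the current round's increment, so on the first round in which a function is sampled the heuristics run with a tick count of zero and only the small-function shortcut can mark it for optimization.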
     254             : 
     255             : }  // namespace internal
     256      183867 : }  // namespace v8

Generated by: LCOV version 1.10