LCOV - code coverage report
Current view: top level - src - runtime-profiler.cc (source / functions) Hit Total Coverage
Test: app.info Lines: 56 88 63.6 %
Date: 2017-10-20 Functions: 8 11 72.7 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/runtime-profiler.h"
       6             : 
       7             : #include "src/assembler.h"
       8             : #include "src/base/platform/platform.h"
       9             : #include "src/bootstrapper.h"
      10             : #include "src/code-stubs.h"
      11             : #include "src/compilation-cache.h"
      12             : #include "src/compiler.h"
      13             : #include "src/execution.h"
      14             : #include "src/frames-inl.h"
      15             : #include "src/global-handles.h"
      16             : #include "src/interpreter/interpreter.h"
      17             : 
      18             : namespace v8 {
      19             : namespace internal {
      20             : 
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;

// The number of ticks required for optimizing a function increases with
// the size of the bytecode. This is in addition to the
// kProfilerTicksBeforeOptimization required for any function.
static const int kBytecodeSizeAllowancePerTick = 1200;

// Maximum size in bytes of generated code for a function to allow OSR.
static const int kOSRBytecodeSizeAllowanceBase = 180;

// Additional OSR size allowance granted per accumulated profiler tick.
static const int kOSRBytecodeSizeAllowancePerTick = 48;

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxBytecodeSizeForEarlyOpt = 90;

// Certain functions are simply too big to be worth optimizing.
static const int kMaxBytecodeSizeForOpt = 60 * KB;
      41             : 
// X-macro list of the reasons the profiler may (or may not) choose to
// optimize a function; each entry pairs an enum constant suffix with its
// human-readable trace message.
#define OPTIMIZATION_REASON_LIST(V)                            \
  V(DoNotOptimize, "do not optimize")                          \
  V(HotAndStable, "hot and stable")                            \
  V(SmallFunction, "small function")

// Outcome of ShouldOptimize; kDoNotOptimize means no action this tick.
enum class OptimizationReason : uint8_t {
#define OPTIMIZATION_REASON_CONSTANTS(Constant, message) k##Constant,
  OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS)
#undef OPTIMIZATION_REASON_CONSTANTS
};
      52             : 
      53           0 : char const* OptimizationReasonToString(OptimizationReason reason) {
      54             :   static char const* reasons[] = {
      55             : #define OPTIMIZATION_REASON_TEXTS(Constant, message) message,
      56             :       OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS)
      57             : #undef OPTIMIZATION_REASON_TEXTS
      58             :   };
      59       13078 :   size_t const index = static_cast<size_t>(reason);
      60             :   DCHECK_LT(index, arraysize(reasons));
      61       13078 :   return reasons[index];
      62             : }
      63             : 
      64           0 : std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
      65           0 :   return os << OptimizationReasonToString(reason);
      66             : }
      67             : 
      68       54999 : RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
      69             :     : isolate_(isolate),
      70       54999 :       any_ic_changed_(false) {
      71       54999 : }
      72             : 
      73           0 : static void GetICCounts(JSFunction* function, int* ic_with_type_info_count,
      74             :                         int* ic_generic_count, int* ic_total_count,
      75             :                         int* type_info_percentage, int* generic_percentage) {
      76             :   // Harvest vector-ics.
      77             :   FeedbackVector* vector = function->feedback_vector();
      78             :   vector->ComputeCounts(ic_with_type_info_count, ic_generic_count,
      79           0 :                         ic_total_count);
      80             : 
      81           0 :   if (*ic_total_count > 0) {
      82           0 :     *type_info_percentage = 100 * *ic_with_type_info_count / *ic_total_count;
      83           0 :     *generic_percentage = 100 * *ic_generic_count / *ic_total_count;
      84             :   } else {
      85           0 :     *type_info_percentage = 100;  // Compared against lower bound.
      86           0 :     *generic_percentage = 0;      // Compared against upper bound.
      87             :   }
      88           0 : }
      89             : 
      90       13078 : static void TraceRecompile(JSFunction* function, const char* reason,
      91             :                            const char* type) {
      92       13078 :   if (FLAG_trace_opt) {
      93           0 :     PrintF("[marking ");
      94           0 :     function->ShortPrint();
      95           0 :     PrintF(" for %s recompilation, reason: %s", type, reason);
      96           0 :     if (FLAG_type_info_threshold > 0) {
      97             :       int typeinfo, generic, total, type_percentage, generic_percentage;
      98             :       GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
      99           0 :                   &generic_percentage);
     100             :       PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
     101           0 :              type_percentage);
     102           0 :       PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
     103             :     }
     104           0 :     PrintF("]\n");
     105             :   }
     106       13078 : }
     107             : 
// Marks |function| for concurrent optimized recompilation and traces the
// decision when --trace-opt is on. |reason| must be an actual reason,
// never kDoNotOptimize.
void RuntimeProfiler::Optimize(JSFunction* function,
                               OptimizationReason reason) {
  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
  TraceRecompile(function, OptimizationReasonToString(reason), "optimized");
  function->MarkForOptimization(ConcurrencyMode::kConcurrent);
}
     114             : 
     115       18081 : void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
     116             :                                                 int loop_nesting_levels) {
     117       18081 :   JSFunction* function = frame->function();
     118             :   SharedFunctionInfo* shared = function->shared();
     119       18081 :   if (!FLAG_use_osr || !function->shared()->IsUserJavaScript()) {
     120             :     return;
     121             :   }
     122             : 
     123             :   // If the code is not optimizable, don't try OSR.
     124       11813 :   if (shared->optimization_disabled()) return;
     125             : 
     126             :   // We're using on-stack replacement: Store new loop nesting level in
     127             :   // BytecodeArray header so that certain back edges in any interpreter frame
     128             :   // for this bytecode will trigger on-stack replacement for that frame.
     129        9369 :   if (FLAG_trace_osr) {
     130           0 :     PrintF("[OSR - arming back edges in ");
     131           0 :     function->PrintName();
     132           0 :     PrintF("]\n");
     133             :   }
     134             : 
     135             :   DCHECK_EQ(StackFrame::INTERPRETED, frame->type());
     136             :   DCHECK(shared->HasBytecodeArray());
     137             :   int level = shared->bytecode_array()->osr_loop_nesting_level();
     138             :   shared->bytecode_array()->set_osr_loop_nesting_level(
     139        9369 :       Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
     140             : }
     141             : 
// Considers |function| (executing in |frame|) for optimization on this
// profiler tick. Order matters: an already-queued function is skipped
// first; OSR is attempted (and may consume the tick) before the
// optimization-disabled and already-optimized checks; only then is a
// regular concurrent optimization requested via Optimize().
void RuntimeProfiler::MaybeOptimize(JSFunction* function,
                                    JavaScriptFrame* frame) {
  if (function->IsInOptimizationQueue()) {
    if (FLAG_trace_opt_verbose) {
      PrintF("[function ");
      function->PrintName();
      PrintF(" is already in optimization queue]\n");
    }
    return;
  }

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
    // Fall through and do a normal optimized compile as well.
  } else if (MaybeOSR(function, frame)) {
    // OSR handled this tick; don't also request a regular optimization.
    return;
  }

  if (function->shared()->optimization_disabled()) return;

  if (frame->is_optimized()) return;

  OptimizationReason reason = ShouldOptimize(function, frame);

  if (reason != OptimizationReason::kDoNotOptimize) {
    Optimize(function, reason);
  }
}
     170             : 
// Attempts on-stack replacement when |function| is still running
// interpreted code in |frame| despite already being marked for -- or
// already having -- optimized code. Returns true when OSR was the right
// response for this tick (whether or not back edges were actually armed),
// telling the caller not to also consider a regular optimization.
bool RuntimeProfiler::MaybeOSR(JSFunction* function, JavaScriptFrame* frame) {
  SharedFunctionInfo* shared = function->shared();
  int ticks = function->feedback_vector()->profiler_ticks();

  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
  // than kMaxToplevelSourceSize.

  if (!frame->is_optimized() &&
      (function->IsMarkedForOptimization() ||
       function->IsMarkedForConcurrentOptimization() ||
       function->HasOptimizedCode())) {
    // Attempt OSR if we are still running interpreted code even though the
    // function has long been marked or even already been optimized.
    // The size budget grows with the number of accumulated profiler ticks.
    int64_t allowance =
        kOSRBytecodeSizeAllowanceBase +
        static_cast<int64_t>(ticks) * kOSRBytecodeSizeAllowancePerTick;
    if (shared->bytecode_array()->length() <= allowance) {
      AttemptOnStackReplacement(frame);
    }
    return true;
  }
  return false;
}
     194             : 
     195       40182 : OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction* function,
     196             :                                                    JavaScriptFrame* frame) {
     197             :   SharedFunctionInfo* shared = function->shared();
     198             :   int ticks = function->feedback_vector()->profiler_ticks();
     199             : 
     200       40182 :   if (shared->bytecode_array()->length() > kMaxBytecodeSizeForOpt) {
     201             :     return OptimizationReason::kDoNotOptimize;
     202             :   }
     203             : 
     204             :   int ticks_for_optimization =
     205             :       kProfilerTicksBeforeOptimization +
     206       40025 :       (shared->bytecode_array()->length() / kBytecodeSizeAllowancePerTick);
     207       40025 :   if (ticks >= ticks_for_optimization) {
     208             :     return OptimizationReason::kHotAndStable;
     209       34639 :   } else if (!any_ic_changed_ &&
     210             :              shared->bytecode_array()->length() < kMaxBytecodeSizeForEarlyOpt) {
     211             :     // If no IC was patched since the last tick and this function is very
     212             :     // small, optimistically optimize it now.
     213             :     return OptimizationReason::kSmallFunction;
     214       26947 :   } else if (FLAG_trace_opt_verbose) {
     215           0 :     PrintF("[not yet optimizing ");
     216           0 :     function->PrintName();
     217             :     PrintF(", not enough ticks: %d/%d and ", ticks,
     218           0 :            kProfilerTicksBeforeOptimization);
     219           0 :     if (any_ic_changed_) {
     220           0 :       PrintF("ICs changed]\n");
     221             :     } else {
     222             :       PrintF(" too large for small function optimization: %d/%d]\n",
     223           0 :              shared->bytecode_array()->length(), kMaxBytecodeSizeForEarlyOpt);
     224             :     }
     225             :   }
     226             :   return OptimizationReason::kDoNotOptimize;
     227             : }
     228             : 
// Profiler tick entry point. Walks at most FLAG_frame_count of the topmost
// JavaScript frames, considers each unoptimized interpreted function for
// optimization via MaybeOptimize(), and then bumps that function's
// profiler tick counter (saturating at Smi::kMaxValue). Finally clears the
// any_ic_changed_ flag so the next tick observes only new IC activity.
void RuntimeProfiler::MarkCandidatesForOptimization() {
  HandleScope scope(isolate_);

  // Nothing to do when the optimizing compiler is unavailable/disabled.
  if (!isolate_->use_optimizer()) return;

  DisallowHeapAllocation no_gc;

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimizations
  // (eagerly or lazily).
  int frame_count = 0;
  int frame_count_limit = FLAG_frame_count;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    // Frames already running optimized code need no further attention.
    if (frame->is_optimized()) continue;

    JSFunction* function = frame->function();
    DCHECK(function->shared()->is_compiled());
    if (!function->shared()->IsInterpreted()) continue;

    MaybeOptimize(function, frame);

    // TODO(leszeks): Move this increment to before the maybe optimize checks,
    // and update the tests to assume the increment has already happened.
    int ticks = function->feedback_vector()->profiler_ticks();
    if (ticks < Smi::kMaxValue) {
      function->feedback_vector()->set_profiler_ticks(ticks + 1);
    }
  }
  any_ic_changed_ = false;
}
     262             : 
     263             : }  // namespace internal
     264             : }  // namespace v8

Generated by: LCOV version 1.10