LCOV - code coverage report
Current view: top level - src - runtime-profiler.cc (source / functions)
Test: app.info
Date: 2019-04-17

                  Hit    Total    Coverage
Lines:             60       78      76.9 %
Functions:          8       11      72.7 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/runtime-profiler.h"
       6             : 
       7             : #include "src/assembler.h"
       8             : #include "src/base/platform/platform.h"
       9             : #include "src/bootstrapper.h"
      10             : #include "src/compilation-cache.h"
      11             : #include "src/compiler.h"
      12             : #include "src/execution.h"
      13             : #include "src/frames-inl.h"
      14             : #include "src/global-handles.h"
      15             : #include "src/interpreter/interpreter.h"
      16             : #include "src/tracing/trace-event.h"
      17             : 
      18             : namespace v8 {
      19             : namespace internal {
      20             : 
      21             : // Number of times a function has to be seen on the stack before it is
      22             : // optimized.
      23             : static const int kProfilerTicksBeforeOptimization = 2;
      24             : 
      25             : // The number of ticks required for optimizing a function increases with
      26             : // the size of the bytecode. This is in addition to the
      27             : // kProfilerTicksBeforeOptimization required for any function.
      28             : static const int kBytecodeSizeAllowancePerTick = 1200;
      29             : 
      30             : // Maximum bytecode size, in bytes, for a function to be eligible for OSR.
      31             : static const int kOSRBytecodeSizeAllowanceBase = 180;
      32             : 
      33             : static const int kOSRBytecodeSizeAllowancePerTick = 48;
      34             : 
      35             : // Maximum bytecode size, in bytes, for a function to be optimized
      36             : // the very first time it is seen on the stack.
      37             : static const int kMaxBytecodeSizeForEarlyOpt = 90;
      38             : 
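A worked example of the per-tick allowance, using only the constants defined above (the static_assert below is an illustrative sketch, not part of runtime-profiler.cc or its coverage data): a function with 6,000 bytes of bytecode needs 2 + 6000 / 1200 = 7 profiler ticks before ShouldOptimize considers it hot.

   // Illustrative sketch only; not present in the measured source.
   static_assert(kProfilerTicksBeforeOptimization +
                     6000 / kBytecodeSizeAllowancePerTick == 7,
                 "a 6000-byte function needs 7 profiler ticks");
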
      39             : #define OPTIMIZATION_REASON_LIST(V)                            \
      40             :   V(DoNotOptimize, "do not optimize")                          \
      41             :   V(HotAndStable, "hot and stable")                            \
      42             :   V(SmallFunction, "small function")
      43             : 
      44             : enum class OptimizationReason : uint8_t {
      45             : #define OPTIMIZATION_REASON_CONSTANTS(Constant, message) k##Constant,
      46             :   OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS)
      47             : #undef OPTIMIZATION_REASON_CONSTANTS
      48             : };
      49             : 
      50           0 : char const* OptimizationReasonToString(OptimizationReason reason) {
      51             :   static char const* reasons[] = {
      52             : #define OPTIMIZATION_REASON_TEXTS(Constant, message) message,
      53             :       OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS)
      54             : #undef OPTIMIZATION_REASON_TEXTS
      55             :   };
      56       28646 :   size_t const index = static_cast<size_t>(reason);
      57             :   DCHECK_LT(index, arraysize(reasons));
      58       28646 :   return reasons[index];
      59             : }
      60             : 
      61           0 : std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
      62           0 :   return os << OptimizationReasonToString(reason);
      63             : }
      64             : 
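The OPTIMIZATION_REASON_LIST X-macro keeps the enum constants and their trace strings in sync. A rough sketch of what the two expansions above produce (illustration only; the expanded form is not itself part of the covered source):

   // Roughly equivalent expanded form:
   //   enum class OptimizationReason : uint8_t {
   //     kDoNotOptimize, kHotAndStable, kSmallFunction,
   //   };
   //   static char const* reasons[] = {"do not optimize", "hot and stable",
   //                                   "small function"};
   // so OptimizationReasonToString(OptimizationReason::kHotAndStable)
   // returns "hot and stable".
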
      65       62425 : RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
      66             :     : isolate_(isolate),
      67       62425 :       any_ic_changed_(false) {
      68       62425 : }
      69             : 
      70       28646 : static void TraceRecompile(JSFunction function, const char* reason,
      71             :                            const char* type) {
      72       28646 :   if (FLAG_trace_opt) {
      73           1 :     PrintF("[marking ");
      74           1 :     function->ShortPrint();
      75           1 :     PrintF(" for %s recompilation, reason: %s", type, reason);
      76           1 :     PrintF("]\n");
      77             :   }
      78       28646 : }
      79             : 
      80           0 : void RuntimeProfiler::Optimize(JSFunction function, OptimizationReason reason) {
      81             :   DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
      82       28646 :   TraceRecompile(function, OptimizationReasonToString(reason), "optimized");
      83       28646 :   function->MarkForOptimization(ConcurrencyMode::kConcurrent);
      84           0 : }
      85             : 
      86       35281 : void RuntimeProfiler::AttemptOnStackReplacement(InterpretedFrame* frame,
      87             :                                                 int loop_nesting_levels) {
      88       35281 :   JSFunction function = frame->function();
      89       35281 :   SharedFunctionInfo shared = function->shared();
      90       35281 :   if (!FLAG_use_osr || !shared->IsUserJavaScript()) {
      91        4774 :     return;
      92             :   }
      93             : 
      94             :   // If the code is not optimizable, don't try OSR.
      95       33802 :   if (shared->optimization_disabled()) return;
      96             : 
      97             :   // We're using on-stack replacement: store the new loop nesting level in
      98             :   // the BytecodeArray header so that certain back edges in any interpreter
      99             :   // frame for this bytecode will trigger on-stack replacement for that frame.
     100       30507 :   if (FLAG_trace_osr) {
     101           0 :     PrintF("[OSR - arming back edges in ");
     102           0 :     function->PrintName();
     103           0 :     PrintF("]\n");
     104             :   }
     105             : 
     106             :   DCHECK_EQ(StackFrame::INTERPRETED, frame->type());
     107       61014 :   int level = frame->GetBytecodeArray()->osr_loop_nesting_level();
     108       91521 :   frame->GetBytecodeArray()->set_osr_loop_nesting_level(
     109             :       Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
     110             : }
     111             : 
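A small worked example of the arming step above, with hypothetical values (illustration only, not part of the covered source):

   //   old level = 3, loop_nesting_levels = 1
   //   new level = Min(3 + 1, AbstractCode::kMaxLoopNestingMarker)
   // The stored level never decreases here and is clamped at the marker value.
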
     112      164576 : void RuntimeProfiler::MaybeOptimize(JSFunction function,
     113             :                                     InterpretedFrame* frame) {
     114      164576 :   if (function->IsInOptimizationQueue()) {
     115        3782 :     if (FLAG_trace_opt_verbose) {
     116           0 :       PrintF("[function ");
     117           0 :       function->PrintName();
     118           0 :       PrintF(" is already in optimization queue]\n");
     119             :     }
     120             :     return;
     121             :   }
     122             : 
     123      160794 :   if (FLAG_always_osr) {
     124           0 :     AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     125             :     // Fall through and do a normal optimized compile as well.
     126      160794 :   } else if (MaybeOSR(function, frame)) {
     127             :     return;
     128             :   }
     129             : 
     130      121167 :   if (function->shared()->optimization_disabled()) return;
     131             : 
     132             :   OptimizationReason reason =
     133       66205 :       ShouldOptimize(function, function->shared()->GetBytecodeArray());
     134             : 
     135       66205 :   if (reason != OptimizationReason::kDoNotOptimize) {
     136             :     Optimize(function, reason);
     137             :   }
     138             : }
     139             : 
     140      160794 : bool RuntimeProfiler::MaybeOSR(JSFunction function, InterpretedFrame* frame) {
     141             :   int ticks = function->feedback_vector()->profiler_ticks();
     142             :   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
     143             :   // than kMaxToplevelSourceSize.
     144             : 
     145      480814 :   if (function->IsMarkedForOptimization() ||
     146      284867 :       function->IsMarkedForConcurrentOptimization() ||
     147      124073 :       function->HasOptimizedCode()) {
     148             :     // Attempt OSR if we are still running interpreted code even though the
     149             :     // function has long been marked or has even already been optimized.
     150             :     int64_t allowance =
     151             :         kOSRBytecodeSizeAllowanceBase +
     152       39627 :         static_cast<int64_t>(ticks) * kOSRBytecodeSizeAllowancePerTick;
     153       79254 :     if (function->shared()->GetBytecodeArray()->length() <= allowance) {
     154       28676 :       AttemptOnStackReplacement(frame);
     155             :     }
     156             :     return true;
     157             :   }
     158             :   return false;
     159             : }
     160             : 
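The bytecode-size allowance used in MaybeOSR grows linearly with the tick count. A hypothetical helper mirroring that computation (illustration only; this function does not exist in the V8 source):

   // Hypothetical helper, not in runtime-profiler.cc; mirrors MaybeOSR above.
   static int64_t OsrBytecodeSizeAllowance(int ticks) {
     return kOSRBytecodeSizeAllowanceBase +
            static_cast<int64_t>(ticks) * kOSRBytecodeSizeAllowancePerTick;
   }
   // OsrBytecodeSizeAllowance(0) == 180 and OsrBytecodeSizeAllowance(5) == 420,
   // so the longer a marked function keeps running in the interpreter, the
   // larger the bytecode that still qualifies for OSR arming.
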
     161       66205 : OptimizationReason RuntimeProfiler::ShouldOptimize(JSFunction function,
     162             :                                                    BytecodeArray bytecode) {
     163             :   int ticks = function->feedback_vector()->profiler_ticks();
     164             :   int ticks_for_optimization =
     165             :       kProfilerTicksBeforeOptimization +
     166       66205 :       (bytecode->length() / kBytecodeSizeAllowancePerTick);
     167       66205 :   if (ticks >= ticks_for_optimization) {
     168             :     return OptimizationReason::kHotAndStable;
     169       53017 :   } else if (!any_ic_changed_ &&
     170             :              bytecode->length() < kMaxBytecodeSizeForEarlyOpt) {
     171             :     // If no IC was patched since the last tick and this function is very
     172             :     // small, optimistically optimize it now.
     173             :     return OptimizationReason::kSmallFunction;
     174       37559 :   } else if (FLAG_trace_opt_verbose) {
     175           0 :     PrintF("[not yet optimizing ");
     176           0 :     function->PrintName();
     177             :     PrintF(", not enough ticks: %d/%d and ", ticks,
     178           0 :            kProfilerTicksBeforeOptimization);
     179           0 :     if (any_ic_changed_) {
     180           0 :       PrintF("ICs changed]\n");
     181             :     } else {
     182             :       PrintF(" too large for small function optimization: %d/%d]\n",
     183           0 :              bytecode->length(), kMaxBytecodeSizeForEarlyOpt);
     184             :     }
     185             :   }
     186             :   return OptimizationReason::kDoNotOptimize;
     187             : }
     188             : 
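ShouldOptimize is a three-way decision: hot-and-stable once the tick threshold is reached, small-function if the bytecode is tiny and no IC changed since the last tick, otherwise do-not-optimize. A compact sketch of the same logic with the inputs passed in directly (illustration only; this helper is hypothetical and not part of the covered source):

   // Hypothetical standalone restatement of the decision above.
   static OptimizationReason ClassifyForIllustration(int ticks,
                                                     int bytecode_length,
                                                     bool any_ic_changed) {
     int ticks_needed = kProfilerTicksBeforeOptimization +
                        bytecode_length / kBytecodeSizeAllowancePerTick;
     if (ticks >= ticks_needed) return OptimizationReason::kHotAndStable;
     if (!any_ic_changed && bytecode_length < kMaxBytecodeSizeForEarlyOpt) {
       return OptimizationReason::kSmallFunction;
     }
     return OptimizationReason::kDoNotOptimize;
   }
   // e.g. ClassifyForIllustration(1, 80, false) == kSmallFunction, while
   //      ClassifyForIllustration(1, 80, true)  == kDoNotOptimize.
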
     189     1621547 : void RuntimeProfiler::MarkCandidatesForOptimization() {
     190     1621547 :   HandleScope scope(isolate_);
     191             : 
     192     1621547 :   if (!isolate_->use_optimizer()) return;
     193             : 
     194             :   DisallowHeapAllocation no_gc;
     195      517914 :   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
     196             :                "V8.MarkCandidatesForOptimization");
     197             : 
     198             :   // Run through the JavaScript frames on the stack. If we already have a
     199             :   // sample of the function, we mark it for optimization
     200             :   // (eagerly or lazily).
     201             :   int frame_count = 0;
     202      172638 :   int frame_count_limit = FLAG_frame_count;
     203      690536 :   for (JavaScriptFrameIterator it(isolate_);
     204      345268 :        frame_count++ < frame_count_limit && !it.done();
     205      172630 :        it.Advance()) {
     206             :     JavaScriptFrame* frame = it.frame();
     207      353314 :     if (!frame->is_interpreted()) continue;
     208             : 
     209      164576 :     JSFunction function = frame->function();
     210             :     DCHECK(function->shared()->is_compiled());
     211      329152 :     if (!function->shared()->IsInterpreted()) continue;
     212             : 
     213      164576 :     if (!function->has_feedback_vector()) continue;
     214             : 
     215      164576 :     MaybeOptimize(function, InterpretedFrame::cast(frame));
     216             : 
     217             :     // TODO(leszeks): Move this increment to before the maybe optimize checks,
     218             :     // and update the tests to assume the increment has already happened.
     219             :     int ticks = function->feedback_vector()->profiler_ticks();
     220      164576 :     if (ticks < Smi::kMaxValue) {
     221      164576 :       function->feedback_vector()->set_profiler_ticks(ticks + 1);
     222             :     }
     223             :   }
     224      172638 :   any_ic_changed_ = false;
     225             : }
     226             : 
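In outline, each invocation of MarkCandidatesForOptimization performs one profiler tick over at most FLAG_frame_count JavaScript frames (summary of the code above, not additional covered source):

   //   for each of the first FLAG_frame_count JavaScript frames:
   //     skip frames that are not interpreted or lack a feedback vector;
   //     MaybeOptimize(function, frame);  // may mark for optimization or arm OSR
   //     increment the function's profiler_ticks, saturating at Smi::kMaxValue;
   //   finally, reset any_ic_changed_ for the next tick.
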
     227             : }  // namespace internal
     228      122004 : }  // namespace v8

Generated by: LCOV version 1.10