LCOV - code coverage report
Current view: top level - src/builtins - builtins-lazy-gen.cc
Test: app.info
Date: 2019-04-17

                 Hit    Total    Coverage
Lines:            69       69     100.0 %
Functions:        10       10     100.0 %

          Line data    Source code
       1             : // Copyright 2018 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/builtins/builtins-lazy-gen.h"
       6             : 
       7             : #include "src/builtins/builtins-utils-gen.h"
       8             : #include "src/builtins/builtins.h"
       9             : #include "src/feedback-vector.h"
      10             : #include "src/globals.h"
      11             : #include "src/objects/shared-function-info.h"
      12             : 
      13             : namespace v8 {
      14             : namespace internal {
      15             : 
      16         448 : void LazyBuiltinsAssembler::GenerateTailCallToJSCode(
      17             :     TNode<Code> code, TNode<JSFunction> function) {
      18             :   TNode<Int32T> argc =
      19         448 :       UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
      20         448 :   TNode<Context> context = CAST(Parameter(Descriptor::kContext));
      21         448 :   TNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
      22             : 
      23         448 :   TailCallJSCode(code, context, function, new_target, argc);
      24         448 : }
      25             : 
      26         280 : void LazyBuiltinsAssembler::GenerateTailCallToReturnedCode(
      27             :     Runtime::FunctionId function_id, TNode<JSFunction> function) {
      28         280 :   TNode<Context> context = CAST(Parameter(Descriptor::kContext));
      29         280 :   TNode<Code> code = CAST(CallRuntime(function_id, context, function));
      30         280 :   GenerateTailCallToJSCode(code, function);
      31         280 : }
      32             : 
      33         168 : void LazyBuiltinsAssembler::TailCallRuntimeIfMarkerEquals(
      34             :     TNode<Smi> marker, OptimizationMarker expected_marker,
      35             :     Runtime::FunctionId function_id, TNode<JSFunction> function) {
      36         336 :   Label no_match(this);
      37         336 :   GotoIfNot(SmiEqual(marker, SmiConstant(expected_marker)), &no_match);
      38         168 :   GenerateTailCallToReturnedCode(function_id, function);
      39         168 :   BIND(&no_match);
      40         168 : }
      41             : 
      42          56 : void LazyBuiltinsAssembler::MaybeTailCallOptimizedCodeSlot(
      43             :     TNode<JSFunction> function, TNode<FeedbackVector> feedback_vector) {
      44         112 :   Label fallthrough(this);
      45             : 
      46             :   TNode<MaybeObject> maybe_optimized_code_entry = LoadMaybeWeakObjectField(
      47          56 :       feedback_vector, FeedbackVector::kOptimizedCodeOffset);
      48             : 
      49             :   // Check if the code entry is a Smi. If yes, we interpret it as an
       50             :   // optimization marker. Otherwise, interpret it as a weak reference to a code
      51             :   // object.
      52          56 :   Label optimized_code_slot_is_smi(this), optimized_code_slot_is_weak_ref(this);
      53         112 :   Branch(TaggedIsSmi(maybe_optimized_code_entry), &optimized_code_slot_is_smi,
      54          56 :          &optimized_code_slot_is_weak_ref);
      55             : 
      56          56 :   BIND(&optimized_code_slot_is_smi);
      57             :   {
      58             :     // Optimized code slot is a Smi optimization marker.
      59          56 :     TNode<Smi> marker = CAST(maybe_optimized_code_entry);
      60             : 
      61             :     // Fall through if no optimization trigger.
      62         112 :     GotoIf(SmiEqual(marker, SmiConstant(OptimizationMarker::kNone)),
      63          56 :            &fallthrough);
      64             : 
      65             :     // TODO(ishell): introduce Runtime::kHandleOptimizationMarker and check
      66             :     // all these marker values there.
      67             :     TailCallRuntimeIfMarkerEquals(marker,
      68             :                                   OptimizationMarker::kLogFirstExecution,
      69          56 :                                   Runtime::kFunctionFirstExecution, function);
      70             :     TailCallRuntimeIfMarkerEquals(marker, OptimizationMarker::kCompileOptimized,
      71             :                                   Runtime::kCompileOptimized_NotConcurrent,
      72          56 :                                   function);
      73             :     TailCallRuntimeIfMarkerEquals(
      74             :         marker, OptimizationMarker::kCompileOptimizedConcurrent,
      75          56 :         Runtime::kCompileOptimized_Concurrent, function);
      76             : 
      77             :     // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      78             :     // that an interrupt will eventually update the slot with optimized code.
      79             :     CSA_ASSERT(this,
      80             :                SmiEqual(marker,
      81             :                         SmiConstant(OptimizationMarker::kInOptimizationQueue)));
      82          56 :     Goto(&fallthrough);
      83             :   }
      84             : 
      85          56 :   BIND(&optimized_code_slot_is_weak_ref);
      86             :   {
      87             :     // Optimized code slot is a weak reference.
      88             :     TNode<Code> optimized_code =
      89          56 :         CAST(GetHeapObjectAssumeWeak(maybe_optimized_code_entry, &fallthrough));
      90             : 
      91             :     // Check if the optimized code is marked for deopt. If it is, call the
      92             :     // runtime to clear it.
      93          56 :     Label found_deoptimized_code(this);
      94             :     TNode<CodeDataContainer> code_data_container =
      95             :         CAST(LoadObjectField(optimized_code, Code::kCodeDataContainerOffset));
      96             : 
      97             :     TNode<Int32T> code_kind_specific_flags = LoadObjectField<Int32T>(
      98             :         code_data_container, CodeDataContainer::kKindSpecificFlagsOffset);
      99          56 :     GotoIf(IsSetWord32<Code::MarkedForDeoptimizationField>(
     100             :                code_kind_specific_flags),
     101          56 :            &found_deoptimized_code);
     102             : 
      103             :     // Optimized code is good; get it into the closure and link the closure into
     104             :     // the optimized functions list, then tail call the optimized code.
     105          56 :     StoreObjectField(function, JSFunction::kCodeOffset, optimized_code);
     106          56 :     GenerateTailCallToJSCode(optimized_code, function);
     107             : 
      108             :     // Optimized code slot contains deoptimized code; evict it and re-enter the
     109             :     // closure's code.
     110          56 :     BIND(&found_deoptimized_code);
     111          56 :     GenerateTailCallToReturnedCode(Runtime::kEvictOptimizedCodeSlot, function);
     112             :   }
     113             : 
      114             :   // Fall through if the optimized code cell is clear and there is no
     115             :   // optimization marker.
     116          56 :   BIND(&fallthrough);
     117          56 : }
     118             : 
     119          56 : void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
      120             :   // First look up the code; maybe we don't need to compile!
     121         112 :   Label compile_function(this, Label::kDeferred);
     122             : 
      123             :   // Check the code object for the SFI. If the SFI's code entry points to
     124             :   // CompileLazy, then we need to lazy compile regardless of the function or
     125             :   // feedback vector marker.
     126             :   TNode<SharedFunctionInfo> shared =
     127          56 :       CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
     128          56 :   TNode<Code> sfi_code = GetSharedFunctionInfoCode(shared, &compile_function);
     129             : 
     130          56 :   TNode<HeapObject> feedback_cell_value = LoadFeedbackCellValue(function);
     131             : 
      132             :   // If the feedback cell isn't initialized, compile the function.
     133         112 :   GotoIf(IsUndefined(feedback_cell_value), &compile_function);
     134             : 
     135          56 :   Label use_sfi_code(this);
     136             :   // If there is no feedback, don't check for optimized code.
     137         112 :   GotoIf(HasInstanceType(feedback_cell_value, CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
     138          56 :          &use_sfi_code);
     139             : 
      140             :   // If it isn't undefined or a fixed array, it must be a feedback vector.
     141             :   CSA_ASSERT(this, IsFeedbackVector(feedback_cell_value));
     142             : 
     143             :   // Is there an optimization marker or optimized code in the feedback vector?
     144          56 :   MaybeTailCallOptimizedCodeSlot(function, CAST(feedback_cell_value));
     145          56 :   Goto(&use_sfi_code);
     146             : 
     147          56 :   BIND(&use_sfi_code);
     148             :   // If not, install the SFI's code entry and jump to that.
     149             :   CSA_ASSERT(this, WordNotEqual(sfi_code, HeapConstant(BUILTIN_CODE(
     150             :                                               isolate(), CompileLazy))));
     151          56 :   StoreObjectField(function, JSFunction::kCodeOffset, sfi_code);
     152          56 :   GenerateTailCallToJSCode(sfi_code, function);
     153             : 
     154          56 :   BIND(&compile_function);
     155          56 :   GenerateTailCallToReturnedCode(Runtime::kCompileLazy, function);
     156          56 : }
     157             : 
     158         224 : TF_BUILTIN(CompileLazy, LazyBuiltinsAssembler) {
     159          56 :   TNode<JSFunction> function = CAST(Parameter(Descriptor::kTarget));
     160             : 
     161          56 :   CompileLazy(function);
     162          56 : }
     163             : 
     164         224 : TF_BUILTIN(CompileLazyDeoptimizedCode, LazyBuiltinsAssembler) {
     165             :   TNode<JSFunction> function = CAST(Parameter(Descriptor::kTarget));
     166             : 
     167             :   // Set the code slot inside the JSFunction to CompileLazy.
     168         112 :   TNode<Code> code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
     169          56 :   StoreObjectField(function, JSFunction::kCodeOffset, code);
     170          56 :   GenerateTailCallToJSCode(code, function);
     171          56 : }
     172             : 
     173             : }  // namespace internal
     174       59456 : }  // namespace v8

Generated by: LCOV version 1.10
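
The builtins covered above encode a single dispatch decision: compile lazily
through the runtime, reuse cached optimized code, act on an optimization marker,
or fall back to the SharedFunctionInfo's code. Below is a minimal, self-contained
C++ sketch of that decision order. The names Marker, Action, FeedbackState and
Dispatch are hypothetical stand-ins used only for illustration; they are not V8
API.

#include <cstdio>

// Illustrative stand-ins for the states the builtin inspects (not V8 types).
enum class Marker {
  kNone,
  kLogFirstExecution,
  kCompileOptimized,
  kCompileOptimizedConcurrent,
  kInOptimizationQueue
};

enum class Action {
  kCallRuntimeCompileLazy,  // Runtime::kCompileLazy
  kRunSfiCode,              // install and tail call the SFI's code
  kCallRuntimeForMarker,    // e.g. Runtime::kCompileOptimized_Concurrent
  kRunOptimizedCode,        // tail call the cached optimized code
  kEvictDeoptimizedCode     // Runtime::kEvictOptimizedCodeSlot
};

struct FeedbackState {
  bool cell_initialized = false;     // feedback cell value is not undefined
  bool has_feedback_vector = false;  // value is a FeedbackVector rather than
                                     // a closure feedback cell array
  bool has_optimized_code = false;   // weak slot holds a Code object
  bool code_marked_for_deopt = false;
  Marker marker = Marker::kNone;
};

// Mirrors the order of checks in CompileLazy and
// MaybeTailCallOptimizedCodeSlot above.
Action Dispatch(bool sfi_code_is_compile_lazy, const FeedbackState& fb) {
  if (sfi_code_is_compile_lazy) return Action::kCallRuntimeCompileLazy;
  if (!fb.cell_initialized) return Action::kCallRuntimeCompileLazy;
  if (!fb.has_feedback_vector) return Action::kRunSfiCode;
  if (fb.has_optimized_code) {
    return fb.code_marked_for_deopt ? Action::kEvictDeoptimizedCode
                                    : Action::kRunOptimizedCode;
  }
  if (fb.marker != Marker::kNone &&
      fb.marker != Marker::kInOptimizationQueue) {
    return Action::kCallRuntimeForMarker;
  }
  // kNone or kInOptimizationQueue: fall through to the SFI's code.
  return Action::kRunSfiCode;
}

int main() {
  FeedbackState fb;
  fb.cell_initialized = true;
  fb.has_feedback_vector = true;
  fb.marker = Marker::kCompileOptimizedConcurrent;
  std::printf("action = %d\n", static_cast<int>(Dispatch(false, fb)));
  return 0;
}

The sketch captures only the ordering of the checks; the real builtin performs
them with CodeStubAssembler loads, branches and tail calls rather than ordinary
C++ control flow.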