LCOV - code coverage report
Current view: top level - src/ic - binary-op-assembler.cc (source / functions)
Test:         app.info
Date:         2019-01-20
                            Hit      Total    Coverage
Lines:                      261      262      99.6 %
Functions:                  13       13       100.0 %

          Line data    Source code
       1             : // Copyright 2016 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/ic/binary-op-assembler.h"
       6             : 
       7             : #include "src/globals.h"
       8             : 
       9             : namespace v8 {
      10             : namespace internal {
      11             : 
      12             : using compiler::Node;
      13             : 
      14         336 : Node* BinaryOpAssembler::Generate_AddWithFeedback(Node* context, Node* lhs,
      15             :                                                   Node* rhs, Node* slot_id,
      16             :                                                   Node* feedback_vector,
      17             :                                                   bool rhs_is_smi) {
      18             :   // Shared entry for floating point addition.
      19         672 :   Label do_fadd(this), if_lhsisnotnumber(this, Label::kDeferred),
      20         336 :       check_rhsisoddball(this, Label::kDeferred),
      21         336 :       call_with_oddball_feedback(this), call_with_any_feedback(this),
      22         336 :       call_add_stub(this), end(this), bigint(this, Label::kDeferred);
      23         672 :   VARIABLE(var_fadd_lhs, MachineRepresentation::kFloat64);
      24         672 :   VARIABLE(var_fadd_rhs, MachineRepresentation::kFloat64);
      25         672 :   VARIABLE(var_type_feedback, MachineRepresentation::kTaggedSigned);
      26         672 :   VARIABLE(var_result, MachineRepresentation::kTagged);
      27             : 
      28             :   // Check if the {lhs} is a Smi or a HeapObject.
      29         336 :   Label if_lhsissmi(this);
      30             :   // If rhs is known to be a Smi, we want to fast path the Smi operation. This
      31             :   // is the case for the AddSmi operation. For the normal Add operation, we
      32             :   // want to fast path both Smi and Number operations, so this path should not
      33             :   // be marked as Deferred.
      34             :   Label if_lhsisnotsmi(this,
      35         672 :                        rhs_is_smi ? Label::kDeferred : Label::kNonDeferred);
      36         672 :   Branch(TaggedIsNotSmi(lhs), &if_lhsisnotsmi, &if_lhsissmi);
      37             : 
      38         336 :   BIND(&if_lhsissmi);
      39             :   {
      40         336 :     Comment("lhs is Smi");
      41         336 :     if (!rhs_is_smi) {
      42             :       // Check if the {rhs} is also a Smi.
      43         168 :       Label if_rhsissmi(this), if_rhsisnotsmi(this);
      44         336 :       Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
      45             : 
      46         168 :       BIND(&if_rhsisnotsmi);
      47             :       {
      48             :         // Check if the {rhs} is a HeapNumber.
      49         336 :         GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);
      50             : 
      51         336 :         var_fadd_lhs.Bind(SmiToFloat64(lhs));
      52         336 :         var_fadd_rhs.Bind(LoadHeapNumberValue(rhs));
      53         168 :         Goto(&do_fadd);
      54             :       }
      55             : 
      56         336 :       BIND(&if_rhsissmi);
      57             :     }
      58             : 
      59             :     {
      60         336 :       Comment("perform smi operation");
      61             :       // If rhs is known to be a Smi, we want to fast path the Smi operation.
      62             :       // This is the case for the AddSmi operation. For the normal Add
      63             :       // operation, we want to fast path both Smi and Number operations, so
      64             :       // this path should not be marked as Deferred.
      65             :       Label if_overflow(this,
      66         336 :                         rhs_is_smi ? Label::kDeferred : Label::kNonDeferred);
      67         336 :       TNode<Smi> smi_result = TrySmiAdd(CAST(lhs), CAST(rhs), &if_overflow);
      68             :       // Not overflowed.
      69             :       {
      70             :         var_type_feedback.Bind(
      71         336 :             SmiConstant(BinaryOperationFeedback::kSignedSmall));
      72         336 :         var_result.Bind(smi_result);
      73         336 :         Goto(&end);
      74             :       }
      75             : 
      76         336 :       BIND(&if_overflow);
      77             :       {
      78         672 :         var_fadd_lhs.Bind(SmiToFloat64(lhs));
      79         672 :         var_fadd_rhs.Bind(SmiToFloat64(rhs));
      80         336 :         Goto(&do_fadd);
      81         336 :       }
      82             :     }
      83             :   }
      84             : 
      85         336 :   BIND(&if_lhsisnotsmi);
      86             :   {
      87             :     // Check if {lhs} is a HeapNumber.
      88         672 :     GotoIfNot(IsHeapNumber(lhs), &if_lhsisnotnumber);
      89             : 
      90         336 :     if (!rhs_is_smi) {
      91             :       // Check if the {rhs} is a Smi.
      92         168 :       Label if_rhsissmi(this), if_rhsisnotsmi(this);
      93         336 :       Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
      94             : 
      95         168 :       BIND(&if_rhsisnotsmi);
      96             :       {
      97             :         // Check if the {rhs} is a HeapNumber.
      98         336 :         GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);
      99             : 
     100         336 :         var_fadd_lhs.Bind(LoadHeapNumberValue(lhs));
     101         336 :         var_fadd_rhs.Bind(LoadHeapNumberValue(rhs));
     102         168 :         Goto(&do_fadd);
     103             :       }
     104             : 
     105         336 :       BIND(&if_rhsissmi);
     106             :     }
     107             :     {
     108         672 :       var_fadd_lhs.Bind(LoadHeapNumberValue(lhs));
     109         672 :       var_fadd_rhs.Bind(SmiToFloat64(rhs));
     110         336 :       Goto(&do_fadd);
     111             :     }
     112             :   }
     113             : 
     114         336 :   BIND(&do_fadd);
     115             :   {
     116         336 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kNumber));
     117        1344 :     Node* value = Float64Add(var_fadd_lhs.value(), var_fadd_rhs.value());
     118         672 :     Node* result = AllocateHeapNumberWithValue(value);
     119         336 :     var_result.Bind(result);
     120         336 :     Goto(&end);
     121             :   }
     122             : 
     123         336 :   BIND(&if_lhsisnotnumber);
     124             :   {
     125             :     // No checks on rhs are done yet. We just know lhs is not a number or Smi.
     126         336 :     Label if_lhsisoddball(this), if_lhsisnotoddball(this);
     127         672 :     Node* lhs_instance_type = LoadInstanceType(lhs);
     128         672 :     Node* lhs_is_oddball = InstanceTypeEqual(lhs_instance_type, ODDBALL_TYPE);
     129         336 :     Branch(lhs_is_oddball, &if_lhsisoddball, &if_lhsisnotoddball);
     130             : 
     131         336 :     BIND(&if_lhsisoddball);
     132             :     {
     133         672 :       GotoIf(TaggedIsSmi(rhs), &call_with_oddball_feedback);
     134             : 
     135             :       // Check if {rhs} is a HeapNumber.
     136         336 :       Branch(IsHeapNumber(rhs), &call_with_oddball_feedback,
     137         672 :              &check_rhsisoddball);
     138             :     }
     139             : 
     140         336 :     BIND(&if_lhsisnotoddball);
     141             :     {
     142         336 :       Label lhs_is_string(this), lhs_is_bigint(this);
     143         672 :       GotoIf(IsStringInstanceType(lhs_instance_type), &lhs_is_string);
     144         672 :       GotoIf(IsBigIntInstanceType(lhs_instance_type), &lhs_is_bigint);
     145         336 :       Goto(&call_with_any_feedback);
     146             : 
     147         336 :       BIND(&lhs_is_bigint);
     148             :       {
     149         672 :         GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback);
     150         672 :         Branch(IsBigInt(rhs), &bigint, &call_with_any_feedback);
     151             :       }
     152             : 
     153         336 :       BIND(&lhs_is_string);
     154             :       // Check if the {rhs} is a Smi, and exit the string check early if it is.
     155         672 :       GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback);
     156             : 
     157         672 :       Node* rhs_instance_type = LoadInstanceType(rhs);
     158             : 
     159             :       // Exit unless {rhs} is a string. Since {lhs} is a string we no longer
     160             :       // need an Oddball check.
     161         336 :       GotoIfNot(IsStringInstanceType(rhs_instance_type),
     162         672 :                 &call_with_any_feedback);
     163             : 
     164         336 :       var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kString));
     165             :       var_result.Bind(
     166         672 :           CallBuiltin(Builtins::kStringAdd_CheckNone, context, lhs, rhs));
     167             : 
     168         672 :       Goto(&end);
     169         336 :     }
     170             :   }
     171             : 
     172         336 :   BIND(&check_rhsisoddball);
     173             :   {
     174             :     // Check if rhs is an oddball. At this point we know that lhs is either a
     175             :     // Smi, a HeapNumber, or an oddball, and that rhs is not a number or Smi.
     176         672 :     Node* rhs_instance_type = LoadInstanceType(rhs);
     177         672 :     Node* rhs_is_oddball = InstanceTypeEqual(rhs_instance_type, ODDBALL_TYPE);
     178         336 :     GotoIf(rhs_is_oddball, &call_with_oddball_feedback);
     179         336 :     Branch(IsBigIntInstanceType(rhs_instance_type), &bigint,
     180         672 :            &call_with_any_feedback);
     181             :   }
     182             : 
     183         336 :   BIND(&bigint);
     184             :   {
     185         336 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kBigInt));
     186             :     var_result.Bind(CallRuntime(Runtime::kBigIntBinaryOp, context, lhs, rhs,
     187         336 :                                 SmiConstant(Operation::kAdd)));
     188         336 :     Goto(&end);
     189             :   }
     190             : 
     191         336 :   BIND(&call_with_oddball_feedback);
     192             :   {
     193             :     var_type_feedback.Bind(
     194         336 :         SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
     195         336 :     Goto(&call_add_stub);
     196             :   }
     197             : 
     198         336 :   BIND(&call_with_any_feedback);
     199             :   {
     200         336 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kAny));
     201         336 :     Goto(&call_add_stub);
     202             :   }
     203             : 
     204         336 :   BIND(&call_add_stub);
     205             :   {
     206         672 :     var_result.Bind(CallBuiltin(Builtins::kAdd, context, lhs, rhs));
     207         336 :     Goto(&end);
     208             :   }
     209             : 
     210         336 :   BIND(&end);
     211         336 :   UpdateFeedback(var_type_feedback.value(), feedback_vector, slot_id);
     212         672 :   return var_result.value();
     213             : }
     214             : 
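Generate_AddWithFeedback above folds the observed operand types into a single
feedback value for the interpreter. The following is a minimal standalone
sketch of that classification; Kind and Feedback are invented stand-ins that
only mirror the names used above (a simplified model, not V8 code: real
feedback values are bit patterns and operands are tagged pointers).

    #include <cstdio>

    enum class Kind { kSmi, kHeapNumber, kOddball, kString, kBigInt, kOther };
    enum class Feedback {
      kSignedSmall, kNumber, kNumberOrOddball, kString, kBigInt, kAny
    };

    // Mirrors the branch structure above: Smi fast path, float path,
    // oddball path, string path, BigInt path, then the generic kAny bucket.
    Feedback ClassifyAdd(Kind lhs, Kind rhs, bool smi_overflow) {
      auto is_number = [](Kind k) {
        return k == Kind::kSmi || k == Kind::kHeapNumber;
      };
      auto is_number_or_oddball = [&](Kind k) {
        return is_number(k) || k == Kind::kOddball;
      };
      if (lhs == Kind::kSmi && rhs == Kind::kSmi && !smi_overflow)
        return Feedback::kSignedSmall;
      if (is_number(lhs) && is_number(rhs)) return Feedback::kNumber;
      if (is_number_or_oddball(lhs) && is_number_or_oddball(rhs))
        return Feedback::kNumberOrOddball;
      if (lhs == Kind::kString && rhs == Kind::kString) return Feedback::kString;
      if (rhs == Kind::kBigInt &&
          (lhs == Kind::kBigInt || is_number_or_oddball(lhs)))
        return Feedback::kBigInt;
      return Feedback::kAny;
    }

    int main() {
      // Smi + HeapNumber is still a pure number operation: kNumber.
      std::printf("%d\n", static_cast<int>(
                              ClassifyAdd(Kind::kSmi, Kind::kHeapNumber, false)));
    }
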
     215        1344 : Node* BinaryOpAssembler::Generate_BinaryOperationWithFeedback(
     216             :     Node* context, Node* lhs, Node* rhs, Node* slot_id, Node* feedback_vector,
     217             :     const SmiOperation& smiOperation, const FloatOperation& floatOperation,
     218             :     Operation op, bool rhs_is_smi) {
     219        2688 :   Label do_float_operation(this), end(this), call_stub(this),
     220        1344 :       check_rhsisoddball(this, Label::kDeferred), call_with_any_feedback(this),
     221        1344 :       if_lhsisnotnumber(this, Label::kDeferred),
     222        1344 :       if_bigint(this, Label::kDeferred);
     223        2688 :   VARIABLE(var_float_lhs, MachineRepresentation::kFloat64);
     224        2688 :   VARIABLE(var_float_rhs, MachineRepresentation::kFloat64);
     225        2688 :   VARIABLE(var_type_feedback, MachineRepresentation::kTaggedSigned);
     226        2688 :   VARIABLE(var_result, MachineRepresentation::kTagged);
     227             : 
     228        1344 :   Label if_lhsissmi(this);
     229             :   // If rhs is known to be a Smi (in the SubSmi, MulSmi, DivSmi, ModSmi
     230             :   // bytecode handlers), we want to fast path the Smi operation. For the
     231             :   // normal operation, we want to fast path both Smi and Number operations,
     232             :   // so this path should not be marked as Deferred.
     233             :   Label if_lhsisnotsmi(this,
     234        2688 :                        rhs_is_smi ? Label::kDeferred : Label::kNonDeferred);
     235        2688 :   Branch(TaggedIsNotSmi(lhs), &if_lhsisnotsmi, &if_lhsissmi);
     236             : 
     237             :   // Check if the {lhs} is a Smi or a HeapObject.
     238        1344 :   BIND(&if_lhsissmi);
     239             :   {
     240        1344 :     Comment("lhs is Smi");
     241        1344 :     if (!rhs_is_smi) {
     242             :       // Check if the {rhs} is also a Smi.
     243         672 :       Label if_rhsissmi(this), if_rhsisnotsmi(this);
     244        1344 :       Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
     245         672 :       BIND(&if_rhsisnotsmi);
     246             :       {
     247             :         // Check if {rhs} is a HeapNumber.
     248        1344 :         GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);
     249             : 
     250             :         // Perform a floating point operation.
     251        1344 :         var_float_lhs.Bind(SmiToFloat64(lhs));
     252        1344 :         var_float_rhs.Bind(LoadHeapNumberValue(rhs));
     253         672 :         Goto(&do_float_operation);
     254             :       }
     255             : 
     256        1344 :       BIND(&if_rhsissmi);
     257             :     }
     258             : 
     259             :     {
     260        1344 :       Comment("perform smi operation");
     261        1344 :       var_result.Bind(smiOperation(lhs, rhs, &var_type_feedback));
     262        1344 :       Goto(&end);
     263             :     }
     264             :   }
     265             : 
     266        1344 :   BIND(&if_lhsisnotsmi);
     267             :   {
     268        1344 :     Comment("lhs is not Smi");
     269             :     // Check if the {lhs} is a HeapNumber.
     270        2688 :     GotoIfNot(IsHeapNumber(lhs), &if_lhsisnotnumber);
     271             : 
     272        1344 :     if (!rhs_is_smi) {
     273             :       // Check if the {rhs} is a Smi.
     274         672 :       Label if_rhsissmi(this), if_rhsisnotsmi(this);
     275        1344 :       Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
     276             : 
     277         672 :       BIND(&if_rhsisnotsmi);
     278             :       {
     279             :         // Check if the {rhs} is a HeapNumber.
     280        1344 :         GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);
     281             : 
     282             :         // Perform a floating point operation.
     283        1344 :         var_float_lhs.Bind(LoadHeapNumberValue(lhs));
     284        1344 :         var_float_rhs.Bind(LoadHeapNumberValue(rhs));
     285         672 :         Goto(&do_float_operation);
     286             :       }
     287             : 
     288        1344 :       BIND(&if_rhsissmi);
     289             :     }
     290             : 
     291             :     {
     292             :       // Perform floating point operation.
     293        2688 :       var_float_lhs.Bind(LoadHeapNumberValue(lhs));
     294        2688 :       var_float_rhs.Bind(SmiToFloat64(rhs));
     295        1344 :       Goto(&do_float_operation);
     296             :     }
     297             :   }
     298             : 
     299        1344 :   BIND(&do_float_operation);
     300             :   {
     301        1344 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kNumber));
     302        1344 :     Node* lhs_value = var_float_lhs.value();
     303        1344 :     Node* rhs_value = var_float_rhs.value();
     304        1344 :     Node* value = floatOperation(lhs_value, rhs_value);
     305        2688 :     var_result.Bind(AllocateHeapNumberWithValue(value));
     306        1344 :     Goto(&end);
     307             :   }
     308             : 
     309        1344 :   BIND(&if_lhsisnotnumber);
     310             :   {
     311             :     // No checks on rhs are done yet. We just know lhs is not a number or Smi.
     312        1344 :     Label if_left_bigint(this), if_left_oddball(this);
     313        2688 :     Node* lhs_instance_type = LoadInstanceType(lhs);
     314        2688 :     GotoIf(IsBigIntInstanceType(lhs_instance_type), &if_left_bigint);
     315        2688 :     Node* lhs_is_oddball = InstanceTypeEqual(lhs_instance_type, ODDBALL_TYPE);
     316        1344 :     Branch(lhs_is_oddball, &if_left_oddball, &call_with_any_feedback);
     317             : 
     318        1344 :     BIND(&if_left_oddball);
     319             :     {
     320        1344 :       Label if_rhsissmi(this), if_rhsisnotsmi(this);
     321        2688 :       Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
     322             : 
     323        1344 :       BIND(&if_rhsissmi);
     324             :       {
     325             :         var_type_feedback.Bind(
     326        1344 :             SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
     327        1344 :         Goto(&call_stub);
     328             :       }
     329             : 
     330        1344 :       BIND(&if_rhsisnotsmi);
     331             :       {
     332             :         // Check if {rhs} is a HeapNumber.
     333        2688 :         GotoIfNot(IsHeapNumber(rhs), &check_rhsisoddball);
     334             : 
     335             :         var_type_feedback.Bind(
     336        1344 :             SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
     337        1344 :         Goto(&call_stub);
     338        1344 :       }
     339             :     }
     340             : 
     341        1344 :     BIND(&if_left_bigint);
     342             :     {
     343        2688 :       GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback);
     344        2688 :       Branch(IsBigInt(rhs), &if_bigint, &call_with_any_feedback);
     345        1344 :     }
     346             :   }
     347             : 
     348        1344 :   BIND(&check_rhsisoddball);
     349             :   {
     350             :     // Check if rhs is an oddball. At this point we know that lhs is either a
     351             :     // Smi, a HeapNumber, or an oddball, and that rhs is not a number or Smi.
     352        2688 :     Node* rhs_instance_type = LoadInstanceType(rhs);
     353        2688 :     GotoIf(IsBigIntInstanceType(rhs_instance_type), &if_bigint);
     354        2688 :     Node* rhs_is_oddball = InstanceTypeEqual(rhs_instance_type, ODDBALL_TYPE);
     355        1344 :     GotoIfNot(rhs_is_oddball, &call_with_any_feedback);
     356             : 
     357             :     var_type_feedback.Bind(
     358        1344 :         SmiConstant(BinaryOperationFeedback::kNumberOrOddball));
     359        1344 :     Goto(&call_stub);
     360             :   }
     361             : 
     362             :   // This handles the case where at least one input is a BigInt.
     363        1344 :   BIND(&if_bigint);
     364             :   {
     365        1344 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kBigInt));
     366             :     var_result.Bind(CallRuntime(Runtime::kBigIntBinaryOp, context, lhs, rhs,
     367        1344 :                                 SmiConstant(op)));
     368        1344 :     Goto(&end);
     369             :   }
     370             : 
     371        1344 :   BIND(&call_with_any_feedback);
     372             :   {
     373        1344 :     var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kAny));
     374        1344 :     Goto(&call_stub);
     375             :   }
     376             : 
     377        1344 :   BIND(&call_stub);
     378             :   {
     379             :     Node* result;
     380        1344 :     switch (op) {
     381             :       case Operation::kSubtract:
     382         672 :         result = CallBuiltin(Builtins::kSubtract, context, lhs, rhs);
     383         336 :         break;
     384             :       case Operation::kMultiply:
     385         672 :         result = CallBuiltin(Builtins::kMultiply, context, lhs, rhs);
     386         336 :         break;
     387             :       case Operation::kDivide:
     388         672 :         result = CallBuiltin(Builtins::kDivide, context, lhs, rhs);
     389         336 :         break;
     390             :       case Operation::kModulus:
     391         672 :         result = CallBuiltin(Builtins::kModulus, context, lhs, rhs);
     392         336 :         break;
     393             :       default:
     394           0 :         UNREACHABLE();
     395             :     }
     396        1344 :     var_result.Bind(result);
     397        1344 :     Goto(&end);
     398             :   }
     399             : 
     400        1344 :   BIND(&end);
     401        1344 :   UpdateFeedback(var_type_feedback.value(), feedback_vector, slot_id);
     402        2688 :   return var_result.value();
     403             : }
     404             : 
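Both the add path above and the smiFunction lambdas below follow the same
pattern: attempt the operation in Smi arithmetic and bail out to a
double-precision HeapNumber result on overflow (TrySmiAdd / TrySmiSub plus the
float label). Below is a standalone sketch of that pattern; the 31-bit Smi
range and the std::variant stand-in for tagged values are assumptions made
only for illustration.

    #include <cstdint>
    #include <cstdio>
    #include <variant>

    // Assumed 31-bit Smi payload; the real range depends on pointer tagging.
    constexpr int64_t kSmiMin = -(int64_t{1} << 30);
    constexpr int64_t kSmiMax = (int64_t{1} << 30) - 1;

    // int32_t stands in for a Smi result, double for a HeapNumber result.
    using Value = std::variant<int32_t, double>;

    Value AddWithSmiFastPath(int32_t lhs, int32_t rhs) {
      int64_t wide = int64_t{lhs} + int64_t{rhs};  // cannot overflow int64_t
      if (wide >= kSmiMin && wide <= kSmiMax) {
        // Stay on the fast path; feedback would be kSignedSmall.
        return static_cast<int32_t>(wide);
      }
      // Overflowed the Smi range: redo in double precision, mirroring the
      // do_fadd / do_float_operation labels; feedback would be kNumber.
      return static_cast<double>(lhs) + static_cast<double>(rhs);
    }

    int main() {
      Value v = AddWithSmiFastPath(static_cast<int32_t>(kSmiMax), 1);
      if (std::holds_alternative<double>(v)) {
        std::printf("overflowed to double: %f\n", std::get<double>(v));
      }
    }
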
     405         336 : Node* BinaryOpAssembler::Generate_SubtractWithFeedback(Node* context, Node* lhs,
     406             :                                                        Node* rhs, Node* slot_id,
     407             :                                                        Node* feedback_vector,
     408             :                                                        bool rhs_is_smi) {
     409         336 :   auto smiFunction = [=](Node* lhs, Node* rhs, Variable* var_type_feedback) {
     410         336 :     Label end(this);
     411         336 :     TVARIABLE(Number, var_result);
     412             :     // If rhs is known to be a Smi (for SubSmi), we want to fast path the Smi
     413             :     // operation. For the normal Sub operation, we want to fast path both
     414             :     // Smi and Number operations, so this path should not be marked as Deferred.
     415             :     Label if_overflow(this,
     416         672 :                       rhs_is_smi ? Label::kDeferred : Label::kNonDeferred);
     417         672 :     var_result = TrySmiSub(CAST(lhs), CAST(rhs), &if_overflow);
     418         672 :     var_type_feedback->Bind(SmiConstant(BinaryOperationFeedback::kSignedSmall));
     419         336 :     Goto(&end);
     420             : 
     421         336 :     BIND(&if_overflow);
     422             :     {
     423         672 :       var_type_feedback->Bind(SmiConstant(BinaryOperationFeedback::kNumber));
     424        1344 :       Node* value = Float64Sub(SmiToFloat64(lhs), SmiToFloat64(rhs));
     425         672 :       var_result = AllocateHeapNumberWithValue(value);
     426         336 :       Goto(&end);
     427             :     }
     428             : 
     429         336 :     BIND(&end);
     430         336 :     return var_result.value();
     431         672 :   };
     432             :   auto floatFunction = [=](Node* lhs, Node* rhs) {
     433         672 :     return Float64Sub(lhs, rhs);
     434         336 :   };
     435             :   return Generate_BinaryOperationWithFeedback(
     436             :       context, lhs, rhs, slot_id, feedback_vector, smiFunction, floatFunction,
     437        1344 :       Operation::kSubtract, rhs_is_smi);
     438             : }
     439             : 
     440         336 : Node* BinaryOpAssembler::Generate_MultiplyWithFeedback(Node* context, Node* lhs,
     441             :                                                        Node* rhs, Node* slot_id,
     442             :                                                        Node* feedback_vector,
     443             :                                                        bool rhs_is_smi) {
     444         336 :   auto smiFunction = [=](Node* lhs, Node* rhs, Variable* var_type_feedback) {
     445         336 :     TNode<Number> result = SmiMul(CAST(lhs), CAST(rhs));
     446             :     var_type_feedback->Bind(SelectSmiConstant(
     447         672 :         TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall,
     448        1008 :         BinaryOperationFeedback::kNumber));
     449         336 :     return result;
     450             :   };
     451             :   auto floatFunction = [=](Node* lhs, Node* rhs) {
     452         672 :     return Float64Mul(lhs, rhs);
     453         336 :   };
     454             :   return Generate_BinaryOperationWithFeedback(
     455             :       context, lhs, rhs, slot_id, feedback_vector, smiFunction, floatFunction,
     456        1344 :       Operation::kMultiply, rhs_is_smi);
     457             : }
     458             : 
     459         336 : Node* BinaryOpAssembler::Generate_DivideWithFeedback(
     460             :     Node* context, Node* dividend, Node* divisor, Node* slot_id,
     461             :     Node* feedback_vector, bool rhs_is_smi) {
     462         336 :   auto smiFunction = [=](Node* lhs, Node* rhs, Variable* var_type_feedback) {
     463         336 :     VARIABLE(var_result, MachineRepresentation::kTagged);
     464             :     // If rhs is known to be a Smi (for DivSmi), we want to fast path the Smi
     465             :     // operation. For the normal Div operation, we want to fast path both
     466             :     // Smi and Number operations, so this path should not be marked as Deferred.
     467         672 :     Label bailout(this, rhs_is_smi ? Label::kDeferred : Label::kNonDeferred),
     468         672 :         end(this);
     469         672 :     var_result.Bind(TrySmiDiv(CAST(lhs), CAST(rhs), &bailout));
     470         672 :     var_type_feedback->Bind(SmiConstant(BinaryOperationFeedback::kSignedSmall));
     471         336 :     Goto(&end);
     472             : 
     473         336 :     BIND(&bailout);
     474             :     {
     475             :       var_type_feedback->Bind(
     476         672 :           SmiConstant(BinaryOperationFeedback::kSignedSmallInputs));
     477        1344 :       Node* value = Float64Div(SmiToFloat64(lhs), SmiToFloat64(rhs));
     478         672 :       var_result.Bind(AllocateHeapNumberWithValue(value));
     479         336 :       Goto(&end);
     480             :     }
     481             : 
     482         336 :     BIND(&end);
     483         672 :     return var_result.value();
     484         672 :   };
     485             :   auto floatFunction = [=](Node* lhs, Node* rhs) {
     486         672 :     return Float64Div(lhs, rhs);
     487         336 :   };
     488             :   return Generate_BinaryOperationWithFeedback(
     489             :       context, dividend, divisor, slot_id, feedback_vector, smiFunction,
     490        1344 :       floatFunction, Operation::kDivide, rhs_is_smi);
     491             : }
     492             : 
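The division smiFunction above records kSignedSmallInputs rather than
kSignedSmall when TrySmiDiv bails out: the inputs were Smis, but the quotient
cannot be represented as one. The sketch below lists the bailout conditions
such a fast path has to check; the exact set is an assumption here, derived
only from the requirement that the fast path may return nothing but a small
integer.

    #include <climits>
    #include <cstdio>
    #include <optional>

    // Hypothetical sketch of when a Smi / Smi division cannot yield a Smi.
    std::optional<int32_t> TrySmiDivSketch(int32_t dividend, int32_t divisor) {
      if (divisor == 0) return std::nullopt;                   // division by zero
      if (dividend == 0 && divisor < 0) return std::nullopt;   // result is -0.0
      if (dividend == INT32_MIN && divisor == -1) return std::nullopt;  // overflow
      if (dividend % divisor != 0) return std::nullopt;        // not an integer
      return dividend / divisor;  // representable: stay on the Smi fast path
    }

    int main() {
      // 7 / 2 is not an integer, so the fast path bails out and the code
      // above records kSignedSmallInputs and produces a HeapNumber instead.
      std::printf("bails out: %s\n", TrySmiDivSketch(7, 2) ? "no" : "yes");
    }
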
     493         336 : Node* BinaryOpAssembler::Generate_ModulusWithFeedback(
     494             :     Node* context, Node* dividend, Node* divisor, Node* slot_id,
     495             :     Node* feedback_vector, bool rhs_is_smi) {
     496         336 :   auto smiFunction = [=](Node* lhs, Node* rhs, Variable* var_type_feedback) {
     497         336 :     TNode<Number> result = SmiMod(CAST(lhs), CAST(rhs));
     498             :     var_type_feedback->Bind(SelectSmiConstant(
     499         672 :         TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall,
     500        1008 :         BinaryOperationFeedback::kNumber));
     501         336 :     return result;
     502             :   };
     503             :   auto floatFunction = [=](Node* lhs, Node* rhs) {
     504         672 :     return Float64Mod(lhs, rhs);
     505         336 :   };
     506             :   return Generate_BinaryOperationWithFeedback(
     507             :       context, dividend, divisor, slot_id, feedback_vector, smiFunction,
     508        1344 :       floatFunction, Operation::kModulus, rhs_is_smi);
     509             : }
     510             : 
     511         336 : Node* BinaryOpAssembler::Generate_ExponentiateWithFeedback(
     512             :     Node* context, Node* base, Node* exponent, Node* slot_id,
     513             :     Node* feedback_vector, bool rhs_is_smi) {
     514             :   // We currently don't optimize exponentiation based on feedback.
     515         336 :   Node* dummy_feedback = SmiConstant(BinaryOperationFeedback::kAny);
     516         336 :   UpdateFeedback(dummy_feedback, feedback_vector, slot_id);
     517         672 :   return CallBuiltin(Builtins::kExponentiate, context, base, exponent);
     518             : }
     519             : 
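Every generator above ends by passing the value left in var_type_feedback to
UpdateFeedback. The sketch below shows how such a feedback slot can accumulate
observations across calls, under the assumption (not shown in this file) that
feedback values are encoded as bit sets and combined with a bitwise OR, so the
recorded state only ever widens towards kAny; the bit encodings are
illustrative, not V8's actual constants.

    #include <cstdint>
    #include <cstdio>

    constexpr uint32_t kSignedSmall = 0x1;
    constexpr uint32_t kNumber      = 0x3;   // illustrative superset of kSignedSmall
    constexpr uint32_t kAny         = 0x7f;  // illustrative superset of everything

    struct FeedbackSlot { uint32_t bits = 0; };

    // OR the newly observed feedback into the slot; report whether the slot
    // changed (i.e. whether the compiler pipeline learned something new).
    bool UpdateFeedbackSketch(FeedbackSlot* slot, uint32_t observed) {
      uint32_t combined = slot->bits | observed;
      bool changed = combined != slot->bits;
      slot->bits = combined;
      return changed;
    }

    int main() {
      FeedbackSlot slot;
      UpdateFeedbackSketch(&slot, kSignedSmall);             // first call: two Smis
      bool widened = UpdateFeedbackSketch(&slot, kNumber);   // later: a double shows up
      std::printf("widened=%d bits=0x%x\n", widened, slot.bits);
    }
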
     520             : }  // namespace internal
     521       94089 : }  // namespace v8

Generated by: LCOV version 1.10