LCOV - code coverage report
Current view: top level - src/heap - heap.h (source / functions) Hit Total Coverage
Test: app.info Lines: 131 139 94.2 %
Date: 2017-10-20 Functions: 23 34 67.6 %

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #ifndef V8_HEAP_HEAP_H_
       6             : #define V8_HEAP_HEAP_H_
       7             : 
       8             : #include <cmath>
       9             : #include <map>
      10             : #include <unordered_map>
      11             : #include <vector>
      12             : 
      13             : // Clients of this interface shouldn't depend on lots of heap internals.
      14             : // Do not include anything from src/heap here!
      15             : #include "include/v8.h"
      16             : #include "src/allocation.h"
      17             : #include "src/assert-scope.h"
      18             : #include "src/base/atomic-utils.h"
      19             : #include "src/globals.h"
      20             : #include "src/heap-symbols.h"
      21             : #include "src/objects.h"
      22             : #include "src/objects/hash-table.h"
      23             : #include "src/objects/string-table.h"
      24             : #include "src/visitors.h"
      25             : 
      26             : namespace v8 {
      27             : 
      28             : namespace debug {
      29             : typedef void (*OutOfMemoryCallback)(void* data);
      30             : }  // namespace debug
      31             : 
      32             : namespace internal {
      33             : 
      34             : namespace heap {
      35             : class HeapTester;
      36             : class TestMemoryAllocatorScope;
      37             : }  // namespace heap
      38             : 
      39             : class BytecodeArray;
      40             : class JSArrayBuffer;
      41             : 
      42             : using v8::MemoryPressureLevel;
      43             : 
      44             : // Defines all the roots in Heap.
      45             : #define STRONG_ROOT_LIST(V)                                                    \
      46             :   /* Cluster the most popular ones in a few cache lines here at the top.    */ \
      47             :   /* The first 32 entries are most often used in the startup snapshot and   */ \
      48             :   /* can use a shorter representation in the serialization format.          */ \
      49             :   V(Map, free_space_map, FreeSpaceMap)                                         \
      50             :   V(Map, one_pointer_filler_map, OnePointerFillerMap)                          \
      51             :   V(Map, two_pointer_filler_map, TwoPointerFillerMap)                          \
      52             :   V(Oddball, uninitialized_value, UninitializedValue)                          \
      53             :   V(Oddball, undefined_value, UndefinedValue)                                  \
      54             :   V(Oddball, the_hole_value, TheHoleValue)                                     \
      55             :   V(Oddball, null_value, NullValue)                                            \
      56             :   V(Oddball, true_value, TrueValue)                                            \
      57             :   V(Oddball, false_value, FalseValue)                                          \
      58             :   V(String, empty_string, empty_string)                                        \
      59             :   V(Map, meta_map, MetaMap)                                                    \
      60             :   V(Map, byte_array_map, ByteArrayMap)                                         \
      61             :   V(Map, fixed_array_map, FixedArrayMap)                                       \
      62             :   V(Map, fixed_cow_array_map, FixedCOWArrayMap)                                \
      63             :   V(Map, hash_table_map, HashTableMap)                                         \
      64             :   V(Map, symbol_map, SymbolMap)                                                \
      65             :   V(Map, one_byte_string_map, OneByteStringMap)                                \
      66             :   V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap)       \
      67             :   V(Map, scope_info_map, ScopeInfoMap)                                         \
      68             :   V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
      69             :   V(Map, code_map, CodeMap)                                                    \
      70             :   V(Map, function_context_map, FunctionContextMap)                             \
      71             :   V(Map, cell_map, CellMap)                                                    \
      72             :   V(Map, weak_cell_map, WeakCellMap)                                           \
      73             :   V(Map, global_property_cell_map, GlobalPropertyCellMap)                      \
      74             :   V(Map, foreign_map, ForeignMap)                                              \
      75             :   V(Map, heap_number_map, HeapNumberMap)                                       \
      76             :   V(Map, transition_array_map, TransitionArrayMap)                             \
      77             :   V(Map, feedback_vector_map, FeedbackVectorMap)                               \
      78             :   V(ScopeInfo, empty_scope_info, EmptyScopeInfo)                               \
      79             :   V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
      80             :   V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
      81             :   /* Entries beyond the first 32                                            */ \
      82             :   /* The roots above this line should be boring from a GC point of view.    */ \
      83             :   /* This means they are never in new space and never on a page that is     */ \
      84             :   /* being compacted.                                                       */ \
      85             :   /* Oddballs */                                                               \
      86             :   V(Oddball, arguments_marker, ArgumentsMarker)                                \
      87             :   V(Oddball, exception, Exception)                                             \
      88             :   V(Oddball, termination_exception, TerminationException)                      \
      89             :   V(Oddball, optimized_out, OptimizedOut)                                      \
      90             :   V(Oddball, stale_register, StaleRegister)                                    \
      91             :   /* Context maps */                                                           \
      92             :   V(Map, native_context_map, NativeContextMap)                                 \
      93             :   V(Map, module_context_map, ModuleContextMap)                                 \
      94             :   V(Map, eval_context_map, EvalContextMap)                                     \
      95             :   V(Map, script_context_map, ScriptContextMap)                                 \
      96             :   V(Map, block_context_map, BlockContextMap)                                   \
      97             :   V(Map, catch_context_map, CatchContextMap)                                   \
      98             :   V(Map, with_context_map, WithContextMap)                                     \
      99             :   V(Map, debug_evaluate_context_map, DebugEvaluateContextMap)                  \
     100             :   V(Map, script_context_table_map, ScriptContextTableMap)                      \
     101             :   /* Maps */                                                                   \
     102             :   V(Map, fixed_double_array_map, FixedDoubleArrayMap)                          \
     103             :   V(Map, mutable_heap_number_map, MutableHeapNumberMap)                        \
     104             :   V(Map, ordered_hash_table_map, OrderedHashTableMap)                          \
     105             :   V(Map, unseeded_number_dictionary_map, UnseededNumberDictionaryMap)          \
     106             :   V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap)            \
     107             :   V(Map, small_ordered_hash_map_map, SmallOrderedHashMapMap)                   \
     108             :   V(Map, small_ordered_hash_set_map, SmallOrderedHashSetMap)                   \
     109             :   V(Map, message_object_map, JSMessageObjectMap)                               \
     110             :   V(Map, external_map, ExternalMap)                                            \
     111             :   V(Map, bytecode_array_map, BytecodeArrayMap)                                 \
     112             :   V(Map, module_info_map, ModuleInfoMap)                                       \
     113             :   V(Map, no_closures_cell_map, NoClosuresCellMap)                              \
     114             :   V(Map, one_closure_cell_map, OneClosureCellMap)                              \
     115             :   V(Map, many_closures_cell_map, ManyClosuresCellMap)                          \
     116             :   V(Map, property_array_map, PropertyArrayMap)                                 \
     117             :   V(Map, bigint_map, BigIntMap)                                                \
     118             :   /* String maps */                                                            \
     119             :   V(Map, native_source_string_map, NativeSourceStringMap)                      \
     120             :   V(Map, string_map, StringMap)                                                \
     121             :   V(Map, cons_one_byte_string_map, ConsOneByteStringMap)                       \
     122             :   V(Map, cons_string_map, ConsStringMap)                                       \
     123             :   V(Map, thin_one_byte_string_map, ThinOneByteStringMap)                       \
     124             :   V(Map, thin_string_map, ThinStringMap)                                       \
     125             :   V(Map, sliced_string_map, SlicedStringMap)                                   \
     126             :   V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap)                   \
     127             :   V(Map, external_string_map, ExternalStringMap)                               \
     128             :   V(Map, external_string_with_one_byte_data_map,                               \
     129             :     ExternalStringWithOneByteDataMap)                                          \
     130             :   V(Map, external_one_byte_string_map, ExternalOneByteStringMap)               \
     131             :   V(Map, short_external_string_map, ShortExternalStringMap)                    \
     132             :   V(Map, short_external_string_with_one_byte_data_map,                         \
     133             :     ShortExternalStringWithOneByteDataMap)                                     \
     134             :   V(Map, internalized_string_map, InternalizedStringMap)                       \
     135             :   V(Map, external_internalized_string_map, ExternalInternalizedStringMap)      \
     136             :   V(Map, external_internalized_string_with_one_byte_data_map,                  \
     137             :     ExternalInternalizedStringWithOneByteDataMap)                              \
     138             :   V(Map, external_one_byte_internalized_string_map,                            \
     139             :     ExternalOneByteInternalizedStringMap)                                      \
     140             :   V(Map, short_external_internalized_string_map,                               \
     141             :     ShortExternalInternalizedStringMap)                                        \
     142             :   V(Map, short_external_internalized_string_with_one_byte_data_map,            \
     143             :     ShortExternalInternalizedStringWithOneByteDataMap)                         \
     144             :   V(Map, short_external_one_byte_internalized_string_map,                      \
     145             :     ShortExternalOneByteInternalizedStringMap)                                 \
     146             :   V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap)    \
     147             :   /* Array element maps */                                                     \
     148             :   V(Map, fixed_uint8_array_map, FixedUint8ArrayMap)                            \
     149             :   V(Map, fixed_int8_array_map, FixedInt8ArrayMap)                              \
     150             :   V(Map, fixed_uint16_array_map, FixedUint16ArrayMap)                          \
     151             :   V(Map, fixed_int16_array_map, FixedInt16ArrayMap)                            \
     152             :   V(Map, fixed_uint32_array_map, FixedUint32ArrayMap)                          \
     153             :   V(Map, fixed_int32_array_map, FixedInt32ArrayMap)                            \
     154             :   V(Map, fixed_float32_array_map, FixedFloat32ArrayMap)                        \
     155             :   V(Map, fixed_float64_array_map, FixedFloat64ArrayMap)                        \
     156             :   V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap)             \
     157             :   /* Oddball maps */                                                           \
     158             :   V(Map, undefined_map, UndefinedMap)                                          \
     159             :   V(Map, the_hole_map, TheHoleMap)                                             \
     160             :   V(Map, null_map, NullMap)                                                    \
     161             :   V(Map, boolean_map, BooleanMap)                                              \
     162             :   V(Map, uninitialized_map, UninitializedMap)                                  \
     163             :   V(Map, arguments_marker_map, ArgumentsMarkerMap)                             \
     164             :   V(Map, exception_map, ExceptionMap)                                          \
     165             :   V(Map, termination_exception_map, TerminationExceptionMap)                   \
     166             :   V(Map, optimized_out_map, OptimizedOutMap)                                   \
     167             :   V(Map, stale_register_map, StaleRegisterMap)                                 \
     168             :   /* Canonical empty values */                                                 \
     169             :   V(EnumCache, empty_enum_cache, EmptyEnumCache)                               \
     170             :   V(PropertyArray, empty_property_array, EmptyPropertyArray)                   \
     171             :   V(ByteArray, empty_byte_array, EmptyByteArray)                               \
     172             :   V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array)        \
     173             :   V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array)          \
     174             :   V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array)      \
     175             :   V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array)        \
     176             :   V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array)      \
     177             :   V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array)        \
     178             :   V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array)    \
     179             :   V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array)    \
     180             :   V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array,                      \
     181             :     EmptyFixedUint8ClampedArray)                                               \
     182             :   V(Script, empty_script, EmptyScript)                                         \
     183             :   V(Cell, undefined_cell, UndefinedCell)                                       \
     184             :   V(FixedArray, empty_sloppy_arguments_elements, EmptySloppyArgumentsElements) \
     185             :   V(SeededNumberDictionary, empty_slow_element_dictionary,                     \
     186             :     EmptySlowElementDictionary)                                                \
     187             :   V(FixedArray, empty_ordered_hash_table, EmptyOrderedHashTable)               \
     188             :   V(PropertyCell, empty_property_cell, EmptyPropertyCell)                      \
     189             :   V(WeakCell, empty_weak_cell, EmptyWeakCell)                                  \
     190             :   V(InterceptorInfo, noop_interceptor_info, NoOpInterceptorInfo)               \
     191             :   /* Protectors */                                                             \
     192             :   V(Cell, array_constructor_protector, ArrayConstructorProtector)              \
     193             :   V(PropertyCell, array_protector, ArrayProtector)                             \
     194             :   V(Cell, is_concat_spreadable_protector, IsConcatSpreadableProtector)         \
     195             :   V(PropertyCell, species_protector, SpeciesProtector)                         \
     196             :   V(Cell, string_length_protector, StringLengthProtector)                      \
     197             :   V(Cell, fast_array_iteration_protector, FastArrayIterationProtector)         \
     198             :   V(PropertyCell, array_iterator_protector, ArrayIteratorProtector)            \
     199             :   V(PropertyCell, array_buffer_neutering_protector,                            \
     200             :     ArrayBufferNeuteringProtector)                                             \
     201             :   /* Special numbers */                                                        \
     202             :   V(HeapNumber, nan_value, NanValue)                                           \
     203             :   V(HeapNumber, hole_nan_value, HoleNanValue)                                  \
     204             :   V(HeapNumber, infinity_value, InfinityValue)                                 \
     205             :   V(HeapNumber, minus_zero_value, MinusZeroValue)                              \
     206             :   V(HeapNumber, minus_infinity_value, MinusInfinityValue)                      \
     207             :   /* Caches */                                                                 \
     208             :   V(FixedArray, number_string_cache, NumberStringCache)                        \
     209             :   V(FixedArray, single_character_string_cache, SingleCharacterStringCache)     \
     210             :   V(FixedArray, string_split_cache, StringSplitCache)                          \
     211             :   V(FixedArray, regexp_multiple_cache, RegExpMultipleCache)                    \
     212             :   /* Lists and dictionaries */                                                 \
     213             :   V(NameDictionary, empty_property_dictionary, EmptyPropertyDictionary)        \
     214             :   V(NameDictionary, public_symbol_table, PublicSymbolTable)                    \
     215             :   V(NameDictionary, api_symbol_table, ApiSymbolTable)                          \
     216             :   V(NameDictionary, api_private_symbol_table, ApiPrivateSymbolTable)           \
     217             :   V(Object, script_list, ScriptList)                                           \
     218             :   V(UnseededNumberDictionary, code_stubs, CodeStubs)                           \
     219             :   V(FixedArray, materialized_objects, MaterializedObjects)                     \
     220             :   V(FixedArray, microtask_queue, MicrotaskQueue)                               \
     221             :   V(FixedArray, detached_contexts, DetachedContexts)                           \
     222             :   V(HeapObject, retaining_path_targets, RetainingPathTargets)                  \
     223             :   V(ArrayList, retained_maps, RetainedMaps)                                    \
     224             :   V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)           \
     225             :   /* weak_new_space_object_to_code_list is an array of weak cells, where */    \
     226             :   /* slots with even indices refer to the weak object, and the subsequent */   \
     227             :   /* slots refer to the code with the reference to the weak object. */         \
     228             :   V(ArrayList, weak_new_space_object_to_code_list,                             \
     229             :     WeakNewSpaceObjectToCodeList)                                              \
     230             :   /* Feedback vectors that we need for code coverage or type profile */        \
     231             :   V(Object, feedback_vectors_for_profiling_tools,                              \
     232             :     FeedbackVectorsForProfilingTools)                                          \
     233             :   V(Object, weak_stack_trace_list, WeakStackTraceList)                         \
     234             :   V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos)       \
     235             :   V(FixedArray, serialized_templates, SerializedTemplates)                     \
     236             :   V(FixedArray, serialized_global_proxy_sizes, SerializedGlobalProxySizes)     \
     237             :   V(TemplateList, message_listeners, MessageListeners)                         \
     238             :   /* JS Entries */                                                             \
     239             :   V(Code, js_entry_code, JsEntryCode)                                          \
     240             :   V(Code, js_construct_entry_code, JsConstructEntryCode)
     241             : 
     242             : // Entries in this list are limited to Smis and are not visited during GC.
     243             : #define SMI_ROOT_LIST(V)                                                       \
     244             :   V(Smi, stack_limit, StackLimit)                                              \
     245             :   V(Smi, real_stack_limit, RealStackLimit)                                     \
     246             :   V(Smi, last_script_id, LastScriptId)                                         \
     247             :   V(Smi, hash_seed, HashSeed)                                                  \
     248             :   /* To distinguish the function templates, so that we can find them in the */ \
     249             :   /* function cache of the native context. */                                  \
     250             :   V(Smi, next_template_serial_number, NextTemplateSerialNumber)                \
     251             :   V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset)     \
     252             :   V(Smi, construct_stub_create_deopt_pc_offset,                                \
     253             :     ConstructStubCreateDeoptPCOffset)                                          \
     254             :   V(Smi, construct_stub_invoke_deopt_pc_offset,                                \
     255             :     ConstructStubInvokeDeoptPCOffset)                                          \
     256             :   V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset)                 \
     257             :   V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)                 \
     258             :   V(Smi, interpreter_entry_return_pc_offset, InterpreterEntryReturnPCOffset)
     259             : 
     260             : #define ROOT_LIST(V)  \
     261             :   STRONG_ROOT_LIST(V) \
     262             :   SMI_ROOT_LIST(V)    \
     263             :   V(StringTable, string_table, StringTable)
     264             : 
     265             : 
     266             : // Heap roots that are known to be immortal immovable, for which we can safely
     267             : // skip write barriers. This list is not complete and has omissions.
     268             : #define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
     269             :   V(ArgumentsMarker)                    \
     270             :   V(ArgumentsMarkerMap)                 \
     271             :   V(ArrayBufferNeuteringProtector)      \
     272             :   V(ArrayIteratorProtector)             \
     273             :   V(ArrayProtector)                     \
     274             :   V(BigIntMap)                          \
     275             :   V(BlockContextMap)                    \
     276             :   V(BooleanMap)                         \
     277             :   V(ByteArrayMap)                       \
     278             :   V(BytecodeArrayMap)                   \
     279             :   V(CatchContextMap)                    \
     280             :   V(CellMap)                            \
     281             :   V(CodeMap)                            \
     282             :   V(EmptyByteArray)                     \
     283             :   V(EmptyDescriptorArray)               \
     284             :   V(EmptyFixedArray)                    \
     285             :   V(EmptyFixedFloat32Array)             \
     286             :   V(EmptyFixedFloat64Array)             \
     287             :   V(EmptyFixedInt16Array)               \
     288             :   V(EmptyFixedInt32Array)               \
     289             :   V(EmptyFixedInt8Array)                \
     290             :   V(EmptyFixedUint16Array)              \
     291             :   V(EmptyFixedUint32Array)              \
     292             :   V(EmptyFixedUint8Array)               \
     293             :   V(EmptyFixedUint8ClampedArray)        \
     294             :   V(EmptyPropertyCell)                  \
     295             :   V(EmptyScopeInfo)                     \
     296             :   V(EmptyScript)                        \
     297             :   V(EmptySloppyArgumentsElements)       \
     298             :   V(EmptySlowElementDictionary)         \
     299             :   V(empty_string)                       \
     300             :   V(EmptyWeakCell)                      \
     301             :   V(EvalContextMap)                     \
     302             :   V(Exception)                          \
     303             :   V(FalseValue)                         \
     304             :   V(FastArrayIterationProtector)        \
     305             :   V(FixedArrayMap)                      \
     306             :   V(FixedCOWArrayMap)                   \
     307             :   V(FixedDoubleArrayMap)                \
     308             :   V(ForeignMap)                         \
     309             :   V(FreeSpaceMap)                       \
     310             :   V(FunctionContextMap)                 \
     311             :   V(GlobalPropertyCellMap)              \
     312             :   V(HashTableMap)                       \
     313             :   V(HeapNumberMap)                      \
     314             :   V(HoleNanValue)                       \
     315             :   V(InfinityValue)                      \
     316             :   V(IsConcatSpreadableProtector)        \
     317             :   V(JsConstructEntryCode)               \
     318             :   V(JsEntryCode)                        \
     319             :   V(JSMessageObjectMap)                 \
     320             :   V(ManyClosuresCellMap)                \
     321             :   V(MetaMap)                            \
     322             :   V(MinusInfinityValue)                 \
     323             :   V(MinusZeroValue)                     \
     324             :   V(ModuleContextMap)                   \
     325             :   V(ModuleInfoMap)                      \
     326             :   V(MutableHeapNumberMap)               \
     327             :   V(NanValue)                           \
     328             :   V(NativeContextMap)                   \
     329             :   V(NoClosuresCellMap)                  \
     330             :   V(NullMap)                            \
     331             :   V(NullValue)                          \
     332             :   V(OneClosureCellMap)                  \
     333             :   V(OnePointerFillerMap)                \
     334             :   V(OptimizedOut)                       \
     335             :   V(OrderedHashTableMap)                \
     336             :   V(PropertyArrayMap)                   \
     337             :   V(SmallOrderedHashMapMap)             \
     338             :   V(SmallOrderedHashSetMap)             \
     339             :   V(ScopeInfoMap)                       \
     340             :   V(ScriptContextMap)                   \
     341             :   V(SharedFunctionInfoMap)              \
     342             :   V(SloppyArgumentsElementsMap)         \
     343             :   V(SpeciesProtector)                   \
     344             :   V(StaleRegister)                      \
     345             :   V(StringLengthProtector)              \
     346             :   V(SymbolMap)                          \
     347             :   V(TerminationException)               \
     348             :   V(TheHoleMap)                         \
     349             :   V(TheHoleValue)                       \
     350             :   V(TransitionArrayMap)                 \
     351             :   V(TrueValue)                          \
     352             :   V(TwoPointerFillerMap)                \
     353             :   V(UndefinedCell)                      \
     354             :   V(UndefinedMap)                       \
     355             :   V(UndefinedValue)                     \
     356             :   V(UninitializedMap)                   \
     357             :   V(UninitializedValue)                 \
     358             :   V(WeakCellMap)                        \
     359             :   V(WithContextMap)                     \
     360             :   PRIVATE_SYMBOL_LIST(V)
     361             : 
     362             : #define FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(heap, array, start, length) \
     363             :   do {                                                                 \
     364             :     heap->RecordFixedArrayElements(array, start, length);              \
     365             :     heap->incremental_marking()->RecordWrites(array);                  \
     366             :   } while (false)
     367             : 
     368             : class AllocationObserver;
     369             : class ArrayBufferTracker;
     370             : class ConcurrentMarking;
     371             : class GCIdleTimeAction;
     372             : class GCIdleTimeHandler;
     373             : class GCIdleTimeHeapState;
     374             : class GCTracer;
     375             : class HeapObjectsFilter;
     376             : class HeapStats;
     377             : class HistogramTimer;
     378             : class Isolate;
     379             : class LocalEmbedderHeapTracer;
     380             : class MemoryAllocator;
     381             : class MemoryReducer;
     382             : class MinorMarkCompactCollector;
     383             : class ObjectIterator;
     384             : class ObjectStats;
     385             : class Page;
     386             : class PagedSpace;
     387             : class RootVisitor;
     388             : class Scavenger;
     389             : class ScavengeJob;
     390             : class Space;
     391             : class StoreBuffer;
     392             : class TracePossibleWrapperReporter;
     393             : class WeakObjectRetainer;
     394             : 
     395             : typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
     396             : 
     397             : enum ArrayStorageAllocationMode {
     398             :   DONT_INITIALIZE_ARRAY_ELEMENTS,
     399             :   INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
     400             : };
     401             : 
     402             : enum class ClearRecordedSlots { kYes, kNo };
     403             : 
     404             : enum class FixedArrayVisitationMode { kRegular, kIncremental };
     405             : 
     406             : enum class TraceRetainingPathMode { kEnabled, kDisabled };
     407             : 
     408             : enum class GarbageCollectionReason {
     409             :   kUnknown = 0,
     410             :   kAllocationFailure = 1,
     411             :   kAllocationLimit = 2,
     412             :   kContextDisposal = 3,
     413             :   kCountersExtension = 4,
     414             :   kDebugger = 5,
     415             :   kDeserializer = 6,
     416             :   kExternalMemoryPressure = 7,
     417             :   kFinalizeMarkingViaStackGuard = 8,
     418             :   kFinalizeMarkingViaTask = 9,
     419             :   kFullHashtable = 10,
     420             :   kHeapProfiler = 11,
     421             :   kIdleTask = 12,
     422             :   kLastResort = 13,
     423             :   kLowMemoryNotification = 14,
     424             :   kMakeHeapIterable = 15,
     425             :   kMemoryPressure = 16,
     426             :   kMemoryReducer = 17,
     427             :   kRuntime = 18,
     428             :   kSamplingProfiler = 19,
     429             :   kSnapshotCreator = 20,
     430             :   kTesting = 21
     431             :   // If you add new items here, then update the incremental_marking_reason,
     432             :   // mark_compact_reason, and scavenge_reason counters in counters.h.
     433             :   // Also update src/tools/metrics/histograms/histograms.xml in chromium.
     434             : };
     435             : 
     436             : enum class YoungGenerationHandling {
     437             :   kRegularScavenge = 0,
     438             :   kFastPromotionDuringScavenge = 1,
     439             :   // Histogram::InspectConstructionArguments in chromium requires us to have at
     440             :   // least three buckets.
     441             :   kUnusedBucket = 2,
     442             :   // If you add new items here, then update the young_generation_handling in
     443             :   // counters.h.
     444             :   // Also update src/tools/metrics/histograms/histograms.xml in chromium.
     445             : };
     446             : 
     447             : class AllocationResult {
     448             :  public:
     449       26420 :   static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
     450       26420 :     return AllocationResult(space);
     451             :   }
     452             : 
     453             :   // Implicit constructor from Object*.
     454  1010936684 :   AllocationResult(Object* object)  // NOLINT
     455  1010936684 :       : object_(object) {
     456             :     // AllocationResults can't return Smis, which are used to represent
     457             :     // failure and the space to retry in.
     458  1010936684 :     CHECK(!object->IsSmi());
     459  1010936684 :   }
     460             : 
     461             :   AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}
     462             : 
     463   639399746 :   inline bool IsRetry() { return object_->IsSmi(); }
     464             :   inline HeapObject* ToObjectChecked();
     465             :   inline AllocationSpace RetrySpace();
     466             : 
     467             :   template <typename T>
     468   106683128 :   bool To(T** obj) {
     469  1606680037 :     if (IsRetry()) return false;
     470   135915623 :     *obj = T::cast(object_);
     471   106243634 :     return true;
     472             :   }
     473             : 
     474             :  private:
     475             :   explicit AllocationResult(AllocationSpace space)
     476       26604 :       : object_(Smi::FromInt(static_cast<int>(space))) {}
     477             : 
     478             :   Object* object_;
     479             : };
     480             : 
     481             : STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize);
     482             : 
     483             : #ifdef DEBUG
     484             : struct CommentStatistic {
     485             :   const char* comment;
     486             :   int size;
     487             :   int count;
     488             :   void Clear() {
     489             :     comment = nullptr;
     490             :     size = 0;
     491             :     count = 0;
     492             :   }
     493             :   // Must be small, since an iteration is used for lookup.
     494             :   static const int kMaxComments = 64;
     495             : };
     496             : #endif
     497             : 
     498             : class NumberAndSizeInfo BASE_EMBEDDED {
     499             :  public:
     500    28162560 :   NumberAndSizeInfo() : number_(0), bytes_(0) {}
     501             : 
     502             :   int number() const { return number_; }
     503           0 :   void increment_number(int num) { number_ += num; }
     504             : 
     505             :   int bytes() const { return bytes_; }
     506           0 :   void increment_bytes(int size) { bytes_ += size; }
     507             : 
     508             :   void clear() {
     509           0 :     number_ = 0;
     510           0 :     bytes_ = 0;
     511             :   }
     512             : 
     513             :  private:
     514             :   int number_;
     515             :   int bytes_;
     516             : };
     517             : 
     518             : // HistogramInfo class for recording a single "bar" of a histogram.  This
     519             : // class is used for collecting statistics to print to the log file.
     520             : class HistogramInfo : public NumberAndSizeInfo {
     521             :  public:
     522    28162560 :   HistogramInfo() : NumberAndSizeInfo(), name_(nullptr) {}
     523             : 
     524             :   const char* name() { return name_; }
     525    16501500 :   void set_name(const char* name) { name_ = name; }
     526             : 
     527             :  private:
     528             :   const char* name_;
     529             : };
     530             : 
     531      213460 : class Heap {
     532             :  public:
     533             :   // Declare all the root indices.  This defines the root list order.
     534             :   enum RootListIndex {
     535             : #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
     536             :     STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
     537             : #undef ROOT_INDEX_DECLARATION
     538             : 
     539             : #define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
     540             :         INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
     541             : #undef STRING_DECLARATION
     542             : 
     543             : #define SYMBOL_INDEX_DECLARATION(name) k##name##RootIndex,
     544             :             PRIVATE_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
     545             : #undef SYMBOL_INDEX_DECLARATION
     546             : 
     547             : #define SYMBOL_INDEX_DECLARATION(name, description) k##name##RootIndex,
     548             :                 PUBLIC_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
     549             :                     WELL_KNOWN_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
     550             : #undef SYMBOL_INDEX_DECLARATION
     551             : 
     552             : // Utility type maps
     553             : #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
     554             :                         STRUCT_LIST(DECLARE_STRUCT_MAP)
     555             : #undef DECLARE_STRUCT_MAP
     556             :                             kStringTableRootIndex,
     557             : 
     558             : #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
     559             :     SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
     560             : #undef ROOT_INDEX_DECLARATION
     561             :         kRootListLength,
     562             :     kStrongRootListLength = kStringTableRootIndex,
     563             :     kSmiRootsStart = kStringTableRootIndex + 1
     564             :   };
     565             : 
     566             :   enum FindMementoMode { kForRuntime, kForGC };
     567             : 
     568             :   enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT, MINOR_MARK_COMPACT };
     569             : 
     570             :   using PretenuringFeedbackMap = std::unordered_map<AllocationSite*, size_t>;
     571             : 
     572             :   // Taking this mutex prevents the GC from entering a phase that relocates
     573             :   // object references.
     574             :   base::Mutex* relocation_mutex() { return &relocation_mutex_; }
     575             : 
     576             :   // Support for partial snapshots.  After calling this we have a linear
     577             :   // space to write objects in each space.
     578             :   struct Chunk {
     579             :     uint32_t size;
     580             :     Address start;
     581             :     Address end;
     582             :   };
     583             :   typedef std::vector<Chunk> Reservation;
     584             : 
     585             :   static const int kInitalOldGenerationLimitFactor = 2;
     586             : 
     587             : #if V8_OS_ANDROID
     588             :   // Don't apply pointer multiplier on Android since it has no swap space and
      589             :   // should instead adapt its heap size based on available physical memory.
     590             :   static const int kPointerMultiplier = 1;
     591             : #else
     592             :   static const int kPointerMultiplier = i::kPointerSize / 4;
     593             : #endif
     594             : 
     595             :   // Semi-space size needs to be a multiple of page size.
     596             :   static const int kMinSemiSpaceSizeInKB =
     597             :       1 * kPointerMultiplier * ((1 << kPageSizeBits) / KB);
     598             :   static const int kMaxSemiSpaceSizeInKB =
     599             :       16 * kPointerMultiplier * ((1 << kPageSizeBits) / KB);
     600             : 
     601             :   // The old space size has to be a multiple of Page::kPageSize.
     602             :   // Sizes are in MB.
     603             :   static const int kMinOldGenerationSize = 128 * kPointerMultiplier;
     604             :   static const int kMaxOldGenerationSize = 1024 * kPointerMultiplier;
     605             : 
     606             :   static const int kTraceRingBufferSize = 512;
     607             :   static const int kStacktraceBufferSize = 512;
     608             : 
     609             :   V8_EXPORT_PRIVATE static const double kMinHeapGrowingFactor;
     610             :   V8_EXPORT_PRIVATE static const double kMaxHeapGrowingFactor;
     611             :   static const double kMaxHeapGrowingFactorMemoryConstrained;
     612             :   static const double kMaxHeapGrowingFactorIdle;
     613             :   static const double kConservativeHeapGrowingFactor;
     614             :   static const double kTargetMutatorUtilization;
     615             : 
     616             :   static const int kNoGCFlags = 0;
     617             :   static const int kReduceMemoryFootprintMask = 1;
     618             :   static const int kAbortIncrementalMarkingMask = 2;
     619             :   static const int kFinalizeIncrementalMarkingMask = 4;
     620             : 
     621             :   // Making the heap iterable requires us to abort incremental marking.
     622             :   static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
     623             : 
     624             :   // The roots that have an index less than this are always in old space.
     625             :   static const int kOldSpaceRoots = 0x20;
     626             : 
     627             :   // The minimum size of a HeapObject on the heap.
     628             :   static const int kMinObjectSizeInWords = 2;
     629             : 
     630             :   static const int kMinPromotedPercentForFastPromotionMode = 90;
     631             : 
     632             :   STATIC_ASSERT(kUndefinedValueRootIndex ==
     633             :                 Internals::kUndefinedValueRootIndex);
     634             :   STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex);
     635             :   STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
     636             :   STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
     637             :   STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
     638             :   STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
     639             : 
     640             :   // Calculates the maximum amount of filler that could be required by the
     641             :   // given alignment.
     642             :   static int GetMaximumFillToAlign(AllocationAlignment alignment);
     643             :   // Calculates the actual amount of filler required for a given address at the
     644             :   // given alignment.
     645             :   static int GetFillToAlign(Address address, AllocationAlignment alignment);
     646             : 
     647             :   template <typename T>
     648             :   static inline bool IsOneByte(T t, int chars);
     649             : 
     650             :   static void FatalProcessOutOfMemory(const char* location,
     651             :                                       bool is_heap_oom = false);
     652             : 
     653             :   V8_EXPORT_PRIVATE static bool RootIsImmortalImmovable(int root_index);
     654             : 
     655             :   // Checks whether the space is valid.
     656             :   static bool IsValidAllocationSpace(AllocationSpace space);
     657             : 
     658             :   // Generated code can embed direct references to non-writable roots if
     659             :   // they are in new space.
     660             :   static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
     661             : 
     662             :   // Zapping is needed for verify heap, and always done in debug builds.
     663             :   static inline bool ShouldZapGarbage() {
     664             : #ifdef DEBUG
     665             :     return true;
     666             : #else
     667             : #ifdef VERIFY_HEAP
     668             :     return FLAG_verify_heap;
     669             : #else
     670             :     return false;
     671             : #endif
     672             : #endif
     673             :   }
     674             : 
     675             :   static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
     676      432237 :     return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
     677             :   }
     678             : 
     679             :   static inline GarbageCollector YoungGenerationCollector() {
     680       29652 :     return (FLAG_minor_mc) ? MINOR_MARK_COMPACTOR : SCAVENGER;
     681             :   }
     682             : 
     683             :   static inline const char* CollectorName(GarbageCollector collector) {
     684          30 :     switch (collector) {
     685             :       case SCAVENGER:
     686             :         return "Scavenger";
     687             :       case MARK_COMPACTOR:
     688             :         return "Mark-Compact";
     689             :       case MINOR_MARK_COMPACTOR:
     690             :         return "Minor Mark-Compact";
     691             :     }
     692             :     return "Unknown collector";
     693             :   }
     694             : 
     695             :   V8_EXPORT_PRIVATE static double MaxHeapGrowingFactor(
     696             :       size_t max_old_generation_size);
     697             :   V8_EXPORT_PRIVATE static double HeapGrowingFactor(double gc_speed,
     698             :                                                     double mutator_speed,
     699             :                                                     double max_factor);
     700             : 
     701             :   // Copy block of memory from src to dst. Size of block should be aligned
     702             :   // by pointer size.
     703             :   static inline void CopyBlock(Address dst, Address src, int byte_size);
     704             : 
     705             :   // Notifies the heap that is ok to start marking or other activities that
     706             :   // should not happen during deserialization.
     707             :   void NotifyDeserializationComplete();
     708             : 
     709             :   inline Address* NewSpaceAllocationTopAddress();
     710             :   inline Address* NewSpaceAllocationLimitAddress();
     711             :   inline Address* OldSpaceAllocationTopAddress();
     712             :   inline Address* OldSpaceAllocationLimitAddress();
     713             : 
     714             :   // FreeSpace objects have a null map after deserialization. Update the map.
     715             :   void RepairFreeListsAfterDeserialization();
     716             : 
     717             :   // Move len elements within a given array from src_index index to dst_index
     718             :   // index.
     719             :   void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
     720             : 
     721             :   // Initialize a filler object to keep the ability to iterate over the heap
     722             :   // when introducing gaps within pages. If slots could have been recorded in
     723             :   // the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
     724             :   // pass ClearRecordedSlots::kNo.
     725             :   V8_EXPORT_PRIVATE HeapObject* CreateFillerObjectAt(Address addr, int size,
     726             :                                                      ClearRecordedSlots mode);
     727             : 
     728             :   bool CanMoveObjectStart(HeapObject* object);
     729             : 
     730             :   static bool IsImmovable(HeapObject* object);
     731             : 
     732             :   // Trim the given array from the left. Note that this relocates the object
     733             :   // start and hence is only valid if there is only a single reference to it.
     734             :   FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
     735             : 
     736             :   // Trim the given array from the right.
     737             :   void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
     738             : 
     739             :   // Converts the given boolean condition to JavaScript boolean value.
     740             :   inline Oddball* ToBoolean(bool condition);
     741             : 
     742             :   // Notify the heap that a context has been disposed.
     743             :   int NotifyContextDisposed(bool dependant_context);
     744             : 
     745             :   void set_native_contexts_list(Object* object) {
     746      336852 :     native_contexts_list_ = object;
     747             :   }
     748             :   Object* native_contexts_list() const { return native_contexts_list_; }
     749             : 
     750             :   void set_allocation_sites_list(Object* object) {
     751      469655 :     allocation_sites_list_ = object;
     752             :   }
     753             :   Object* allocation_sites_list() { return allocation_sites_list_; }
     754             : 
     755             :   // Used in CreateAllocationSiteStub and the (de)serializer.
     756             :   Object** allocation_sites_list_address() { return &allocation_sites_list_; }
     757             : 
     758        9038 :   void set_encountered_weak_collections(Object* weak_collection) {
     759      121749 :     encountered_weak_collections_ = weak_collection;
     760        9038 :   }
     761        9038 :   Object* encountered_weak_collections() const {
     762        9038 :     return encountered_weak_collections_;
     763             :   }
     764             :   void IterateEncounteredWeakCollections(RootVisitor* visitor);
     765             : 
     766             :   // Number of mark-sweeps.
     767          10 :   int ms_count() const { return ms_count_; }
     768             : 
      769             :   // Checks whether the given object is allowed to be migrated from its
     770             :   // current space into the given destination space. Used for debugging.
     771             :   bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
     772             : 
     773             :   void CheckHandleCount();
     774             : 
     775             :   // Number of "runtime allocations" done so far.
     776             :   uint32_t allocations_count() { return allocations_count_; }
     777             : 
     778             :   // Print short heap statistics.
     779             :   void PrintShortHeapStatistics();
     780             : 
     781             :   inline HeapState gc_state() { return gc_state_; }
     782             :   void SetGCState(HeapState state);
     783             : 
     784             :   inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
     785             : 
     786             :   // If an object has an AllocationMemento trailing it, return it, otherwise
     787             :   // return nullptr;
     788             :   template <FindMementoMode mode>
     789             :   inline AllocationMemento* FindAllocationMemento(Map* map, HeapObject* object);
     790             : 
     791             :   // Returns false if not able to reserve.
     792             :   bool ReserveSpace(Reservation* reservations, std::vector<Address>* maps);
     793             : 
     794             :   //
     795             :   // Support for the API.
     796             :   //
     797             : 
     798             :   bool CreateApiObjects();
     799             : 
     800             :   // Implements the corresponding V8 API function.
     801             :   bool IdleNotification(double deadline_in_seconds);
     802             :   bool IdleNotification(int idle_time_in_ms);
     803             : 
     804             :   void MemoryPressureNotification(MemoryPressureLevel level,
     805             :                                   bool is_isolate_locked);
     806             :   void CheckMemoryPressure();
     807             : 
     808             :   void SetOutOfMemoryCallback(v8::debug::OutOfMemoryCallback callback,
     809             :                               void* data);
     810             : 
     811             :   double MonotonicallyIncreasingTimeInMs();
     812             : 
     813             :   void RecordStats(HeapStats* stats, bool take_snapshot = false);
     814             : 
     815             :   // Check new space expansion criteria and expand semispaces if it was hit.
     816             :   void CheckNewSpaceExpansionCriteria();
     817             : 
     818             :   void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
     819             : 
     820             :   // An object should be promoted if the object has survived a
     821             :   // scavenge operation.
     822             :   inline bool ShouldBePromoted(Address old_address);
     823             : 
     824             :   void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
     825             : 
     826             :   inline uint32_t HashSeed();
     827             : 
     828             :   inline int NextScriptId();
     829             :   inline int GetNextTemplateSerialNumber();
     830             : 
     831             :   void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
     832             :   void SetConstructStubCreateDeoptPCOffset(int pc_offset);
     833             :   void SetConstructStubInvokeDeoptPCOffset(int pc_offset);
     834             :   void SetGetterStubDeoptPCOffset(int pc_offset);
     835             :   void SetSetterStubDeoptPCOffset(int pc_offset);
     836             :   void SetInterpreterEntryReturnPCOffset(int pc_offset);
     837             : 
     838             :   void SetSerializedTemplates(FixedArray* templates);
     839             :   void SetSerializedGlobalProxySizes(FixedArray* sizes);
     840             : 
     841             :   // For post mortem debugging.
     842             :   void RememberUnmappedPage(Address page, bool compacted);
     843             : 
     844         364 :   int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }
     845             : 
     846             :   int64_t external_memory() { return external_memory_; }
     847        2496 :   void update_external_memory(int64_t delta) { external_memory_ += delta; }
     848             : 
     849             :   void update_external_memory_concurrently_freed(intptr_t freed) {
     850             :     external_memory_concurrently_freed_.Increment(freed);
     851             :   }
     852             : 
     853       86452 :   void account_external_memory_concurrently_freed() {
     854       86452 :     external_memory_ -= external_memory_concurrently_freed_.Value();
     855             :     external_memory_concurrently_freed_.SetValue(0);
     856       86452 :   }
     857             : 
     858             :   void DeoptMarkedAllocationSites();
     859             : 
     860             :   bool DeoptMaybeTenuredAllocationSites();
     861             : 
     862             :   void AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj,
     863             :                                              Handle<WeakCell> code);
     864             : 
     865             :   void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
     866             :                                      Handle<DependentCode> dep);
     867             : 
     868             :   DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);
     869             : 
     870             :   void CompactWeakFixedArrays();
     871             : 
     872             :   void AddRetainedMap(Handle<Map> map);
     873             : 
     874             :   // This event is triggered after successful allocation of a new object made
     875             :   // by runtime. Allocations of target space for object evacuation do not
     876             :   // trigger the event. In order to track ALL allocations one must turn off
     877             :   // FLAG_inline_new.
     878             :   inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);
     879             : 
     880             :   // This event is triggered after object is moved to a new place.
     881             :   inline void OnMoveEvent(HeapObject* target, HeapObject* source,
     882             :                           int size_in_bytes);
     883             : 
     884             :   bool deserialization_complete() const { return deserialization_complete_; }
     885             : 
     886             :   bool HasLowAllocationRate();
     887             :   bool HasHighFragmentation();
     888             :   bool HasHighFragmentation(size_t used, size_t committed);
     889             : 
     890             :   void ActivateMemoryReducerIfNeeded();
     891             : 
     892             :   bool ShouldOptimizeForMemoryUsage();
     893             : 
     894      315615 :   bool HighMemoryPressure() {
     895      315615 :     return memory_pressure_level_.Value() != MemoryPressureLevel::kNone;
     896             :   }
     897             : 
     898             :   size_t HeapLimitForDebugging() {
     899             :     const size_t kDebugHeapSizeFactor = 4;
     900             :     size_t max_limit = std::numeric_limits<size_t>::max() / 4;
     901             :     return Min(max_limit,
     902          30 :                initial_max_old_generation_size_ * kDebugHeapSizeFactor);
     903             :   }
     904             : 
     905          15 :   void IncreaseHeapLimitForDebugging() {
     906             :     max_old_generation_size_ =
     907          30 :         Max(max_old_generation_size_, HeapLimitForDebugging());
     908             :   }
     909             : 
     910        3086 :   void RestoreOriginalHeapLimit() {
     911             :     // Do not set the limit lower than the live size + some slack.
     912        3086 :     size_t min_limit = SizeOfObjects() + SizeOfObjects() / 4;
     913             :     max_old_generation_size_ =
     914             :         Min(max_old_generation_size_,
     915        9258 :             Max(initial_max_old_generation_size_, min_limit));
     916        3086 :   }
     917             : 
     918          15 :   bool IsHeapLimitIncreasedForDebugging() {
     919          30 :     return max_old_generation_size_ == HeapLimitForDebugging();
     920             :   }
     921             : 
     922             :   // ===========================================================================
     923             :   // Initialization. ===========================================================
     924             :   // ===========================================================================
     925             : 
     926             :   // Configure heap sizes
     927             :   // max_semi_space_size_in_kb: maximum semi-space size in KB
     928             :   // max_old_generation_size_in_mb: maximum old generation size in MB
     929             :   // code_range_size_in_mb: code range size in MB
     930             :   // Return false if the heap has been set up already.
     931             :   bool ConfigureHeap(size_t max_semi_space_size_in_kb,
     932             :                      size_t max_old_generation_size_in_mb,
     933             :                      size_t code_range_size_in_mb);
     934             :   bool ConfigureHeapDefault();
     935             : 
     936             :   // Prepares the heap, setting up memory areas that are needed in the isolate
     937             :   // without actually creating any objects.
     938             :   bool SetUp();
     939             : 
     940             :   // (Re-)Initialize hash seed from flag or RNG.
     941             :   void InitializeHashSeed();
     942             : 
     943             :   // Bootstraps the object heap with the core set of objects required to run.
     944             :   // Returns whether it succeeded.
     945             :   bool CreateHeapObjects();
     946             : 
     947             :   // Create ObjectStats if live_object_stats_ or dead_object_stats_ are nullptr.
     948             :   void CreateObjectStats();
     949             : 
     950             :   // Destroys all memory allocated by the heap.
     951             :   void TearDown();
     952             : 
     953             :   // Returns whether SetUp has been called.
     954             :   bool HasBeenSetUp();
     955             : 
     956             :   bool use_tasks() const { return use_tasks_; }
     957             : 
     958             :   // ===========================================================================
     959             :   // Getters for spaces. =======================================================
     960             :   // ===========================================================================
     961             : 
     962             :   inline Address NewSpaceTop();
     963             : 
     964             :   NewSpace* new_space() { return new_space_; }
     965             :   OldSpace* old_space() { return old_space_; }
     966             :   OldSpace* code_space() { return code_space_; }
     967             :   MapSpace* map_space() { return map_space_; }
     968             :   LargeObjectSpace* lo_space() { return lo_space_; }
     969             : 
     970             :   inline PagedSpace* paged_space(int idx);
     971             :   inline Space* space(int idx);
     972             : 
     973             :   // Returns name of the space.
     974             :   const char* GetSpaceName(int idx);
     975             : 
     976             :   // ===========================================================================
     977             :   // Getters to other components. ==============================================
     978             :   // ===========================================================================
     979             : 
     980             :   GCTracer* tracer() { return tracer_; }
     981             : 
     982             :   MemoryAllocator* memory_allocator() { return memory_allocator_; }
     983             : 
     984             :   inline Isolate* isolate();
     985             : 
     986    54613789 :   MarkCompactCollector* mark_compact_collector() {
     987    54613789 :     return mark_compact_collector_;
     988             :   }
     989             : 
     990             :   MinorMarkCompactCollector* minor_mark_compact_collector() {
     991             :     return minor_mark_compact_collector_;
     992             :   }
     993             : 
     994             :   // ===========================================================================
     995             :   // Root set access. ==========================================================
     996             :   // ===========================================================================
     997             : 
     998             :   // Heap root getters.
     999             : #define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
    1000             :   ROOT_LIST(ROOT_ACCESSOR)
    1001             : #undef ROOT_ACCESSOR
    1002             : 
    1003             :   // Utility type maps.
    1004             : #define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
    1005             :   STRUCT_LIST(STRUCT_MAP_ACCESSOR)
    1006             : #undef STRUCT_MAP_ACCESSOR
    1007             : 
    1008             : #define STRING_ACCESSOR(name, str) inline String* name();
    1009             :   INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
    1010             : #undef STRING_ACCESSOR
    1011             : 
    1012             : #define SYMBOL_ACCESSOR(name) inline Symbol* name();
    1013             :   PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
    1014             : #undef SYMBOL_ACCESSOR
    1015             : 
    1016             : #define SYMBOL_ACCESSOR(name, description) inline Symbol* name();
    1017             :   PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
    1018             :   WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
    1019             : #undef SYMBOL_ACCESSOR
    1020             : 
  // Direct read of the root at |index|.
  Object* root(RootListIndex index) { return roots_[index]; }
  // Returns a Handle whose location is the roots_ slot itself.
  Handle<Object> root_handle(RootListIndex index) {
    return Handle<Object>(&roots_[index]);
  }
    1025             :   template <typename T>
    1026             :   bool IsRootHandle(Handle<T> handle, RootListIndex* index) const {
    1027             :     Object** const handle_location = bit_cast<Object**>(handle.address());
    1028    11937788 :     if (handle_location >= &roots_[kRootListLength]) return false;
    1029     3750837 :     if (handle_location < &roots_[0]) return false;
    1030     3737283 :     *index = static_cast<RootListIndex>(handle_location - &roots_[0]);
    1031             :     return true;
    1032             :   }
    1033             : 
  // Generated code can embed this address to get access to the roots.
  Object** roots_array_start() { return roots_; }

  // Sets the stub_cache_ (only used when expanding the dictionary).
  void SetRootCodeStubs(UnseededNumberDictionary* value);

  // Direct writers for individual roots_ slots.
  void SetRootMaterializedObjects(FixedArray* objects) {
    roots_[kMaterializedObjectsRootIndex] = objects;
  }

  void SetRootScriptList(Object* value) {
    roots_[kScriptListRootIndex] = value;
  }

  void SetRootStringTable(StringTable* value) {
    roots_[kStringTableRootIndex] = value;
  }

  void SetRootNoScriptSharedFunctionInfos(Object* value) {
    roots_[kNoScriptSharedFunctionInfosRootIndex] = value;
  }

  void SetMessageListeners(TemplateList* value) {
    roots_[kMessageListenersRootIndex] = value;
  }
    1059             : 
  // Set the stack limit in the roots_ array.  Some architectures generate
  // code that looks here, because it is faster than loading from the static
  // jslimit_/real_jslimit_ variable in the StackGuard.
  void SetStackLimits();

  // The stack limit is thread-dependent. To be able to reproduce the same
  // snapshot blob, we need to reset it before serializing.
  void ClearStackLimits();

  // Generated code can treat direct references to this root as constant.
  bool RootCanBeTreatedAsConstant(RootListIndex root_index);

  // Map/root-index lookups for fixed typed arrays of a given external type.
  Map* MapForFixedTypedArray(ExternalArrayType array_type);
  RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);

  RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
  FixedTypedArrayBase* EmptyFixedTypedArrayForMap(const Map* map);

  // Registers/unregisters an external [start, end) range as strong roots.
  void RegisterStrongRoots(Object** start, Object** end);
  void UnregisterStrongRoots(Object** start);

  // ===========================================================================
  // Inline allocation. ========================================================
  // ===========================================================================

  // Indicates whether inline bump-pointer allocation has been disabled.
  bool inline_allocation_disabled() { return inline_allocation_disabled_; }

  // Switch whether inline bump-pointer allocation should be used.
  void EnableInlineAllocation();
  void DisableInlineAllocation();

  // ===========================================================================
  // Methods triggering GCs. ===================================================
  // ===========================================================================

  // Performs garbage collection operation.
  // Returns whether there is a chance that another major GC could
  // collect more garbage.
  bool CollectGarbage(
      AllocationSpace space, GarbageCollectionReason gc_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
  // non-zero, then the slower precise sweeper is used, which leaves the heap
  // in a state where we can iterate over the heap visiting all objects.
  void CollectAllGarbage(
      int flags, GarbageCollectionReason gc_reason,
      const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);

  // Last hope GC, should try to squeeze as much as possible.
  void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason);

  // Reports an external memory pressure event, either performs a major GC or
  // completes incremental marking in order to free external resources.
  void ReportExternalMemoryPressure();

  typedef v8::Isolate::GetExternallyAllocatedMemoryInBytesCallback
      GetExternallyAllocatedMemoryInBytesCallback;

  // Installs the embedder callback used to query external memory usage.
  void SetGetExternallyAllocatedMemoryInBytesCallback(
      GetExternallyAllocatedMemoryInBytesCallback callback) {
    external_memory_callback_ = callback;
  }

  // Invoked when GC was requested via the stack guard.
  void HandleGCRequest();
    1127             : 
    1128             :   // ===========================================================================
    1129             :   // Iterators. ================================================================
    1130             :   // ===========================================================================
    1131             : 
    1132             :   // Iterates over all roots in the heap.
    1133             :   void IterateRoots(RootVisitor* v, VisitMode mode);
    1134             :   // Iterates over all strong roots in the heap.
    1135             :   void IterateStrongRoots(RootVisitor* v, VisitMode mode);
    1136             :   // Iterates over entries in the smi roots list.  Only interesting to the
    1137             :   // serializer/deserializer, since GC does not care about smis.
    1138             :   void IterateSmiRoots(RootVisitor* v);
    1139             :   // Iterates over all the other roots in the heap.
    1140             :   void IterateWeakRoots(RootVisitor* v, VisitMode mode);
    1141             : 
    1142             :   // ===========================================================================
    1143             :   // Store buffer API. =========================================================
    1144             :   // ===========================================================================
    1145             : 
    1146             :   // Write barrier support for object[offset] = o;
    1147             :   inline void RecordWrite(Object* object, Object** slot, Object* value);
    1148             :   inline void RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* target);
    1149             :   void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* target);
    1150             :   void RecordWritesIntoCode(Code* code);
    1151             :   inline void RecordFixedArrayElements(FixedArray* array, int offset,
    1152             :                                        int length);
    1153             : 
    1154             :   // Used for query incremental marking status in generated code.
    1155             :   Address* IsMarkingFlagAddress() {
    1156             :     return reinterpret_cast<Address*>(&is_marking_flag_);
    1157             :   }
    1158             : 
    1159      117338 :   void SetIsMarkingFlag(uint8_t flag) { is_marking_flag_ = flag; }
    1160             : 
    1161             :   inline Address* store_buffer_top_address();
    1162             : 
    1163             :   void ClearRecordedSlot(HeapObject* object, Object** slot);
    1164             :   void ClearRecordedSlotRange(Address start, Address end);
    1165             : 
    1166             :   bool HasRecordedSlot(HeapObject* object, Object** slot);
    1167             : 
    1168             :   // ===========================================================================
    1169             :   // Incremental marking API. ==================================================
    1170             :   // ===========================================================================
    1171             : 
    1172             :   // Start incremental marking and ensure that idle time handler can perform
    1173             :   // incremental steps.
    1174             :   void StartIdleIncrementalMarking(
    1175             :       GarbageCollectionReason gc_reason,
    1176             :       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
    1177             : 
    1178             :   // Starts incremental marking assuming incremental marking is currently
    1179             :   // stopped.
    1180             :   void StartIncrementalMarking(
    1181             :       int gc_flags, GarbageCollectionReason gc_reason,
    1182             :       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
    1183             : 
    1184             :   void StartIncrementalMarkingIfAllocationLimitIsReached(
    1185             :       int gc_flags,
    1186             :       GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
    1187             : 
    1188             :   void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);
    1189             : 
    1190             :   void RegisterDeserializedObjectsForBlackAllocation(
    1191             :       Reservation* reservations, const std::vector<HeapObject*>& large_objects,
    1192             :       const std::vector<Address>& maps);
    1193             : 
    1194     4993994 :   IncrementalMarking* incremental_marking() { return incremental_marking_; }
    1195             : 
    1196             :   // ===========================================================================
    1197             :   // Concurrent marking API. ===================================================
    1198             :   // ===========================================================================
    1199             : 
    1200             :   ConcurrentMarking* concurrent_marking() { return concurrent_marking_; }
    1201             : 
    1202             :   // The runtime uses this function to notify potentially unsafe object layout
    1203             :   // changes that require special synchronization with the concurrent marker.
    1204             :   // The old size is the size of the object before layout change.
    1205             :   void NotifyObjectLayoutChange(HeapObject* object, int old_size,
    1206             :                                 const DisallowHeapAllocation&);
    1207             : 
    1208             : #ifdef VERIFY_HEAP
    1209             :   // This function checks that either
    1210             :   // - the map transition is safe,
    1211             :   // - or it was communicated to GC using NotifyObjectLayoutChange.
    1212             :   void VerifyObjectLayoutChange(HeapObject* object, Map* new_map);
    1213             : #endif
    1214             : 
    1215             :   // ===========================================================================
    1216             :   // Embedder heap tracer support. =============================================
    1217             :   // ===========================================================================
    1218             : 
    1219      170181 :   LocalEmbedderHeapTracer* local_embedder_heap_tracer() {
    1220      170181 :     return local_embedder_heap_tracer_;
    1221             :   }
    1222             :   void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
    1223             :   void TracePossibleWrapper(JSObject* js_object);
    1224             :   void RegisterExternallyReferencedObject(Object** object);
    1225             : 
    1226             :   // ===========================================================================
    1227             :   // External string table API. ================================================
    1228             :   // ===========================================================================
    1229             : 
    1230             :   // Registers an external string.
    1231             :   inline void RegisterExternalString(String* string);
    1232             : 
    1233             :   // Finalizes an external string by deleting the associated external
    1234             :   // data and clearing the resource pointer.
    1235             :   inline void FinalizeExternalString(String* string);
    1236             : 
    1237             :   // ===========================================================================
    1238             :   // Methods checking/returning the space of a given object/address. ===========
    1239             :   // ===========================================================================
    1240             : 
    1241             :   // Returns whether the object resides in new space.
    1242             :   inline bool InNewSpace(Object* object);
    1243             :   inline bool InFromSpace(Object* object);
    1244             :   inline bool InToSpace(Object* object);
    1245             : 
    1246             :   // Returns whether the object resides in old space.
    1247             :   inline bool InOldSpace(Object* object);
    1248             : 
    1249             :   // Checks whether an address/object in the heap (including auxiliary
    1250             :   // area and unused area).
    1251             :   bool Contains(HeapObject* value);
    1252             : 
    1253             :   // Checks whether an address/object in a space.
    1254             :   // Currently used by tests, serialization and heap verification only.
    1255             :   bool InSpace(HeapObject* value, AllocationSpace space);
    1256             : 
    1257             :   // Slow methods that can be used for verification as they can also be used
    1258             :   // with off-heap Addresses.
    1259             :   bool ContainsSlow(Address addr);
    1260             :   bool InSpaceSlow(Address addr, AllocationSpace space);
    1261             :   inline bool InNewSpaceSlow(Address address);
    1262             :   inline bool InOldSpaceSlow(Address address);
    1263             : 
    1264             :   // ===========================================================================
    1265             :   // Object statistics tracking. ===============================================
    1266             :   // ===========================================================================
    1267             : 
    1268             :   // Returns the number of buckets used by object statistics tracking during a
    1269             :   // major GC. Note that the following methods fail gracefully when the bounds
    1270             :   // are exceeded though.
    1271             :   size_t NumberOfTrackedHeapObjectTypes();
    1272             : 
    1273             :   // Returns object statistics about count and size at the last major GC.
    1274             :   // Objects are being grouped into buckets that roughly resemble existing
    1275             :   // instance types.
    1276             :   size_t ObjectCountAtLastGC(size_t index);
    1277             :   size_t ObjectSizeAtLastGC(size_t index);
    1278             : 
    1279             :   // Retrieves names of buckets used by object statistics tracking.
    1280             :   bool GetObjectTypeName(size_t index, const char** object_type,
    1281             :                          const char** object_sub_type);
    1282             : 
    1283             :   // ===========================================================================
    1284             :   // Code statistics. ==========================================================
    1285             :   // ===========================================================================
    1286             : 
    1287             :   // Collect code (Code and BytecodeArray objects) statistics.
    1288             :   void CollectCodeStatistics();
    1289             : 
    1290             :   // ===========================================================================
    1291             :   // GC statistics. ============================================================
    1292             :   // ===========================================================================
    1293             : 
  // Returns the maximum amount of memory reserved for the heap.
  size_t MaxReserved() {
    // Two semi-spaces (from- and to-space) plus the old generation.
    return 2 * max_semi_space_size_ + max_old_generation_size_;
  }
  size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
  size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
  size_t MaxOldGenerationSize() { return max_old_generation_size_; }
    1301             : 
    1302             :   static size_t ComputeMaxOldGenerationSize(uint64_t physical_memory) {
    1303             :     const int old_space_physical_memory_factor = 4;
    1304             :     int computed_size =
    1305       26789 :         static_cast<int>(physical_memory / i::MB /
    1306       26789 :                          old_space_physical_memory_factor * kPointerMultiplier);
    1307             :     return Max(Min(computed_size, kMaxOldGenerationSize),
    1308           6 :                kMinOldGenerationSize);
    1309             :   }
    1310             : 
    1311             :   static size_t ComputeMaxSemiSpaceSize(uint64_t physical_memory) {
    1312             :     const uint64_t min_physical_memory = 512 * MB;
    1313             :     const uint64_t max_physical_memory = 3 * static_cast<uint64_t>(GB);
    1314             : 
    1315             :     uint64_t capped_physical_memory =
    1316             :         Max(Min(physical_memory, max_physical_memory), min_physical_memory);
    1317             :     // linearly scale max semi-space size: (X-A)/(B-A)*(D-C)+C
    1318             :     int semi_space_size_in_kb =
    1319       26783 :         static_cast<int>(((capped_physical_memory - min_physical_memory) *
    1320       26783 :                           (kMaxSemiSpaceSizeInKB - kMinSemiSpaceSizeInKB)) /
    1321             :                              (max_physical_memory - min_physical_memory) +
    1322       26783 :                          kMinSemiSpaceSizeInKB);
    1323       26783 :     return RoundUp(semi_space_size_in_kb, (1 << kPageSizeBits) / KB);
    1324             :   }
    1325             : 
  // Returns the capacity of the heap in bytes w/o growing. Heap grows when
  // more spaces are needed until it reaches the limit.
  size_t Capacity();

  // Returns the capacity of the old generation.
  size_t OldGenerationCapacity();

  // Returns the amount of memory currently committed for the heap.
  size_t CommittedMemory();

  // Returns the amount of memory currently committed for the old space.
  size_t CommittedOldGenerationMemory();

  // Returns the amount of executable memory currently committed for the heap.
  size_t CommittedMemoryExecutable();

  // Returns the amount of physical memory currently committed for the heap.
  size_t CommittedPhysicalMemory();

  // Returns the maximum amount of memory ever committed for the heap.
  size_t MaximumCommittedMemory() { return maximum_committed_; }

  // Updates the maximum committed memory for the heap. Should be called
  // whenever a space grows.
  void UpdateMaximumCommitted();

  // Returns the available bytes in space w/o growing.
  // Heap doesn't guarantee that it can allocate an object that requires
  // all available bytes. Check MaxHeapObjectSize() instead.
  size_t Available();

  // Returns the size of all objects residing in the heap.
  size_t SizeOfObjects();

  // Recomputes survival statistics after a scavenge.
  void UpdateSurvivalStatistics(int start_new_space_size);
    1361             : 
  // Accumulators for young-generation survival statistics.
  inline void IncrementPromotedObjectsSize(size_t object_size) {
    promoted_objects_size_ += object_size;
  }
  inline size_t promoted_objects_size() { return promoted_objects_size_; }

  inline void IncrementSemiSpaceCopiedObjectSize(size_t object_size) {
    semi_space_copied_object_size_ += object_size;
  }
  inline size_t semi_space_copied_object_size() {
    return semi_space_copied_object_size_;
  }

  // Bytes that survived the last scavenge: promoted plus copied in-place.
  inline size_t SurvivedNewSpaceObjectSize() {
    return promoted_objects_size_ + semi_space_copied_object_size_;
  }

  // Global-handle node counters.
  inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }

  inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }

  inline void IncrementNodesPromoted() { nodes_promoted_++; }

  inline void IncrementYoungSurvivorsCounter(size_t survived) {
    survived_last_scavenge_ = survived;
    survived_since_last_expansion_ += survived;
  }

  // Total promoted size: old-generation objects plus external memory.
  inline uint64_t PromotedTotalSize() {
    return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
  }
    1392             : 
  inline void UpdateNewSpaceAllocationCounter();

  inline size_t NewSpaceAllocationCounter();

  // This should be used only for testing.
  void set_new_space_allocation_counter(size_t new_value) {
    new_space_allocation_counter_ = new_value;
  }

  // Folds the bytes promoted since the last GC into the counter and resets
  // the size-at-last-gc baseline to zero.
  void UpdateOldGenerationAllocationCounter() {
    old_generation_allocation_counter_at_last_gc_ =
        OldGenerationAllocationCounter();
    old_generation_size_at_last_gc_ = 0;
  }

  // Monotonic counter of bytes allocated in the old generation.
  size_t OldGenerationAllocationCounter() {
    return old_generation_allocation_counter_at_last_gc_ +
           PromotedSinceLastGC();
  }

  // This should be used only for testing.
  void set_old_generation_allocation_counter_at_last_gc(size_t new_value) {
    old_generation_allocation_counter_at_last_gc_ = new_value;
  }
    1417             : 
    1418             :   size_t PromotedSinceLastGC() {
    1419      251407 :     size_t old_generation_size = PromotedSpaceSizeOfObjects();
    1420             :     DCHECK_GE(old_generation_size, old_generation_size_at_last_gc_);
    1421      251407 :     return old_generation_size - old_generation_size_at_last_gc_;
    1422             :   }
    1423             : 
    1424             :   // This is called by the sweeper when it discovers more free space
    1425             :   // as expected at the end of the last GC.
    1426             :   void NotifyRefinedOldGenerationSize(size_t decreased_bytes) {
    1427       13499 :     if (old_generation_size_at_last_gc_ != 0) {
    1428             :       // PromotedSpaceSizeOfObjects() is now smaller by |decreased_bytes|.
    1429             :       // Adjust old_generation_size_at_last_gc_ too so that PromotedSinceLastGC
    1430             :       // stay monotonically non-decreasing function.
    1431             :       DCHECK_GE(old_generation_size_at_last_gc_, decreased_bytes);
    1432        8081 :       old_generation_size_at_last_gc_ -= decreased_bytes;
    1433             :     }
    1434             :   }
    1435             : 
  // Number of garbage collections performed so far (presumably both minor
  // and major — confirm where gc_count_ is incremented).
  int gc_count() const { return gc_count_; }

  // Returns the size of objects residing in non new spaces.
  size_t PromotedSpaceSizeOfObjects();

  // ===========================================================================
  // Prologue/epilogue callback methods.========================================
  // ===========================================================================

  // Registration of embedder callbacks invoked around GC; |data| is passed
  // back to the callback and also identifies it for removal.
  void AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
                             GCType gc_type_filter, void* data);
  void RemoveGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
                                void* data);

  void AddGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
                             GCType gc_type_filter, void* data);
  void RemoveGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
                                void* data);

  void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
  void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);

  // ===========================================================================
  // Allocation methods. =======================================================
  // ===========================================================================

  // Creates a filler object and returns a heap object immediately after it.
  MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
                                                int filler_size);
    1465             : 
    1466             :   // Creates a filler object if needed for alignment and returns a heap object
    1467             :   // immediately after it. If any space is left after the returned object,
    1468             :   // another filler object is created so the over allocated memory is iterable.
    1469             :   MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
    1470             :                                               int object_size,
    1471             :                                               int allocation_size,
    1472             :                                               AllocationAlignment alignment);
    1473             : 
    1474             :   // ===========================================================================
    1475             :   // ArrayBuffer tracking. =====================================================
    1476             :   // ===========================================================================
    1477             : 
    1478             :   // TODO(gc): API usability: encapsulate mutation of JSArrayBuffer::is_external
    1479             :   // in the registration/unregistration APIs. Consider dropping the "New" from
    1480             :   // "RegisterNewArrayBuffer" because one can re-register a previously
    1481             :   // unregistered buffer, too, and the name is confusing.
    1482             :   void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
    1483             :   void UnregisterArrayBuffer(JSArrayBuffer* buffer);
    1484             : 
    1485             :   // ===========================================================================
    1486             :   // Allocation site tracking. =================================================
    1487             :   // ===========================================================================
    1488             : 
    1489             :   // Updates the AllocationSite of a given {object}. The entry (including the
    1490             :   // count) is cached on the local pretenuring feedback.
    1491             :   inline void UpdateAllocationSite(
    1492             :       Map* map, HeapObject* object,
    1493             :       PretenuringFeedbackMap* pretenuring_feedback);
    1494             : 
    1495             :   // Merges local pretenuring feedback into the global one. Note that this
    1496             :   // method needs to be called after evacuation, as allocation sites may be
    1497             :   // evacuated and this method resolves forward pointers accordingly.
    1498             :   void MergeAllocationSitePretenuringFeedback(
    1499             :       const PretenuringFeedbackMap& local_pretenuring_feedback);
    1500             : 
    1501             :   // ===========================================================================
    1502             :   // Retaining path tracking. ==================================================
    1503             :   // ===========================================================================
    1504             : 
    1505             :   // Adds the given object to the weak table of retaining path targets.
    1506             :   // On each GC if the marker discovers the object, it will print the retaining
    1507             :   // path. This requires --track-retaining-path flag.
    1508             :   void AddRetainingPathTarget(Handle<HeapObject> object);
    1509             : 
    1510             : // =============================================================================
    1511             : #ifdef VERIFY_HEAP
    1512             :   // Verify the heap is in its normal state before or after a GC.
    1513             :   void Verify();
    1514             :   void VerifyRememberedSetFor(HeapObject* object);
    1515             : #endif
    1516             : 
    1517             : #ifdef DEBUG
    1518             :   void VerifyCountersAfterSweeping();
    1519             :   void VerifyCountersBeforeConcurrentSweeping();
    1520             : 
    1521             :   void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
    1522             : 
    1523             :   void Print();
    1524             :   void PrintHandles();
    1525             : 
    1526             :   // Report heap statistics.
    1527             :   void ReportHeapStatistics(const char* title);
    1528             :   void ReportCodeStatistics(const char* title);
    1529             : #endif
    1530             :   void* GetRandomMmapAddr() {
    1531      800342 :     void* result = base::OS::GetRandomMmapAddr();
    1532             : #if V8_TARGET_ARCH_X64
    1533             : #if V8_OS_MACOSX
    1534             :     // The Darwin kernel [as of macOS 10.12.5] does not clean up page
    1535             :     // directory entries [PDE] created from mmap or mach_vm_allocate, even
    1536             :     // after the region is destroyed. Using a virtual address space that is
    1537             :     // too large causes a leak of about 1 wired [can never be paged out] page
    1538             :     // per call to mmap(). The page is only reclaimed when the process is
    1539             :     // killed. Confine the hint to a 32-bit section of the virtual address
    1540             :     // space. See crbug.com/700928.
    1541             :     uintptr_t offset =
    1542             :         reinterpret_cast<uintptr_t>(base::OS::GetRandomMmapAddr()) &
    1543             :         kMmapRegionMask;
    1544             :     result = reinterpret_cast<void*>(mmap_region_base_ + offset);
    1545             : #endif  // V8_OS_MACOSX
    1546             : #endif  // V8_TARGET_ARCH_X64
    1547             :     return result;
    1548             :   }
    1549             : 
    1550             :   static const char* GarbageCollectionReasonToString(
    1551             :       GarbageCollectionReason gc_reason);
    1552             : 
    1553             :  private:
    1554             :   class SkipStoreBufferScope;
    1555             : 
    1556             :   typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
    1557             :                                                         Object** pointer);
    1558             : 
  // External strings table is a place where all external strings are
  // registered.  We need to keep track of such strings to properly
  // finalize them.
  class ExternalStringTable {
   public:
    explicit ExternalStringTable(Heap* heap) : heap_(heap) {}

    // Registers an external string.
    inline void AddString(String* string);

    // Visits every registered string, in both lists.
    void IterateAll(RootVisitor* v);
    // Visits only the strings recorded in the new-space list.
    void IterateNewSpaceStrings(RootVisitor* v);
    // Moves all entries from the new-space list to the old-space list.
    void PromoteAllNewSpaceStrings();

    // Restores internal invariant and gets rid of collected strings. Must be
    // called after each Iterate*() that modified the strings.
    void CleanUpAll();
    void CleanUpNewSpaceStrings();

    // Finalize all registered external strings and clear tables.
    void TearDown();

    // Rewrites stored references through |updater_func| — new-space list only,
    // or both lists, respectively.
    void UpdateNewSpaceReferences(
        Heap::ExternalStringTableUpdaterCallback updater_func);
    void UpdateReferences(
        Heap::ExternalStringTableUpdaterCallback updater_func);

   private:
    void Verify();

    Heap* const heap_;

    // To speed up scavenge collections new space strings are kept
    // separate from old space strings.
    std::vector<Object*> new_space_strings_;
    std::vector<Object*> old_space_strings_;

    DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
  };
    1598             : 
    1599             :   struct StrongRootsList;
    1600             : 
  // Row type for the static string_type_table[] declared below: pairs a
  // string InstanceType and instance size with a root-list slot.
  struct StringTypeTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };

  // Row type for the static constant_string_table[] declared below: pairs a
  // constant C-string's contents with a root-list slot.
  struct ConstantStringTable {
    const char* contents;
    RootListIndex index;
  };

  // Row type for the static struct_table[] declared below: pairs a struct
  // InstanceType and instance size with a root-list slot.
  struct StructTable {
    InstanceType type;
    int size;
    RootListIndex index;
  };
    1617             : 
  // Bundles a registered GC callback with the GCType filter it was registered
  // for and the opaque |data| pointer to hand back when it is invoked.
  struct GCCallbackTuple {
    GCCallbackTuple(v8::Isolate::GCCallbackWithData callback, GCType gc_type,
                    void* data)
        : callback(callback), gc_type(gc_type), data(data) {}

    // Equality is used to locate entries for removal; assignment is declared
    // out-of-line (definitions not visible here).
    bool operator==(const GCCallbackTuple& other) const;
    GCCallbackTuple& operator=(const GCCallbackTuple& other);

    v8::Isolate::GCCallbackWithData callback;
    GCType gc_type;
    void* data;
  };
    1630             : 
    1631             :   static const int kInitialStringTableSize = 2048;
    1632             :   static const int kInitialEvalCacheSize = 64;
    1633             :   static const int kInitialNumberStringCacheSize = 256;
    1634             : 
    1635             :   static const int kRememberedUnmappedPages = 128;
    1636             : 
    1637             :   static const StringTypeTable string_type_table[];
    1638             :   static const ConstantStringTable constant_string_table[];
    1639             :   static const StructTable struct_table[];
    1640             : 
    1641             :   static const int kYoungSurvivalRateHighThreshold = 90;
    1642             :   static const int kYoungSurvivalRateAllowedDeviation = 15;
    1643             :   static const int kOldSurvivalRateLowThreshold = 10;
    1644             : 
    1645             :   static const int kMaxMarkCompactsInIdleRound = 7;
    1646             :   static const int kIdleScavengeThreshold = 5;
    1647             : 
    1648             :   static const int kInitialFeedbackCapacity = 256;
    1649             : 
    1650             :   static const int kMaxScavengerTasks = 8;
    1651             : 
    1652             :   Heap();
    1653             : 
    1654             :   static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
    1655             :       Heap* heap, Object** pointer);
    1656             : 
    1657             :   // Selects the proper allocation space based on the pretenuring decision.
    1658             :   static AllocationSpace SelectSpace(PretenureFlag pretenure) {
    1659   240574634 :     return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
    1660             :   }
    1661             : 
    1662           0 :   static size_t DefaultGetExternallyAllocatedMemoryInBytesCallback() {
    1663           0 :     return 0;
    1664             :   }
    1665             : 
    1666             : #define ROOT_ACCESSOR(type, name, camel_name) \
    1667             :   inline void set_##name(type* value);
    1668             :   ROOT_LIST(ROOT_ACCESSOR)
    1669             : #undef ROOT_ACCESSOR
    1670             : 
    1671             :   StoreBuffer* store_buffer() { return store_buffer_; }
    1672             : 
  // Installs the flag set for the current GC cycle. The DCHECK (evaluated
  // after the assignment, via the Should* predicates below) enforces that
  // finalize- and abort-incremental-marking are never requested together.
  void set_current_gc_flags(int flags) {
    current_gc_flags_ = flags;
    DCHECK(!ShouldFinalizeIncrementalMarking() ||
           !ShouldAbortIncrementalMarking());
  }
    1678             : 
  // True when the reduce-memory-footprint bit is set in current_gc_flags_.
  inline bool ShouldReduceMemory() const {
    return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0;
  }

  // True when the abort-incremental-marking bit is set in current_gc_flags_.
  inline bool ShouldAbortIncrementalMarking() const {
    return (current_gc_flags_ & kAbortIncrementalMarkingMask) != 0;
  }

  // True when the finalize-incremental-marking bit is set in
  // current_gc_flags_; mutually exclusive with the abort bit (see the DCHECK
  // in set_current_gc_flags).
  inline bool ShouldFinalizeIncrementalMarking() const {
    return (current_gc_flags_ & kFinalizeIncrementalMarkingMask) != 0;
  }
    1690             : 
    1691             :   int NumberOfScavengeTasks();
    1692             : 
    1693             :   void PreprocessStackTraces();
    1694             : 
    1695             :   // Checks whether a global GC is necessary
    1696             :   GarbageCollector SelectGarbageCollector(AllocationSpace space,
    1697             :                                           const char** reason);
    1698             : 
    1699             :   // Make sure there is a filler value behind the top of the new space
  // so that the GC does not confuse some uninitialized/stale memory
    1701             :   // with the allocation memento of the object at the top
    1702             :   void EnsureFillerObjectAtTop();
    1703             : 
    1704             :   // Ensure that we have swept all spaces in such a way that we can iterate
    1705             :   // over all objects.  May cause a GC.
    1706             :   void MakeHeapIterable();
    1707             : 
    1708             :   // Performs garbage collection
    1709             :   // Returns whether there is a chance another major GC could
    1710             :   // collect more garbage.
    1711             :   bool PerformGarbageCollection(
    1712             :       GarbageCollector collector,
    1713             :       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
    1714             : 
    1715             :   inline void UpdateOldSpaceLimits();
    1716             : 
    1717             :   // Initializes a JSObject based on its map.
    1718             :   void InitializeJSObjectFromMap(JSObject* obj, Object* properties, Map* map);
    1719             : 
    1720             :   // Initializes JSObject body starting at given offset.
    1721             :   void InitializeJSObjectBody(JSObject* obj, Map* map, int start_offset);
    1722             : 
    1723             :   void InitializeAllocationMemento(AllocationMemento* memento,
    1724             :                                    AllocationSite* allocation_site);
    1725             : 
    1726             :   bool CreateInitialMaps();
    1727             :   void CreateInitialObjects();
    1728             : 
    1729             :   // These five Create*EntryStub functions are here and forced to not be inlined
    1730             :   // because of a gcc-4.4 bug that assigns wrong vtable entries.
    1731             :   NO_INLINE(void CreateJSEntryStub());
    1732             :   NO_INLINE(void CreateJSConstructEntryStub());
    1733             : 
    1734             :   void CreateFixedStubs();
    1735             : 
    1736             :   // Commits from space if it is uncommitted.
    1737             :   void EnsureFromSpaceIsCommitted();
    1738             : 
    1739             :   // Uncommit unused semi space.
    1740             :   bool UncommitFromSpace();
    1741             : 
    1742             :   // Fill in bogus values in from space
    1743             :   void ZapFromSpace();
    1744             : 
    1745             :   // Deopts all code that contains allocation instruction which are tenured or
    1746             :   // not tenured. Moreover it clears the pretenuring allocation site statistics.
    1747             :   void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
    1748             : 
    1749             :   // Evaluates local pretenuring for the old space and calls
    1750             :   // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
    1751             :   // the old space.
    1752             :   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
    1753             : 
    1754             :   // Record statistics before and after garbage collection.
    1755             :   void ReportStatisticsBeforeGC();
    1756             :   void ReportStatisticsAfterGC();
    1757             : 
    1758             :   // Creates and installs the full-sized number string cache.
    1759             :   int FullSizeNumberStringCacheLength();
    1760             :   // Flush the number to string cache.
    1761             :   void FlushNumberStringCache();
    1762             : 
    1763             :   void ConfigureInitialOldGenerationSize();
    1764             : 
    1765             :   bool HasLowYoungGenerationAllocationRate();
    1766             :   bool HasLowOldGenerationAllocationRate();
    1767             :   double YoungGenerationMutatorUtilization();
    1768             :   double OldGenerationMutatorUtilization();
    1769             : 
    1770             :   void ReduceNewSpaceSize();
    1771             : 
    1772             :   GCIdleTimeHeapState ComputeHeapState();
    1773             : 
    1774             :   bool PerformIdleTimeAction(GCIdleTimeAction action,
    1775             :                              GCIdleTimeHeapState heap_state,
    1776             :                              double deadline_in_ms);
    1777             : 
    1778             :   void IdleNotificationEpilogue(GCIdleTimeAction action,
    1779             :                                 GCIdleTimeHeapState heap_state, double start_ms,
    1780             :                                 double deadline_in_ms);
    1781             : 
    1782             :   inline void UpdateAllocationsHash(HeapObject* object);
    1783             :   inline void UpdateAllocationsHash(uint32_t value);
    1784             :   void PrintAllocationsHash();
    1785             : 
    1786             :   void AddToRingBuffer(const char* string);
    1787             :   void GetFromRingBuffer(char* buffer);
    1788             : 
    1789             :   void CompactRetainedMaps(ArrayList* retained_maps);
    1790             : 
    1791             :   void CollectGarbageOnMemoryPressure();
    1792             : 
    1793             :   void InvokeOutOfMemoryCallback();
    1794             : 
    1795             :   void ComputeFastPromotionMode(double survival_rate);
    1796             : 
    1797             :   // Attempt to over-approximate the weak closure by marking object groups and
    1798             :   // implicit references from global handles, but don't atomically complete
    1799             :   // marking. If we continue to mark incrementally, we might have marked
    1800             :   // objects that die later.
    1801             :   void FinalizeIncrementalMarking(GarbageCollectionReason gc_reason);
    1802             : 
    1803             :   // Returns the timer used for a given GC type.
    1804             :   // - GCScavenger: young generation GC
    1805             :   // - GCCompactor: full GC
  // - GCFinalizeMC: finalization of incremental full GC
    1807             :   // - GCFinalizeMCReduceMemory: finalization of incremental full GC with
    1808             :   // memory reduction
    1809             :   HistogramTimer* GCTypeTimer(GarbageCollector collector);
    1810             : 
    1811             :   // ===========================================================================
    1812             :   // Pretenuring. ==============================================================
    1813             :   // ===========================================================================
    1814             : 
    1815             :   // Pretenuring decisions are made based on feedback collected during new space
    1816             :   // evacuation. Note that between feedback collection and calling this method
    1817             :   // object in old space must not move.
    1818             :   void ProcessPretenuringFeedback();
    1819             : 
    1820             :   // Removes an entry from the global pretenuring storage.
    1821             :   void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);
    1822             : 
    1823             :   // ===========================================================================
    1824             :   // Actual GC. ================================================================
    1825             :   // ===========================================================================
    1826             : 
    1827             :   // Code that should be run before and after each GC.  Includes some
    1828             :   // reporting/verification activities when compiled with DEBUG set.
    1829             :   void GarbageCollectionPrologue();
    1830             :   void GarbageCollectionEpilogue();
    1831             : 
    1832             :   // Performs a major collection in the whole heap.
    1833             :   void MarkCompact();
    1834             :   // Performs a minor collection of just the young generation.
    1835             :   void MinorMarkCompact();
    1836             : 
    1837             :   // Code to be run before and after mark-compact.
    1838             :   void MarkCompactPrologue();
    1839             :   void MarkCompactEpilogue();
    1840             : 
    1841             :   // Performs a minor collection in new generation.
    1842             :   void Scavenge();
    1843             :   void EvacuateYoungGeneration();
    1844             : 
    1845             :   void UpdateNewSpaceReferencesInExternalStringTable(
    1846             :       ExternalStringTableUpdaterCallback updater_func);
    1847             : 
    1848             :   void UpdateReferencesInExternalStringTable(
    1849             :       ExternalStringTableUpdaterCallback updater_func);
    1850             : 
    1851             :   void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
    1852             :   void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
    1853             :   void ProcessNativeContexts(WeakObjectRetainer* retainer);
    1854             :   void ProcessAllocationSites(WeakObjectRetainer* retainer);
    1855             :   void ProcessWeakListRoots(WeakObjectRetainer* retainer);
    1856             : 
    1857             :   // ===========================================================================
    1858             :   // GC statistics. ============================================================
    1859             :   // ===========================================================================
    1860             : 
  // Bytes remaining before PromotedTotalSize() reaches the old-generation
  // allocation limit; returns 0 once the limit is reached or exceeded.
  inline size_t OldGenerationSpaceAvailable() {
    if (old_generation_allocation_limit_ <= PromotedTotalSize()) return 0;
    return old_generation_allocation_limit_ -
           static_cast<size_t>(PromotedTotalSize());
  }
    1866             : 
  // We allow incremental marking to overshoot the allocation limit for
  // performance reasons. If the overshoot is too large then we are more
  // eager to finalize incremental marking.
  inline bool AllocationLimitOvershotByLargeMargin() {
    // This guards against too eager finalization in small heaps.
    // The number is chosen based on v8.browsing_mobile on Nexus 7v2.
    size_t kMarginForSmallHeaps = 32u * MB;
    // Not overshot at all: nothing to do.
    if (old_generation_allocation_limit_ >= PromotedTotalSize()) return false;
    uint64_t overshoot = PromotedTotalSize() - old_generation_allocation_limit_;
    // Overshoot margin is 50% of allocation limit or half-way to the max heap
    // with special handling of small heaps.
    uint64_t margin =
        Min(Max(old_generation_allocation_limit_ / 2, kMarginForSmallHeaps),
            (max_old_generation_size_ - old_generation_allocation_limit_) / 2);
    return overshoot >= margin;
  }
    1883             : 
    1884             :   void UpdateTotalGCTime(double duration);
    1885             : 
    1886       86452 :   bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
    1887             : 
    1888             :   // ===========================================================================
    1889             :   // Growing strategy. =========================================================
    1890             :   // ===========================================================================
    1891             : 
    1892             :   // For some webpages RAIL mode does not switch from PERFORMANCE_LOAD.
    1893             :   // This constant limits the effect of load RAIL mode on GC.
    1894             :   // The value is arbitrary and chosen as the largest load time observed in
    1895             :   // v8 browsing benchmarks.
    1896             :   static const int kMaxLoadTimeMs = 7000;
    1897             : 
    1898             :   bool ShouldOptimizeForLoadTime();
    1899             : 
    1900             :   // Decrease the allocation limit if the new limit based on the given
    1901             :   // parameters is lower than the current limit.
    1902             :   void DampenOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
    1903             :                                           double mutator_speed);
    1904             : 
    1905             :   // Calculates the allocation limit based on a given growing factor and a
    1906             :   // given old generation size.
    1907             :   size_t CalculateOldGenerationAllocationLimit(double factor,
    1908             :                                                size_t old_gen_size);
    1909             : 
    1910             :   // Sets the allocation limit to trigger the next full garbage collection.
    1911             :   void SetOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
    1912             :                                        double mutator_speed);
    1913             : 
    1914             :   size_t MinimumAllocationLimitGrowingStep();
    1915             : 
  // Returns the promoted-size threshold that triggers the next full GC
  // (set via SetOldGenerationAllocationLimit above).
  size_t old_generation_allocation_limit() const {
    return old_generation_allocation_limit_;
  }

  // True while the always-allocate scope counter is non-zero.
  bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }
    1921             : 
    1922      415072 :   bool CanExpandOldGeneration(size_t size) {
    1923      415127 :     if (force_oom_) return false;
    1924      830144 :     return (OldGenerationCapacity() + size) < MaxOldGenerationSize();
    1925             :   }
    1926             : 
    1927       82789 :   bool IsCloseToOutOfMemory(size_t slack) {
    1928       82789 :     return OldGenerationCapacity() + slack >= MaxOldGenerationSize();
    1929             :   }
    1930             : 
    1931             :   bool ShouldExpandOldGenerationOnSlowAllocation();
    1932             : 
    1933             :   enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };
    1934             :   IncrementalMarkingLimit IncrementalMarkingLimitReached();
    1935             : 
    1936             :   // ===========================================================================
    1937             :   // Idle notification. ========================================================
    1938             :   // ===========================================================================
    1939             : 
    1940             :   bool RecentIdleNotificationHappened();
    1941             :   void ScheduleIdleScavengeIfNeeded(int bytes_allocated);
    1942             : 
    1943             :   // ===========================================================================
    1944             :   // HeapIterator helpers. =====================================================
    1945             :   // ===========================================================================
    1946             : 
  // Increments the nesting depth of active heap iterators.
  void heap_iterator_start() { heap_iterator_depth_++; }

  // Decrements the nesting depth of active heap iterators.
  void heap_iterator_end() { heap_iterator_depth_--; }

  // True while at least one heap iterator is active.
  bool in_heap_iterator() { return heap_iterator_depth_ > 0; }
    1952             : 
    1953             :   // ===========================================================================
    1954             :   // Allocation methods. =======================================================
    1955             :   // ===========================================================================
    1956             : 
    1957             :   // Returns a deep copy of the JavaScript object.
    1958             :   // Properties and elements are copied too.
    1959             :   // Optionally takes an AllocationSite to be appended in an AllocationMemento.
    1960             :   MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
    1961             :                                                 AllocationSite* site = nullptr);
    1962             : 
    1963             :   // Allocates a JS Map in the heap.
    1964             :   MUST_USE_RESULT AllocationResult
    1965             :   AllocateMap(InstanceType instance_type, int instance_size,
    1966             :               ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
    1967             :               int inobject_properties = 0);
    1968             : 
    1969             :   // Allocates and initializes a new JavaScript object based on a
    1970             :   // constructor.
    1971             :   // If allocation_site is non-null, then a memento is emitted after the object
    1972             :   // that points to the site.
    1973             :   MUST_USE_RESULT AllocationResult AllocateJSObject(
    1974             :       JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED,
    1975             :       AllocationSite* allocation_site = nullptr);
    1976             : 
    1977             :   // Allocates and initializes a new JavaScript object based on a map.
    1978             :   // Passing an allocation site means that a memento will be created that
    1979             :   // points to the site.
    1980             :   MUST_USE_RESULT AllocationResult
    1981             :   AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
    1982             :                           AllocationSite* allocation_site = nullptr);
    1983             : 
    1984             :   // Allocates a HeapNumber from value.
    1985             :   MUST_USE_RESULT AllocationResult AllocateHeapNumber(
    1986             :       MutableMode mode = IMMUTABLE, PretenureFlag pretenure = NOT_TENURED);
    1987             : 
    1988             :   MUST_USE_RESULT AllocationResult AllocateBigInt(int length,
    1989             :                                                   bool zero_initialize,
    1990             :                                                   PretenureFlag pretenure);
    1991             : 
    1992             :   // Allocates a byte array of the specified length
    1993             :   MUST_USE_RESULT AllocationResult
    1994             :   AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);
    1995             : 
    1996             :   // Allocates a bytecode array with given contents.
    1997             :   MUST_USE_RESULT AllocationResult
    1998             :   AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
    1999             :                         int parameter_count, FixedArray* constant_pool);
    2000             : 
    2001             :   MUST_USE_RESULT AllocationResult CopyCode(Code* code);
    2002             : 
    2003             :   MUST_USE_RESULT AllocationResult
    2004             :   CopyBytecodeArray(BytecodeArray* bytecode_array);
    2005             : 
    2006             :   // Allocates a fixed array initialized with undefined values
    2007             :   MUST_USE_RESULT inline AllocationResult AllocateFixedArray(
    2008             :       int length, PretenureFlag pretenure = NOT_TENURED);
    2009             : 
    2010             :   // Allocates a property array initialized with undefined values
    2011             :   MUST_USE_RESULT AllocationResult
    2012             :   AllocatePropertyArray(int length, PretenureFlag pretenure = NOT_TENURED);
    2013             : 
    2014             :   // Allocate a feedback vector for the given shared function info. The slots
    2015             :   // are pre-filled with undefined.
    2016             :   MUST_USE_RESULT AllocationResult
    2017             :   AllocateFeedbackVector(SharedFunctionInfo* shared, PretenureFlag pretenure);
    2018             : 
    2019             :   // Allocate an uninitialized feedback vector.
    2020             :   MUST_USE_RESULT AllocationResult
    2021             :   AllocateRawFeedbackVector(int length, PretenureFlag pretenure);
    2022             : 
    2023             :   MUST_USE_RESULT AllocationResult AllocateSmallOrderedHashSet(
    2024             :       int length, PretenureFlag pretenure = NOT_TENURED);
    2025             :   MUST_USE_RESULT AllocationResult AllocateSmallOrderedHashMap(
    2026             :       int length, PretenureFlag pretenure = NOT_TENURED);
    2027             : 
    2028             :   // Allocate an uninitialized object.  The memory is non-executable if the
    2029             :   // hardware and OS allow.  This is the single choke-point for allocations
    2030             :   // performed by the runtime and should not be bypassed (to extend this to
    2031             :   // inlined allocations, use the Heap::DisableInlineAllocation() support).
    2032             :   MUST_USE_RESULT inline AllocationResult AllocateRaw(
    2033             :       int size_in_bytes, AllocationSpace space,
    2034             :       AllocationAlignment aligment = kWordAligned);
    2035             : 
    2036             :   // Allocates a heap object based on the map.
    2037             :   MUST_USE_RESULT AllocationResult
    2038             :   Allocate(Map* map, AllocationSpace space,
    2039             :            AllocationSite* allocation_site = nullptr);
    2040             : 
    2041             :   // Allocates a partial map for bootstrapping.
    2042             :   MUST_USE_RESULT AllocationResult
    2043             :       AllocatePartialMap(InstanceType instance_type, int instance_size);
    2044             : 
    2045             :   // Allocate a block of memory in the given space (filled with a filler).
    2046             :   // Used as a fall-back for generated code when the space is full.
    2047             :   MUST_USE_RESULT AllocationResult
    2048             :       AllocateFillerObject(int size, bool double_align, AllocationSpace space);
    2049             : 
    2050             :   // Allocate an uninitialized fixed array.
    2051             :   MUST_USE_RESULT AllocationResult
    2052             :       AllocateRawFixedArray(int length, PretenureFlag pretenure);
    2053             : 
    2054             :   // Allocate an uninitialized fixed double array.
    2055             :   MUST_USE_RESULT AllocationResult
    2056             :       AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);
    2057             : 
    2058             :   // Allocate an initialized fixed array with the given filler value.
    2059             :   MUST_USE_RESULT AllocationResult
    2060             :       AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
    2061             :                                    Object* filler);
    2062             : 
  // Allocates and partially initializes a String.  There are two String
  // encodings: one-byte and two-byte.  These functions allocate a string of
  // the given length and set its map and length fields.  The characters of
  // the string are uninitialized.
  MUST_USE_RESULT AllocationResult
      AllocateRawOneByteString(int length, PretenureFlag pretenure);
  MUST_USE_RESULT AllocationResult
      AllocateRawTwoByteString(int length, PretenureFlag pretenure);
    2071             : 
    2072             :   // Allocates an internalized string in old space based on the character
    2073             :   // stream.
    2074             :   MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
    2075             :       Vector<const char> str, int chars, uint32_t hash_field);
    2076             : 
    2077             :   MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
    2078             :       Vector<const uint8_t> str, uint32_t hash_field);
    2079             : 
    2080             :   MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
    2081             :       Vector<const uc16> str, uint32_t hash_field);
    2082             : 
    2083             :   template <bool is_one_byte, typename T>
    2084             :   MUST_USE_RESULT AllocationResult
    2085         495 :       AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);
    2086             : 
    2087             :   template <typename T>
    2088             :   MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
    2089             :       T t, int chars, uint32_t hash_field);
    2090             : 
    2091             :   // Allocates an uninitialized fixed array. It must be filled by the caller.
    2092             :   MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(
    2093             :       int length, PretenureFlag pretenure = NOT_TENURED);
    2094             : 
    2095             :   // Make a copy of src and return it.
    2096             :   MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);
    2097             : 
    2098             :   // Make a copy of src, also grow the copy, and return the copy.
    2099             :   template <typename T>
    2100             :   MUST_USE_RESULT AllocationResult CopyArrayAndGrow(T* src, int grow_by,
    2101             :                                                     PretenureFlag pretenure);
    2102             : 
    2103             :   // Make a copy of src, also grow the copy, and return the copy.
    2104             :   MUST_USE_RESULT AllocationResult CopyPropertyArrayAndGrow(
    2105             :       PropertyArray* src, int grow_by, PretenureFlag pretenure);
    2106             : 
    2107             :   // Make a copy of src, also grow the copy, and return the copy.
    2108             :   MUST_USE_RESULT AllocationResult CopyFixedArrayUpTo(FixedArray* src,
    2109             :                                                       int new_len,
    2110             :                                                       PretenureFlag pretenure);
    2111             : 
    2112             :   // Make a copy of src, set the map, and return the copy.
    2113             :   template <typename T>
    2114             :   MUST_USE_RESULT AllocationResult CopyArrayWithMap(T* src, Map* map);
    2115             : 
    2116             :   // Make a copy of src, set the map, and return the copy.
    2117             :   MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray* src,
    2118             :                                                          Map* map);
    2119             : 
    2120             :   // Make a copy of src, set the map, and return the copy.
    2121             :   MUST_USE_RESULT AllocationResult CopyPropertyArray(PropertyArray* src);
    2122             : 
    2123             :   // Make a copy of src and return it.
    2124             :   MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
    2125             :       FixedDoubleArray* src);
    2126             : 
    2127             :   // Make a copy of src and return it.
    2128             :   MUST_USE_RESULT AllocationResult CopyFeedbackVector(FeedbackVector* src);
    2129             : 
    2130             :   // Computes a single character string where the character has code.
    2131             :   // A cache is used for one-byte (Latin1) codes.
    2132             :   MUST_USE_RESULT AllocationResult
    2133             :       LookupSingleCharacterStringFromCode(uint16_t code);
    2134             : 
    2135             :   // Allocate a symbol in old space.
    2136             :   MUST_USE_RESULT AllocationResult AllocateSymbol();
    2137             : 
    2138             :   // Allocates an external array of the specified length and type.
    2139             :   MUST_USE_RESULT AllocationResult AllocateFixedTypedArrayWithExternalPointer(
    2140             :       int length, ExternalArrayType array_type, void* external_pointer,
    2141             :       PretenureFlag pretenure);
    2142             : 
    2143             :   // Allocates a fixed typed array of the specified length and type.
    2144             :   MUST_USE_RESULT AllocationResult
    2145             :   AllocateFixedTypedArray(int length, ExternalArrayType array_type,
    2146             :                           bool initialize, PretenureFlag pretenure);
    2147             : 
    2148             :   // Make a copy of src and return it.
    2149             :   MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);
    2150             : 
    2151             :   // Make a copy of src, set the map, and return the copy.
    2152             :   MUST_USE_RESULT AllocationResult
    2153             :       CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);
    2154             : 
  // Allocates a fixed double array with uninitialized values.  The caller is
  // responsible for filling in the elements.
  MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
      int length, PretenureFlag pretenure = NOT_TENURED);
    2158             : 
    2159             :   // Allocate empty fixed array.
    2160             :   MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();
    2161             : 
    2162             :   // Allocate empty scope info.
    2163             :   MUST_USE_RESULT AllocationResult AllocateEmptyScopeInfo();
    2164             : 
    2165             :   // Allocate empty fixed typed array of given type.
    2166             :   MUST_USE_RESULT AllocationResult
    2167             :       AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
    2168             : 
    2169             :   // Allocate a tenured simple cell.
    2170             :   MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
    2171             : 
    2172             :   // Allocate a tenured JS global property cell initialized with the hole.
    2173             :   MUST_USE_RESULT AllocationResult AllocatePropertyCell(Name* name);
    2174             : 
    2175             :   MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);
    2176             : 
    2177             :   MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity);
    2178             : 
    2179             :   // Allocates a new utility object in the old generation.
    2180             :   MUST_USE_RESULT AllocationResult
    2181             :   AllocateStruct(InstanceType type, PretenureFlag pretenure = NOT_TENURED);
    2182             : 
    2183             :   // Allocates a new foreign object.
    2184             :   MUST_USE_RESULT AllocationResult
    2185             :       AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);
    2186             : 
    2187             :   MUST_USE_RESULT AllocationResult
    2188             :       AllocateCode(int object_size, bool immovable);
    2189             : 
  // Sets the test-only force_oom_ flag (declared below as "Used for testing
  // purposes") -- presumably makes subsequent allocations fail as if out of
  // memory; confirm against the allocation paths that read force_oom_.
  void set_force_oom(bool value) { force_oom_ = value; }
    2191             : 
    2192             :   // ===========================================================================
    2193             :   // Retaining path tracing ====================================================
    2194             :   // ===========================================================================
    2195             : 
    2196             :   void AddRetainer(HeapObject* retainer, HeapObject* object);
    2197             :   void AddRetainingRoot(Root root, HeapObject* object);
    2198             :   bool IsRetainingPathTarget(HeapObject* object);
    2199             :   void PrintRetainingPath(HeapObject* object);
    2200             : 
    2201             :   // The amount of external memory registered through the API.
    2202             :   int64_t external_memory_;
    2203             : 
    2204             :   // The limit when to trigger memory pressure from the API.
    2205             :   int64_t external_memory_limit_;
    2206             : 
    2207             :   // Caches the amount of external memory registered at the last MC.
    2208             :   int64_t external_memory_at_last_mark_compact_;
    2209             : 
    2210             :   // The amount of memory that has been freed concurrently.
    2211             :   base::AtomicNumber<intptr_t> external_memory_concurrently_freed_;
    2212             : 
    2213             :   // This can be calculated directly from a pointer to the heap; however, it is
    2214             :   // more expedient to get at the isolate directly from within Heap methods.
    2215             :   Isolate* isolate_;
    2216             : 
    2217             :   Object* roots_[kRootListLength];
    2218             : 
    2219             :   size_t code_range_size_;
    2220             :   size_t max_semi_space_size_;
    2221             :   size_t initial_semispace_size_;
    2222             :   size_t max_old_generation_size_;
    2223             :   size_t initial_max_old_generation_size_;
    2224             :   size_t initial_old_generation_size_;
    2225             :   bool old_generation_size_configured_;
    2226             :   size_t maximum_committed_;
    2227             : 
    2228             :   // For keeping track of how much data has survived
    2229             :   // scavenge since last new space expansion.
    2230             :   size_t survived_since_last_expansion_;
    2231             : 
    2232             :   // ... and since the last scavenge.
    2233             :   size_t survived_last_scavenge_;
    2234             : 
    2235             :   // This is not the depth of nested AlwaysAllocateScope's but rather a single
    2236             :   // count, as scopes can be acquired from multiple tasks (read: threads).
    2237             :   base::AtomicNumber<size_t> always_allocate_scope_count_;
    2238             : 
    2239             :   // Stores the memory pressure level that set by MemoryPressureNotification
    2240             :   // and reset by a mark-compact garbage collection.
    2241             :   base::AtomicValue<MemoryPressureLevel> memory_pressure_level_;
    2242             : 
    2243             :   v8::debug::OutOfMemoryCallback out_of_memory_callback_;
    2244             :   void* out_of_memory_callback_data_;
    2245             : 
    2246             :   // For keeping track of context disposals.
    2247             :   int contexts_disposed_;
    2248             : 
    2249             :   // The length of the retained_maps array at the time of context disposal.
    2250             :   // This separates maps in the retained_maps array that were created before
    2251             :   // and after context disposal.
    2252             :   int number_of_disposed_maps_;
    2253             : 
    2254             :   NewSpace* new_space_;
    2255             :   OldSpace* old_space_;
    2256             :   OldSpace* code_space_;
    2257             :   MapSpace* map_space_;
    2258             :   LargeObjectSpace* lo_space_;
    2259             :   // Map from the space id to the space.
    2260             :   Space* space_[LAST_SPACE + 1];
    2261             :   HeapState gc_state_;
    2262             :   int gc_post_processing_depth_;
    2263             : 
    2264             :   // Returns the amount of external memory registered since last global gc.
    2265             :   uint64_t PromotedExternalMemorySize();
    2266             : 
    2267             :   // How many "runtime allocations" happened.
    2268             :   uint32_t allocations_count_;
    2269             : 
    2270             :   // Running hash over allocations performed.
    2271             :   uint32_t raw_allocations_hash_;
    2272             : 
    2273             :   // How many mark-sweep collections happened.
    2274             :   unsigned int ms_count_;
    2275             : 
    2276             :   // How many gc happened.
    2277             :   unsigned int gc_count_;
    2278             : 
    2279             :   static const uintptr_t kMmapRegionMask = 0xFFFFFFFFu;
    2280             :   uintptr_t mmap_region_base_;
    2281             : 
    2282             :   // For post mortem debugging.
    2283             :   int remembered_unmapped_pages_index_;
    2284             :   Address remembered_unmapped_pages_[kRememberedUnmappedPages];
    2285             : 
    2286             : #ifdef DEBUG
    2287             :   // If the --gc-interval flag is set to a positive value, this
    2288             :   // variable holds the value indicating the number of allocations
    2289             :   // remain until the next failure and garbage collection.
    2290             :   int allocation_timeout_;
    2291             : #endif  // DEBUG
    2292             : 
    2293             :   // Limit that triggers a global GC on the next (normally caused) GC.  This
    2294             :   // is checked when we have already decided to do a GC to help determine
    2295             :   // which collector to invoke, before expanding a paged space in the old
    2296             :   // generation and on every allocation in large object space.
    2297             :   size_t old_generation_allocation_limit_;
    2298             : 
    2299             :   // Indicates that inline bump-pointer allocation has been globally disabled
    2300             :   // for all spaces. This is used to disable allocations in generated code.
    2301             :   bool inline_allocation_disabled_;
    2302             : 
    2303             :   // Weak list heads, threaded through the objects.
    2304             :   // List heads are initialized lazily and contain the undefined_value at start.
    2305             :   Object* native_contexts_list_;
    2306             :   Object* allocation_sites_list_;
    2307             : 
    2308             :   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
    2309             :   // marking. It is initialized during marking, destroyed after marking and
    2310             :   // contains Smi(0) while marking is not active.
    2311             :   Object* encountered_weak_collections_;
    2312             : 
    2313             :   std::vector<GCCallbackTuple> gc_epilogue_callbacks_;
    2314             :   std::vector<GCCallbackTuple> gc_prologue_callbacks_;
    2315             : 
    2316             :   GetExternallyAllocatedMemoryInBytesCallback external_memory_callback_;
    2317             : 
    2318             :   int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];
    2319             : 
    2320             :   GCTracer* tracer_;
    2321             : 
    2322             :   size_t promoted_objects_size_;
    2323             :   double promotion_ratio_;
    2324             :   double promotion_rate_;
    2325             :   size_t semi_space_copied_object_size_;
    2326             :   size_t previous_semi_space_copied_object_size_;
    2327             :   double semi_space_copied_rate_;
    2328             :   int nodes_died_in_new_space_;
    2329             :   int nodes_copied_in_new_space_;
    2330             :   int nodes_promoted_;
    2331             : 
    2332             :   // This is the pretenuring trigger for allocation sites that are in maybe
    2333             :   // tenure state. When we switched to the maximum new space size we deoptimize
    2334             :   // the code that belongs to the allocation site and derive the lifetime
    2335             :   // of the allocation site.
    2336             :   unsigned int maximum_size_scavenges_;
    2337             : 
    2338             :   // Total time spent in GC.
    2339             :   double total_gc_time_ms_;
    2340             : 
    2341             :   // Last time an idle notification happened.
    2342             :   double last_idle_notification_time_;
    2343             : 
    2344             :   // Last time a garbage collection happened.
    2345             :   double last_gc_time_;
    2346             : 
    2347             :   MarkCompactCollector* mark_compact_collector_;
    2348             :   MinorMarkCompactCollector* minor_mark_compact_collector_;
    2349             : 
    2350             :   MemoryAllocator* memory_allocator_;
    2351             : 
    2352             :   StoreBuffer* store_buffer_;
    2353             : 
    2354             :   IncrementalMarking* incremental_marking_;
    2355             :   ConcurrentMarking* concurrent_marking_;
    2356             : 
    2357             :   GCIdleTimeHandler* gc_idle_time_handler_;
    2358             : 
    2359             :   MemoryReducer* memory_reducer_;
    2360             : 
    2361             :   ObjectStats* live_object_stats_;
    2362             :   ObjectStats* dead_object_stats_;
    2363             : 
    2364             :   ScavengeJob* scavenge_job_;
    2365             :   base::Semaphore parallel_scavenge_semaphore_;
    2366             : 
    2367             :   AllocationObserver* idle_scavenge_observer_;
    2368             : 
    2369             :   // This counter is increased before each GC and never reset.
    2370             :   // To account for the bytes allocated since the last GC, use the
    2371             :   // NewSpaceAllocationCounter() function.
    2372             :   size_t new_space_allocation_counter_;
    2373             : 
    2374             :   // This counter is increased before each GC and never reset. To
    2375             :   // account for the bytes allocated since the last GC, use the
    2376             :   // OldGenerationAllocationCounter() function.
    2377             :   size_t old_generation_allocation_counter_at_last_gc_;
    2378             : 
    2379             :   // The size of objects in old generation after the last MarkCompact GC.
    2380             :   size_t old_generation_size_at_last_gc_;
    2381             : 
    2382             :   // The feedback storage is used to store allocation sites (keys) and how often
    2383             :   // they have been visited (values) by finding a memento behind an object. The
    2384             :   // storage is only alive temporary during a GC. The invariant is that all
    2385             :   // pointers in this map are already fixed, i.e., they do not point to
    2386             :   // forwarding pointers.
    2387             :   PretenuringFeedbackMap global_pretenuring_feedback_;
    2388             : 
    2389             :   char trace_ring_buffer_[kTraceRingBufferSize];
    2390             : 
    2391             :   // Used as boolean.
    2392             :   uint8_t is_marking_flag_;
    2393             : 
    2394             :   // If it's not full then the data is from 0 to ring_buffer_end_.  If it's
    2395             :   // full then the data is from ring_buffer_end_ to the end of the buffer and
    2396             :   // from 0 to ring_buffer_end_.
    2397             :   bool ring_buffer_full_;
    2398             :   size_t ring_buffer_end_;
    2399             : 
    2400             :   // Flag is set when the heap has been configured.  The heap can be repeatedly
    2401             :   // configured through the API until it is set up.
    2402             :   bool configured_;
    2403             : 
    2404             :   // Currently set GC flags that are respected by all GC components.
    2405             :   int current_gc_flags_;
    2406             : 
    2407             :   // Currently set GC callback flags that are used to pass information between
    2408             :   // the embedder and V8's GC.
    2409             :   GCCallbackFlags current_gc_callback_flags_;
    2410             : 
    2411             :   ExternalStringTable external_string_table_;
    2412             : 
    2413             :   base::Mutex relocation_mutex_;
    2414             : 
    2415             :   int gc_callbacks_depth_;
    2416             : 
    2417             :   bool deserialization_complete_;
    2418             : 
    2419             :   StrongRootsList* strong_roots_list_;
    2420             : 
    2421             :   // The depth of HeapIterator nestings.
    2422             :   int heap_iterator_depth_;
    2423             : 
    2424             :   LocalEmbedderHeapTracer* local_embedder_heap_tracer_;
    2425             : 
    2426             :   bool fast_promotion_mode_;
    2427             : 
    2428             :   bool use_tasks_;
    2429             : 
    2430             :   // Used for testing purposes.
    2431             :   bool force_oom_;
    2432             :   bool delay_sweeper_tasks_for_testing_;
    2433             : 
    2434             :   HeapObject* pending_layout_change_object_;
    2435             : 
    2436             :   std::map<HeapObject*, HeapObject*> retainer_;
    2437             :   std::map<HeapObject*, Root> retaining_root_;
    2438             : 
    2439             :   // Classes in "heap" can be friends.
    2440             :   friend class AlwaysAllocateScope;
    2441             :   friend class ConcurrentMarking;
    2442             :   friend class GCCallbacksScope;
    2443             :   friend class GCTracer;
    2444             :   friend class HeapIterator;
    2445             :   friend class IdleScavengeObserver;
    2446             :   friend class IncrementalMarking;
    2447             :   friend class IncrementalMarkingJob;
    2448             :   friend class LargeObjectSpace;
    2449             :   template <FixedArrayVisitationMode fixed_array_mode,
    2450             :             TraceRetainingPathMode retaining_path_mode, typename MarkingState>
    2451             :   friend class MarkingVisitor;
    2452             :   friend class MarkCompactCollector;
    2453             :   friend class MarkCompactCollectorBase;
    2454             :   friend class MinorMarkCompactCollector;
    2455             :   friend class NewSpace;
    2456             :   friend class ObjectStatsCollector;
    2457             :   friend class Page;
    2458             :   friend class PagedSpace;
    2459             :   friend class Scavenger;
    2460             :   friend class StoreBuffer;
    2461             :   friend class heap::TestMemoryAllocatorScope;
    2462             : 
    2463             :   // The allocator interface.
    2464             :   friend class Factory;
    2465             : 
    2466             :   // The Isolate constructs us.
    2467             :   friend class Isolate;
    2468             : 
    2469             :   // Used in cctest.
    2470             :   friend class heap::HeapTester;
    2471             : 
    2472             :   DISALLOW_COPY_AND_ASSIGN(Heap);
    2473             : };
    2474             : 
    2475             : 
// Flat snapshot of heap state: per-space sizes/capacities, global-handle
// counts, allocator and malloc statistics, plus diagnostic text buffers.
// Every field is an out-pointer -- the caller supplies the storage and the
// heap writes through it.  NOTE(review): presumably used for post-mortem /
// fatal-error reporting; confirm against the code that fills this struct in.
// The trailing "//  N" comments number the fields' positions.
class HeapStats {
 public:
  // Sentinel values for start_marker/end_marker bracketing the recorded
  // data (NOTE(review): inferred from the names -- verify where checked).
  static const int kStartMarker = 0xDECADE00;
  static const int kEndMarker = 0xDECADE01;

  intptr_t* start_marker;                  //  0
  size_t* new_space_size;                  //  1
  size_t* new_space_capacity;              //  2
  size_t* old_space_size;                  //  3
  size_t* old_space_capacity;              //  4
  size_t* code_space_size;                 //  5
  size_t* code_space_capacity;             //  6
  size_t* map_space_size;                  //  7
  size_t* map_space_capacity;              //  8
  size_t* lo_space_size;                   //  9
  size_t* global_handle_count;             // 10
  size_t* weak_global_handle_count;        // 11
  size_t* pending_global_handle_count;     // 12
  size_t* near_death_global_handle_count;  // 13
  size_t* free_global_handle_count;        // 14
  size_t* memory_allocator_size;           // 15
  size_t* memory_allocator_capacity;       // 16
  size_t* malloced_memory;                 // 17
  size_t* malloced_peak_memory;            // 18
  size_t* objects_per_type;                // 19
  size_t* size_per_type;                   // 20
  int* os_error;                           // 21
  char* last_few_messages;                 // 22
  char* js_stacktrace;                     // 23
  intptr_t* end_marker;                    // 24
};
    2507             : 
    2508             : 
// RAII scope tied to the heap of the given isolate. Constructor and
// destructor bodies are defined out-of-line (inline elsewhere); presumably
// the scope forces allocations to succeed while it is alive -- confirm
// against the inline definitions.
class AlwaysAllocateScope {
 public:
  explicit inline AlwaysAllocateScope(Isolate* isolate);
  inline ~AlwaysAllocateScope();

 private:
  Heap* heap_;  // Heap this scope was opened on.
};
    2517             : 
// RAII scope held while modifying objects in the heap's code space.
// Constructor and destructor are defined out-of-line (inline elsewhere);
// presumably they toggle write access to code-space memory -- confirm
// against the inline definitions.
class CodeSpaceMemoryModificationScope {
 public:
  explicit inline CodeSpaceMemoryModificationScope(Heap* heap);
  inline ~CodeSpaceMemoryModificationScope();

 private:
  Heap* heap_;  // Heap whose code space is being modified.
};
    2526             : 
// Per-chunk variant of CodeSpaceMemoryModificationScope: RAII scope held
// while modifying objects on a single memory chunk. Constructor and
// destructor are defined out-of-line (inline elsewhere).
class CodePageMemoryModificationScope {
 public:
  explicit inline CodePageMemoryModificationScope(MemoryChunk* chunk);
  inline ~CodePageMemoryModificationScope();

 private:
  MemoryChunk* chunk_;  // Chunk this scope was opened on.
};
    2535             : 
    2536             : // Visitor class to verify interior pointers in spaces that do not contain
    2537             : // or care about intergenerational references. All heap object pointers have to
    2538             : // point into the heap to a location that has a map pointer at its first word.
    2539             : // Caveat: Heap::Contains is an approximation because it can return true for
    2540             : // objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor : public ObjectVisitor, public RootVisitor {
 public:
  // Verify the [start, end) slot range of a heap object's body.
  void VisitPointers(HeapObject* host, Object** start, Object** end) override;
  // Verify the [start, end) slot range of a root list.
  void VisitRootPointers(Root root, Object** start, Object** end) override;

 private:
  // Common range check -- presumably shared by both overloads above, which
  // each reduce to a [start, end) slot range.
  void VerifyPointers(Object** start, Object** end);
};
    2549             : 
    2550             : 
    2551             : // Verify that all objects are Smis.
class VerifySmisVisitor : public RootVisitor {
 public:
  // Checks that every slot in [start, end) holds a Smi.
  void VisitRootPointers(Root root, Object** start, Object** end) override;
};
    2556             : 
    2557             : 
    2558             : // Space iterator for iterating over all spaces of the heap.  Returns each space
    2559             : // in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
 public:
  // Iteration starts at the first space of the AllocationSpace enum.
  explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
  // Returns the next space, or null once all spaces have been returned.
  Space* next();

 private:
  Heap* heap_;   // Heap whose spaces are iterated.
  int counter_;  // Next AllocationSpace enum value to hand out.
};
    2569             : 
    2570             : 
    2571             : // Space iterator for iterating over all old spaces of the heap: Old space
    2572             : // and code space.  Returns each space in turn, and null when it is done.
class V8_EXPORT_PRIVATE OldSpaces BASE_EMBEDDED {
 public:
  // Iteration starts at OLD_SPACE (new space is deliberately skipped).
  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  // Returns the next old space, or null once all have been returned.
  OldSpace* next();

 private:
  Heap* heap_;   // Heap whose spaces are iterated.
  int counter_;  // Next AllocationSpace enum value to hand out.
};
    2582             : 
    2583             : 
    2584             : // Space iterator for iterating over all the paged spaces of the heap: Map
    2585             : // space, old space, code space and cell space.  Returns
    2586             : // each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
 public:
  // Iteration starts at OLD_SPACE; only paged spaces are visited.
  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
  // Returns the next paged space, or null once all have been returned.
  PagedSpace* next();

 private:
  Heap* heap_;   // Heap whose spaces are iterated.
  int counter_;  // Next AllocationSpace enum value to hand out.
};
    2596             : 
    2597             : 
// Heap-allocated iterator over the spaces of a heap, in the Java
// has_next()/next() style. Used by HeapIterator below to walk all spaces.
class SpaceIterator : public Malloced {
 public:
  explicit SpaceIterator(Heap* heap);
  virtual ~SpaceIterator();

  // True if next() will yield another space.
  bool has_next();
  // Returns the next space; only valid after has_next() returned true.
  Space* next();

 private:
  Heap* heap_;                // Heap whose spaces are iterated.
  int current_space_;         // from enum AllocationSpace.
};
    2610             : 
    2611             : 
    2612             : // A HeapIterator provides iteration over the whole heap. It
    2613             : // aggregates the specific iterators for the different spaces as
    2614             : // these can only iterate over one space only.
    2615             : //
    2616             : // HeapIterator ensures there is no allocation during its lifetime
    2617             : // (using an embedded DisallowHeapAllocation instance).
    2618             : //
    2619             : // HeapIterator can skip free list nodes (that is, de-allocated heap
    2620             : // objects that still remain in the heap). As implementation of free
    2621             : // nodes filtering uses GC marks, it can't be used during MS/MC GC
    2622             : // phases. Also, it is forbidden to interrupt iteration in this mode,
    2623             : // as this will leave heap objects marked (and thus, unusable).
class HeapIterator BASE_EMBEDDED {
 public:
  // kFilterUnreachable enables the free-list-node filtering described in the
  // class comment above, with the restrictions noted there (no use during
  // MS/MC GC phases; iteration must run to completion).
  enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

  explicit HeapIterator(Heap* heap,
                        HeapObjectsFiltering filtering = kNoFiltering);
  ~HeapIterator();

  // Returns the next heap object; presumably null once iteration is
  // exhausted, matching the other iterators in this file -- confirm in the
  // implementation.
  HeapObject* next();

 private:
  // Advances the underlying space/object iterators, ignoring filtering.
  HeapObject* NextObject();

  // Forbids allocation for the lifetime of the iterator (see class comment).
  DisallowHeapAllocation no_heap_allocation_;

  Heap* heap_;
  HeapObjectsFiltering filtering_;
  HeapObjectsFilter* filter_;
  // Space iterator for iterating all the spaces.
  SpaceIterator* space_iterator_;
  // Object iterator for the space currently being iterated.
  std::unique_ptr<ObjectIterator> object_iterator_;
};
    2647             : 
    2648             : // Abstract base class for checking whether a weak object should be retained.
class WeakObjectRetainer {
 public:
  // Virtual destructor: instances are destroyed through base pointers.
  virtual ~WeakObjectRetainer() {}

  // Return whether this object should be retained. If nullptr is returned the
  // object has no references. Otherwise the address of the retained object
  // should be returned as in some GC situations the object has been moved.
  virtual Object* RetainAs(Object* object) = 0;
};
    2658             : 
    2659             : // -----------------------------------------------------------------------------
    2660             : // Allows observation of allocations.
    2661             : class AllocationObserver {
    2662             :  public:
    2663             :   explicit AllocationObserver(intptr_t step_size)
    2664      165101 :       : step_size_(step_size), bytes_to_next_step_(step_size) {
    2665             :     DCHECK_LE(kPointerSize, step_size);
    2666             :   }
    2667      160163 :   virtual ~AllocationObserver() {}
    2668             : 
    2669             :   // Called each time the observed space does an allocation step. This may be
    2670             :   // more frequently than the step_size we are monitoring (e.g. when there are
    2671             :   // multiple observers, or when page or space boundary is encountered.)
    2672    22337675 :   void AllocationStep(int bytes_allocated, Address soon_object, size_t size) {
    2673    22337675 :     bytes_to_next_step_ -= bytes_allocated;
    2674    22337675 :     if (bytes_to_next_step_ <= 0) {
    2675             :       Step(static_cast<int>(step_size_ - bytes_to_next_step_), soon_object,
    2676      186692 :            size);
    2677      186692 :       step_size_ = GetNextStepSize();
    2678      186692 :       bytes_to_next_step_ = step_size_;
    2679             :     }
    2680    22337675 :   }
    2681             : 
    2682             :  protected:
    2683             :   intptr_t step_size() const { return step_size_; }
    2684             :   intptr_t bytes_to_next_step() const { return bytes_to_next_step_; }
    2685             : 
    2686             :   // Pure virtual method provided by the subclasses that gets called when at
    2687             :   // least step_size bytes have been allocated. soon_object is the address just
    2688             :   // allocated (but not yet initialized.) size is the size of the object as
    2689             :   // requested (i.e. w/o the alignment fillers). Some complexities to be aware
    2690             :   // of:
    2691             :   // 1) soon_object will be nullptr in cases where we end up observing an
    2692             :   //    allocation that happens to be a filler space (e.g. page boundaries.)
    2693             :   // 2) size is the requested size at the time of allocation. Right-trimming
    2694             :   //    may change the object size dynamically.
    2695             :   // 3) soon_object may actually be the first object in an allocation-folding
    2696             :   //    group. In such a case size is the size of the group rather than the
    2697             :   //    first object.
    2698             :   virtual void Step(int bytes_allocated, Address soon_object, size_t size) = 0;
    2699             : 
    2700             :   // Subclasses can override this method to make step size dynamic.
    2701      137295 :   virtual intptr_t GetNextStepSize() { return step_size_; }
    2702             : 
    2703             :   intptr_t step_size_;
    2704             :   intptr_t bytes_to_next_step_;
    2705             : 
    2706             :  private:
    2707             :   friend class Space;
    2708             :   DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
    2709             : };
    2710             : 
// Returns a human-readable name for the given allocation space (for logging
// and diagnostics).
V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space);
    2712             : 
    2713             : }  // namespace internal
    2714             : }  // namespace v8
    2715             : 
    2716             : #endif  // V8_HEAP_HEAP_H_

Generated by: LCOV version 1.10