LCOV - code coverage report
Current view: top level - src/heap - heap.cc (source / functions)
Test: app.info    Date: 2017-04-26
                 Hit     Total    Coverage
Lines:           1934    2311     83.7 %
Functions:        231     299     77.3 %
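(The coverage percentages are simply hits divided by totals: 1934 / 2311 ≈ 83.7 % of instrumented lines and 231 / 299 ≈ 77.3 % of functions were executed by the app.info test run.)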

          Line data    Source code
       1             : // Copyright 2012 the V8 project authors. All rights reserved.
       2             : // Use of this source code is governed by a BSD-style license that can be
       3             : // found in the LICENSE file.
       4             : 
       5             : #include "src/heap/heap.h"
       6             : 
       7             : #include "src/accessors.h"
       8             : #include "src/api.h"
       9             : #include "src/assembler-inl.h"
      10             : #include "src/ast/context-slot-cache.h"
      11             : #include "src/base/bits.h"
      12             : #include "src/base/once.h"
      13             : #include "src/base/utils/random-number-generator.h"
      14             : #include "src/bootstrapper.h"
      15             : #include "src/codegen.h"
      16             : #include "src/compilation-cache.h"
      17             : #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
      18             : #include "src/conversions.h"
      19             : #include "src/debug/debug.h"
      20             : #include "src/deoptimizer.h"
      21             : #include "src/feedback-vector.h"
      22             : #include "src/global-handles.h"
      23             : #include "src/heap/array-buffer-tracker-inl.h"
      24             : #include "src/heap/code-stats.h"
      25             : #include "src/heap/concurrent-marking.h"
      26             : #include "src/heap/embedder-tracing.h"
      27             : #include "src/heap/gc-idle-time-handler.h"
      28             : #include "src/heap/gc-tracer.h"
      29             : #include "src/heap/incremental-marking.h"
      30             : #include "src/heap/mark-compact-inl.h"
      31             : #include "src/heap/mark-compact.h"
      32             : #include "src/heap/memory-reducer.h"
      33             : #include "src/heap/object-stats.h"
      34             : #include "src/heap/objects-visiting-inl.h"
      35             : #include "src/heap/objects-visiting.h"
      36             : #include "src/heap/remembered-set.h"
      37             : #include "src/heap/scavenge-job.h"
      38             : #include "src/heap/scavenger-inl.h"
      39             : #include "src/heap/store-buffer.h"
      40             : #include "src/interpreter/interpreter.h"
      41             : #include "src/regexp/jsregexp.h"
      42             : #include "src/runtime-profiler.h"
      43             : #include "src/snapshot/natives.h"
      44             : #include "src/snapshot/serializer-common.h"
      45             : #include "src/snapshot/snapshot.h"
      46             : #include "src/tracing/trace-event.h"
      47             : #include "src/utils.h"
      48             : #include "src/v8.h"
      49             : #include "src/v8threads.h"
      50             : #include "src/vm-state-inl.h"
      51             : 
      52             : namespace v8 {
      53             : namespace internal {
      54             : 
      55             : 
      56             : struct Heap::StrongRootsList {
      57             :   Object** start;
      58             :   Object** end;
      59             :   StrongRootsList* next;
      60             : };
      61             : 
      62      118570 : class IdleScavengeObserver : public AllocationObserver {
      63             :  public:
      64             :   IdleScavengeObserver(Heap& heap, intptr_t step_size)
      65       60782 :       : AllocationObserver(step_size), heap_(heap) {}
      66             : 
      67       99851 :   void Step(int bytes_allocated, Address, size_t) override {
      68       99851 :     heap_.ScheduleIdleScavengeIfNeeded(bytes_allocated);
      69       99851 :   }
      70             : 
      71             :  private:
      72             :   Heap& heap_;
      73             : };
      74             : 
      75       60782 : Heap::Heap()
      76             :     : external_memory_(0),
      77             :       external_memory_limit_(kExternalAllocationSoftLimit),
      78             :       external_memory_at_last_mark_compact_(0),
      79             :       isolate_(nullptr),
      80             :       code_range_size_(0),
      81             :       // semispace_size_ should be a power of 2 and old_generation_size_ should
      82             :       // be a multiple of Page::kPageSize.
      83             :       max_semi_space_size_(8 * (kPointerSize / 4) * MB),
      84             :       initial_semispace_size_(MB),
      85             :       max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
      86             :       initial_max_old_generation_size_(max_old_generation_size_),
      87             :       initial_old_generation_size_(max_old_generation_size_ /
      88             :                                    kInitalOldGenerationLimitFactor),
      89             :       old_generation_size_configured_(false),
      90             :       max_executable_size_(256ul * (kPointerSize / 4) * MB),
      91             :       // Variables set based on semispace_size_ and old_generation_size_ in
      92             :       // ConfigureHeap.
      93             :       // Will be 4 * reserved_semispace_size_ to ensure that young
      94             :       // generation can be aligned to its size.
      95             :       maximum_committed_(0),
      96             :       survived_since_last_expansion_(0),
      97             :       survived_last_scavenge_(0),
      98             :       always_allocate_scope_count_(0),
      99             :       memory_pressure_level_(MemoryPressureLevel::kNone),
     100             :       out_of_memory_callback_(nullptr),
     101             :       out_of_memory_callback_data_(nullptr),
     102             :       contexts_disposed_(0),
     103             :       number_of_disposed_maps_(0),
     104             :       global_ic_age_(0),
     105             :       new_space_(nullptr),
     106             :       old_space_(NULL),
     107             :       code_space_(NULL),
     108             :       map_space_(NULL),
     109             :       lo_space_(NULL),
     110             :       gc_state_(NOT_IN_GC),
     111             :       gc_post_processing_depth_(0),
     112             :       allocations_count_(0),
     113             :       raw_allocations_hash_(0),
     114             :       ms_count_(0),
     115             :       gc_count_(0),
     116             :       remembered_unmapped_pages_index_(0),
     117             : #ifdef DEBUG
     118             :       allocation_timeout_(0),
     119             : #endif  // DEBUG
     120             :       old_generation_allocation_limit_(initial_old_generation_size_),
     121             :       inline_allocation_disabled_(false),
     122             :       tracer_(nullptr),
     123             :       promoted_objects_size_(0),
     124             :       promotion_ratio_(0),
     125             :       semi_space_copied_object_size_(0),
     126             :       previous_semi_space_copied_object_size_(0),
     127             :       semi_space_copied_rate_(0),
     128             :       nodes_died_in_new_space_(0),
     129             :       nodes_copied_in_new_space_(0),
     130             :       nodes_promoted_(0),
     131             :       maximum_size_scavenges_(0),
     132             :       last_idle_notification_time_(0.0),
     133             :       last_gc_time_(0.0),
     134             :       scavenge_collector_(nullptr),
     135             :       mark_compact_collector_(nullptr),
     136             :       minor_mark_compact_collector_(nullptr),
     137             :       memory_allocator_(nullptr),
     138             :       store_buffer_(nullptr),
     139             :       incremental_marking_(nullptr),
     140             :       concurrent_marking_(nullptr),
     141             :       gc_idle_time_handler_(nullptr),
     142             :       memory_reducer_(nullptr),
     143             :       live_object_stats_(nullptr),
     144             :       dead_object_stats_(nullptr),
     145             :       scavenge_job_(nullptr),
     146             :       idle_scavenge_observer_(nullptr),
     147             :       new_space_allocation_counter_(0),
     148             :       old_generation_allocation_counter_at_last_gc_(0),
     149             :       old_generation_size_at_last_gc_(0),
     150             :       gcs_since_last_deopt_(0),
     151             :       global_pretenuring_feedback_(nullptr),
     152             :       ring_buffer_full_(false),
     153             :       ring_buffer_end_(0),
     154             :       promotion_queue_(this),
     155             :       configured_(false),
     156             :       current_gc_flags_(Heap::kNoGCFlags),
     157             :       current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags),
     158             :       external_string_table_(this),
     159             :       gc_callbacks_depth_(0),
     160             :       deserialization_complete_(false),
     161             :       strong_roots_list_(NULL),
     162             :       heap_iterator_depth_(0),
     163             :       local_embedder_heap_tracer_(nullptr),
     164             :       fast_promotion_mode_(false),
     165             :       force_oom_(false),
     166             :       delay_sweeper_tasks_for_testing_(false),
     167      364692 :       pending_layout_change_object_(nullptr) {
     168             : // Allow build-time customization of the max semispace size. Building
     169             : // V8 with snapshots and a non-default max semispace size is much
     170             : // easier if you can define it as part of the build environment.
     171             : #if defined(V8_MAX_SEMISPACE_SIZE)
     172             :   max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
     173             : #endif
     174             : 
      175             :   // Ensure max_old_generation_size_ is a multiple of Page::kPageSize.
     176             :   DCHECK((max_old_generation_size_ & (Page::kPageSize - 1)) == 0);
     177             : 
     178       60782 :   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
     179             :   set_native_contexts_list(NULL);
     180             :   set_allocation_sites_list(Smi::kZero);
     181             :   set_encountered_weak_collections(Smi::kZero);
     182             :   set_encountered_weak_cells(Smi::kZero);
     183             :   set_encountered_transition_arrays(Smi::kZero);
      184             :   // Put a dummy entry in the remembered pages so we can find the list in the
     185             :   // minidump even if there are no real unmapped pages.
     186             :   RememberUnmappedPage(NULL, false);
     187       60782 : }
     188             : 
     189          24 : size_t Heap::Capacity() {
     190          24 :   if (!HasBeenSetUp()) return 0;
     191             : 
     192          48 :   return new_space_->Capacity() + OldGenerationCapacity();
     193             : }
     194             : 
     195      566053 : size_t Heap::OldGenerationCapacity() {
     196      566053 :   if (!HasBeenSetUp()) return 0;
     197             : 
     198     1698159 :   return old_space_->Capacity() + code_space_->Capacity() +
     199     1132106 :          map_space_->Capacity() + lo_space_->SizeOfObjects();
     200             : }
     201             : 
     202     1641339 : size_t Heap::CommittedOldGenerationMemory() {
     203     1641339 :   if (!HasBeenSetUp()) return 0;
     204             : 
     205     3282678 :   return old_space_->CommittedMemory() + code_space_->CommittedMemory() +
     206     3282678 :          map_space_->CommittedMemory() + lo_space_->Size();
     207             : }
     208             : 
     209     1534637 : size_t Heap::CommittedMemory() {
     210     1534637 :   if (!HasBeenSetUp()) return 0;
     211             : 
     212     1534637 :   return new_space_->CommittedMemory() + CommittedOldGenerationMemory();
     213             : }
     214             : 
     215             : 
     216           7 : size_t Heap::CommittedPhysicalMemory() {
     217           7 :   if (!HasBeenSetUp()) return 0;
     218             : 
     219          14 :   return new_space_->CommittedPhysicalMemory() +
     220          14 :          old_space_->CommittedPhysicalMemory() +
     221          14 :          code_space_->CommittedPhysicalMemory() +
     222           7 :          map_space_->CommittedPhysicalMemory() +
     223           7 :          lo_space_->CommittedPhysicalMemory();
     224             : }
     225             : 
     226      256492 : size_t Heap::CommittedMemoryExecutable() {
     227      128246 :   if (!HasBeenSetUp()) return 0;
     228             : 
     229      128246 :   return static_cast<size_t>(memory_allocator()->SizeExecutable());
     230             : }
     231             : 
     232             : 
     233      304355 : void Heap::UpdateMaximumCommitted() {
     234      608710 :   if (!HasBeenSetUp()) return;
     235             : 
     236      304355 :   const size_t current_committed_memory = CommittedMemory();
     237      304355 :   if (current_committed_memory > maximum_committed_) {
     238       84935 :     maximum_committed_ = current_committed_memory;
     239             :   }
     240             : }
     241             : 
     242          31 : size_t Heap::Available() {
     243          31 :   if (!HasBeenSetUp()) return 0;
     244             : 
     245             :   size_t total = 0;
     246             :   AllSpaces spaces(this);
     247         186 :   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
     248         155 :     total += space->Available();
     249             :   }
     250             :   return total;
     251             : }
     252             : 
     253             : 
     254           0 : bool Heap::HasBeenSetUp() {
     255    14678596 :   return old_space_ != NULL && code_space_ != NULL && map_space_ != NULL &&
     256     7308907 :          lo_space_ != NULL;
     257             : }
     258             : 
     259             : 
     260      108151 : GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
     261      138383 :                                               const char** reason) {
     262             :   // Is global GC requested?
     263      108151 :   if (space != NEW_SPACE) {
     264       38442 :     isolate_->counters()->gc_compactor_caused_by_request()->Increment();
     265       38442 :     *reason = "GC in old space requested";
     266       38442 :     return MARK_COMPACTOR;
     267             :   }
     268             : 
     269       69709 :   if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
     270         515 :     *reason = "GC in old space forced by flags";
     271         515 :     return MARK_COMPACTOR;
     272             :   }
     273             : 
     274       69290 :   if (incremental_marking()->NeedsFinalization() &&
     275          96 :       AllocationLimitOvershotByLargeMargin()) {
     276           5 :     *reason = "Incremental marking needs finalization";
     277           5 :     return MARK_COMPACTOR;
     278             :   }
     279             : 
     280             :   // Is there enough space left in OLD to guarantee that a scavenge can
     281             :   // succeed?
     282             :   //
     283             :   // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
     284             :   // for object promotion. It counts only the bytes that the memory
     285             :   // allocator has not yet allocated from the OS and assigned to any space,
     286             :   // and does not count available bytes already in the old space or code
     287             :   // space.  Undercounting is safe---we may get an unrequested full GC when
     288             :   // a scavenge would have succeeded.
     289       69189 :   if (memory_allocator()->MaxAvailable() <= new_space_->Size()) {
     290             :     isolate_->counters()
     291             :         ->gc_compactor_caused_by_oldspace_exhaustion()
     292           0 :         ->Increment();
     293           0 :     *reason = "scavenge might not succeed";
     294           0 :     return MARK_COMPACTOR;
     295             :   }
     296             : 
     297             :   // Default
     298       69189 :   *reason = NULL;
     299       69189 :   return YoungGenerationCollector();
     300             : }
     301             : 
     302           0 : void Heap::SetGCState(HeapState state) {
     303      245070 :   gc_state_ = state;
     304           0 : }
     305             : 
     306             : // TODO(1238405): Combine the infrastructure for --heap-stats and
     307             : // --log-gc to avoid the complicated preprocessor and flag testing.
     308           0 : void Heap::ReportStatisticsBeforeGC() {
     309             : // Heap::ReportHeapStatistics will also log NewSpace statistics when
      310             : // --log-gc is set.  The following logic is used to avoid
     311             : // double logging.
     312             : #ifdef DEBUG
     313             :   if (FLAG_heap_stats || FLAG_log_gc) new_space_->CollectStatistics();
     314             :   if (FLAG_heap_stats) {
     315             :     ReportHeapStatistics("Before GC");
     316             :   } else if (FLAG_log_gc) {
     317             :     new_space_->ReportStatistics();
     318             :   }
     319             :   if (FLAG_heap_stats || FLAG_log_gc) new_space_->ClearHistograms();
     320             : #else
     321           0 :   if (FLAG_log_gc) {
     322           0 :     new_space_->CollectStatistics();
     323           0 :     new_space_->ReportStatistics();
     324           0 :     new_space_->ClearHistograms();
     325             :   }
     326             : #endif  // DEBUG
     327           0 : }
     328             : 
     329             : 
     330          30 : void Heap::PrintShortHeapStatistics() {
     331          60 :   if (!FLAG_trace_gc_verbose) return;
     332             :   PrintIsolate(isolate_, "Memory allocator,   used: %6" PRIuS
     333             :                          " KB,"
     334             :                          " available: %6" PRIuS " KB\n",
     335             :                memory_allocator()->Size() / KB,
     336           0 :                memory_allocator()->Available() / KB);
     337             :   PrintIsolate(isolate_, "New space,          used: %6" PRIuS
     338             :                          " KB"
     339             :                          ", available: %6" PRIuS
     340             :                          " KB"
     341             :                          ", committed: %6" PRIuS " KB\n",
     342           0 :                new_space_->Size() / KB, new_space_->Available() / KB,
     343           0 :                new_space_->CommittedMemory() / KB);
     344             :   PrintIsolate(isolate_, "Old space,          used: %6" PRIuS
     345             :                          " KB"
     346             :                          ", available: %6" PRIuS
     347             :                          " KB"
     348             :                          ", committed: %6" PRIuS " KB\n",
     349           0 :                old_space_->SizeOfObjects() / KB, old_space_->Available() / KB,
     350           0 :                old_space_->CommittedMemory() / KB);
     351             :   PrintIsolate(isolate_, "Code space,         used: %6" PRIuS
     352             :                          " KB"
     353             :                          ", available: %6" PRIuS
     354             :                          " KB"
     355             :                          ", committed: %6" PRIuS "KB\n",
     356           0 :                code_space_->SizeOfObjects() / KB, code_space_->Available() / KB,
     357           0 :                code_space_->CommittedMemory() / KB);
     358             :   PrintIsolate(isolate_, "Map space,          used: %6" PRIuS
     359             :                          " KB"
     360             :                          ", available: %6" PRIuS
     361             :                          " KB"
     362             :                          ", committed: %6" PRIuS " KB\n",
     363           0 :                map_space_->SizeOfObjects() / KB, map_space_->Available() / KB,
     364           0 :                map_space_->CommittedMemory() / KB);
     365             :   PrintIsolate(isolate_, "Large object space, used: %6" PRIuS
     366             :                          " KB"
     367             :                          ", available: %6" PRIuS
     368             :                          " KB"
     369             :                          ", committed: %6" PRIuS " KB\n",
     370           0 :                lo_space_->SizeOfObjects() / KB, lo_space_->Available() / KB,
     371           0 :                lo_space_->CommittedMemory() / KB);
     372             :   PrintIsolate(isolate_, "All spaces,         used: %6" PRIuS
     373             :                          " KB"
     374             :                          ", available: %6" PRIuS
     375             :                          " KB"
     376             :                          ", committed: %6" PRIuS "KB\n",
     377           0 :                this->SizeOfObjects() / KB, this->Available() / KB,
     378           0 :                this->CommittedMemory() / KB);
     379             :   PrintIsolate(isolate_, "External memory reported: %6" PRId64 " KB\n",
     380           0 :                external_memory_ / KB);
     381             :   PrintIsolate(isolate_, "Total time spent in GC  : %.1f ms\n",
     382           0 :                total_gc_time_ms_);
     383             : }
     384             : 
     385             : // TODO(1238405): Combine the infrastructure for --heap-stats and
     386             : // --log-gc to avoid the complicated preprocessor and flag testing.
     387           0 : void Heap::ReportStatisticsAfterGC() {
      388             : // As in the before-GC reporting, we use some complicated logic to ensure that
     389             : // NewSpace statistics are logged exactly once when --log-gc is turned on.
     390             : #if defined(DEBUG)
     391             :   if (FLAG_heap_stats) {
     392             :     new_space_->CollectStatistics();
     393             :     ReportHeapStatistics("After GC");
     394             :   } else if (FLAG_log_gc) {
     395             :     new_space_->ReportStatistics();
     396             :   }
     397             : #else
     398           0 :   if (FLAG_log_gc) new_space_->ReportStatistics();
     399             : #endif  // DEBUG
     400           0 :   for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
     401             :        ++i) {
     402           0 :     int count = deferred_counters_[i];
     403           0 :     deferred_counters_[i] = 0;
     404           0 :     while (count > 0) {
     405           0 :       count--;
     406           0 :       isolate()->CountUsage(static_cast<v8::Isolate::UseCounterFeature>(i));
     407             :     }
     408             :   }
     409           0 : }
     410             : 
     411             : 
     412       13744 : void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) {
     413       13744 :   deferred_counters_[feature]++;
     414       13744 : }
     415             : 
     416       24393 : bool Heap::UncommitFromSpace() { return new_space_->UncommitFromSpace(); }
     417             : 
     418      122535 : void Heap::GarbageCollectionPrologue() {
     419      490140 :   TRACE_GC(tracer(), GCTracer::Scope::HEAP_PROLOGUE);
     420             :   {
     421             :     AllowHeapAllocation for_the_first_part_of_prologue;
     422      122535 :     gc_count_++;
     423             : 
     424             : #ifdef VERIFY_HEAP
     425             :     if (FLAG_verify_heap) {
     426             :       Verify();
     427             :     }
     428             : #endif
     429             :   }
     430             : 
     431             :   // Reset GC statistics.
     432      122535 :   promoted_objects_size_ = 0;
     433      122535 :   previous_semi_space_copied_object_size_ = semi_space_copied_object_size_;
     434      122535 :   semi_space_copied_object_size_ = 0;
     435      122535 :   nodes_died_in_new_space_ = 0;
     436      122535 :   nodes_copied_in_new_space_ = 0;
     437      122535 :   nodes_promoted_ = 0;
     438             : 
     439      122535 :   UpdateMaximumCommitted();
     440             : 
     441             : #ifdef DEBUG
     442             :   DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
     443             : 
     444             :   if (FLAG_gc_verbose) Print();
     445             : 
     446             :   ReportStatisticsBeforeGC();
     447             : #endif  // DEBUG
     448             : 
     449      245070 :   if (new_space_->IsAtMaximumCapacity()) {
     450        3965 :     maximum_size_scavenges_++;
     451             :   } else {
     452      118570 :     maximum_size_scavenges_ = 0;
     453             :   }
     454      122535 :   CheckNewSpaceExpansionCriteria();
     455      122535 :   UpdateNewSpaceAllocationCounter();
     456      122535 : }
     457             : 
     458      734182 : size_t Heap::SizeOfObjects() {
     459             :   size_t total = 0;
     460             :   AllSpaces spaces(this);
     461     4405092 :   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
     462     3670910 :     total += space->SizeOfObjects();
     463             :   }
     464      734182 :   return total;
     465             : }
     466             : 
     467             : 
     468           0 : const char* Heap::GetSpaceName(int idx) {
     469           0 :   switch (idx) {
     470             :     case NEW_SPACE:
     471             :       return "new_space";
     472             :     case OLD_SPACE:
     473           0 :       return "old_space";
     474             :     case MAP_SPACE:
     475           0 :       return "map_space";
     476             :     case CODE_SPACE:
     477           0 :       return "code_space";
     478             :     case LO_SPACE:
     479           0 :       return "large_object_space";
     480             :     default:
     481           0 :       UNREACHABLE();
     482             :   }
     483             :   return nullptr;
     484             : }
     485             : 
     486      237649 : void Heap::SetRootCodeStubs(UnseededNumberDictionary* value) {
     487      237649 :   roots_[kCodeStubsRootIndex] = value;
     488      237649 : }
     489             : 
     490       60739 : void Heap::RepairFreeListsAfterDeserialization() {
     491             :   PagedSpaces spaces(this);
     492      242956 :   for (PagedSpace* space = spaces.next(); space != NULL;
     493             :        space = spaces.next()) {
     494      182217 :     space->RepairFreeListsAfterDeserialization();
     495             :   }
     496       60739 : }
     497             : 
     498       59260 : void Heap::MergeAllocationSitePretenuringFeedback(
     499             :     const base::HashMap& local_pretenuring_feedback) {
     500             :   AllocationSite* site = nullptr;
     501      366726 :   for (base::HashMap::Entry* local_entry = local_pretenuring_feedback.Start();
     502             :        local_entry != nullptr;
     503             :        local_entry = local_pretenuring_feedback.Next(local_entry)) {
     504      248206 :     site = reinterpret_cast<AllocationSite*>(local_entry->key);
     505             :     MapWord map_word = site->map_word();
     506      248206 :     if (map_word.IsForwardingAddress()) {
     507        1891 :       site = AllocationSite::cast(map_word.ToForwardingAddress());
     508             :     }
     509             : 
     510             :     // We have not validated the allocation site yet, since we have not
      511             :     // dereferenced the site while collecting information.
     512             :     // This is an inlined check of AllocationMemento::IsValid.
     513      496412 :     if (!site->IsAllocationSite() || site->IsZombie()) continue;
     514             : 
     515             :     int value =
     516      229126 :         static_cast<int>(reinterpret_cast<intptr_t>(local_entry->value));
     517             :     DCHECK_GT(value, 0);
     518             : 
     519      229126 :     if (site->IncrementMementoFoundCount(value)) {
     520             :       global_pretenuring_feedback_->LookupOrInsert(site,
     521         948 :                                                    ObjectHash(site->address()));
     522             :     }
     523             :   }
     524       59260 : }
     525             : 
     526             : class Heap::SkipStoreBufferScope {
     527             :  public:
     528             :   explicit SkipStoreBufferScope(StoreBuffer* store_buffer)
     529             :       : store_buffer_(store_buffer) {
     530      122535 :     store_buffer_->MoveAllEntriesToRememberedSet();
     531             :     store_buffer_->SetMode(StoreBuffer::IN_GC);
     532             :   }
     533             : 
     534             :   ~SkipStoreBufferScope() {
     535             :     DCHECK(store_buffer_->Empty());
     536             :     store_buffer_->SetMode(StoreBuffer::NOT_IN_GC);
     537             :   }
     538             : 
     539             :  private:
     540             :   StoreBuffer* store_buffer_;
     541             : };
     542             : 
     543             : class Heap::PretenuringScope {
     544             :  public:
     545      122535 :   explicit PretenuringScope(Heap* heap) : heap_(heap) {
     546             :     heap_->global_pretenuring_feedback_ =
     547      245070 :         new base::HashMap(kInitialFeedbackCapacity);
     548      122535 :   }
     549             : 
     550      122535 :   ~PretenuringScope() {
     551      245070 :     delete heap_->global_pretenuring_feedback_;
     552      122535 :     heap_->global_pretenuring_feedback_ = nullptr;
     553      122535 :   }
     554             : 
     555             :  private:
     556             :   Heap* heap_;
     557             : };
     558             : 
     559             : 
     560      367695 : void Heap::ProcessPretenuringFeedback() {
     561             :   bool trigger_deoptimization = false;
     562      122535 :   if (FLAG_allocation_site_pretenuring) {
     563             :     int tenure_decisions = 0;
     564             :     int dont_tenure_decisions = 0;
     565             :     int allocation_mementos_found = 0;
     566             :     int allocation_sites = 0;
     567             :     int active_allocation_sites = 0;
     568             : 
     569             :     AllocationSite* site = nullptr;
     570             : 
     571             :     // Step 1: Digest feedback for recorded allocation sites.
     572             :     bool maximum_size_scavenge = MaximumSizeScavenge();
     573      371462 :     for (base::HashMap::Entry* e = global_pretenuring_feedback_->Start();
     574        3857 :          e != nullptr; e = global_pretenuring_feedback_->Next(e)) {
     575        3857 :       allocation_sites++;
     576        3857 :       site = reinterpret_cast<AllocationSite*>(e->key);
     577             :       int found_count = site->memento_found_count();
     578             :       // An entry in the storage does not imply that the count is > 0 because
     579             :       // allocation sites might have been reset due to too many objects dying
     580             :       // in old space.
     581        3857 :       if (found_count > 0) {
     582             :         DCHECK(site->IsAllocationSite());
     583        3857 :         active_allocation_sites++;
     584        3857 :         allocation_mementos_found += found_count;
     585        3857 :         if (site->DigestPretenuringFeedback(maximum_size_scavenge)) {
     586             :           trigger_deoptimization = true;
     587             :         }
     588        3857 :         if (site->GetPretenureMode() == TENURED) {
     589         166 :           tenure_decisions++;
     590             :         } else {
     591        3691 :           dont_tenure_decisions++;
     592             :         }
     593             :       }
     594             :     }
     595             : 
     596             :     // Step 2: Deopt maybe tenured allocation sites if necessary.
     597             :     bool deopt_maybe_tenured = DeoptMaybeTenuredAllocationSites();
     598      122535 :     if (deopt_maybe_tenured) {
     599             :       Object* list_element = allocation_sites_list();
     600        2683 :       while (list_element->IsAllocationSite()) {
     601             :         site = AllocationSite::cast(list_element);
     602             :         DCHECK(site->IsAllocationSite());
     603        2503 :         allocation_sites++;
     604        2503 :         if (site->IsMaybeTenure()) {
     605             :           site->set_deopt_dependent_code(true);
     606             :           trigger_deoptimization = true;
     607             :         }
     608             :         list_element = site->weak_next();
     609             :       }
     610             :     }
     611             : 
     612      122535 :     if (trigger_deoptimization) {
     613          68 :       isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
     614             :     }
     615             : 
     616      122535 :     if (FLAG_trace_pretenuring_statistics &&
     617           0 :         (allocation_mementos_found > 0 || tenure_decisions > 0 ||
     618             :          dont_tenure_decisions > 0)) {
     619             :       PrintIsolate(isolate(),
     620             :                    "pretenuring: deopt_maybe_tenured=%d visited_sites=%d "
     621             :                    "active_sites=%d "
     622             :                    "mementos=%d tenured=%d not_tenured=%d\n",
     623             :                    deopt_maybe_tenured ? 1 : 0, allocation_sites,
     624             :                    active_allocation_sites, allocation_mementos_found,
     625           0 :                    tenure_decisions, dont_tenure_decisions);
     626             :     }
     627             :   }
     628      122535 : }
     629             : 
     630             : 
     631          64 : void Heap::DeoptMarkedAllocationSites() {
     632             :   // TODO(hpayer): If iterating over the allocation sites list becomes a
     633             :   // performance issue, use a cache data structure in heap instead.
     634             :   Object* list_element = allocation_sites_list();
     635        1149 :   while (list_element->IsAllocationSite()) {
     636             :     AllocationSite* site = AllocationSite::cast(list_element);
     637        1021 :     if (site->deopt_dependent_code()) {
     638             :       site->dependent_code()->MarkCodeForDeoptimization(
     639         386 :           isolate_, DependentCode::kAllocationSiteTenuringChangedGroup);
     640             :       site->set_deopt_dependent_code(false);
     641             :     }
     642             :     list_element = site->weak_next();
     643             :   }
     644          64 :   Deoptimizer::DeoptimizeMarkedCode(isolate_);
     645          64 : }
     646             : 
     647             : 
     648     4662028 : void Heap::GarbageCollectionEpilogue() {
     649      490140 :   TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE);
     650             :   // In release mode, we only zap the from space under heap verification.
     651             :   if (Heap::ShouldZapGarbage()) {
     652             :     ZapFromSpace();
     653             :   }
     654             : 
     655             : #ifdef VERIFY_HEAP
     656             :   if (FLAG_verify_heap) {
     657             :     Verify();
     658             :   }
     659             : #endif
     660             : 
     661             :   AllowHeapAllocation for_the_rest_of_the_epilogue;
     662             : 
     663             : #ifdef DEBUG
     664             :   if (FLAG_print_global_handles) isolate_->global_handles()->Print();
     665             :   if (FLAG_print_handles) PrintHandles();
     666             :   if (FLAG_gc_verbose) Print();
     667             :   if (FLAG_code_stats) ReportCodeStatistics("After GC");
     668             :   if (FLAG_check_handle_count) CheckHandleCount();
     669             : #endif
     670      122535 :   if (FLAG_deopt_every_n_garbage_collections > 0) {
     671             :     // TODO(jkummerow/ulan/jarin): This is not safe! We can't assume that
     672             :     // the topmost optimized frame can be deoptimized safely, because it
     673             :     // might not have a lazy bailout point right after its current PC.
     674           0 :     if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) {
     675           0 :       Deoptimizer::DeoptimizeAll(isolate());
     676           0 :       gcs_since_last_deopt_ = 0;
     677             :     }
     678             :   }
     679             : 
     680      122535 :   UpdateMaximumCommitted();
     681             : 
     682             :   isolate_->counters()->alive_after_last_gc()->Set(
     683     3923969 :       static_cast<int>(SizeOfObjects()));
     684             : 
     685             :   isolate_->counters()->string_table_capacity()->Set(
     686      245070 :       string_table()->Capacity());
     687             :   isolate_->counters()->number_of_symbols()->Set(
     688      245070 :       string_table()->NumberOfElements());
     689             : 
     690      122535 :   if (CommittedMemory() > 0) {
     691             :     isolate_->counters()->external_fragmentation_total()->AddSample(
     692      245070 :         static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
     693             : 
     694             :     isolate_->counters()->heap_fraction_new_space()->AddSample(static_cast<int>(
     695      245070 :         (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
     696             :     isolate_->counters()->heap_fraction_old_space()->AddSample(static_cast<int>(
     697      245070 :         (old_space()->CommittedMemory() * 100.0) / CommittedMemory()));
     698             :     isolate_->counters()->heap_fraction_code_space()->AddSample(
     699      245070 :         static_cast<int>((code_space()->CommittedMemory() * 100.0) /
     700      367605 :                          CommittedMemory()));
     701             :     isolate_->counters()->heap_fraction_map_space()->AddSample(static_cast<int>(
     702      245070 :         (map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
     703             :     isolate_->counters()->heap_fraction_lo_space()->AddSample(static_cast<int>(
     704      245070 :         (lo_space()->CommittedMemory() * 100.0) / CommittedMemory()));
     705             : 
     706             :     isolate_->counters()->heap_sample_total_committed()->AddSample(
     707      245070 :         static_cast<int>(CommittedMemory() / KB));
     708             :     isolate_->counters()->heap_sample_total_used()->AddSample(
     709      245070 :         static_cast<int>(SizeOfObjects() / KB));
     710             :     isolate_->counters()->heap_sample_map_space_committed()->AddSample(
     711      245070 :         static_cast<int>(map_space()->CommittedMemory() / KB));
     712             :     isolate_->counters()->heap_sample_code_space_committed()->AddSample(
     713      245070 :         static_cast<int>(code_space()->CommittedMemory() / KB));
     714             : 
     715             :     isolate_->counters()->heap_sample_maximum_committed()->AddSample(
     716      245070 :         static_cast<int>(MaximumCommittedMemory() / KB));
     717             :   }
     718             : 
     719             : #define UPDATE_COUNTERS_FOR_SPACE(space)                \
     720             :   isolate_->counters()->space##_bytes_available()->Set( \
     721             :       static_cast<int>(space()->Available()));          \
     722             :   isolate_->counters()->space##_bytes_committed()->Set( \
     723             :       static_cast<int>(space()->CommittedMemory()));    \
     724             :   isolate_->counters()->space##_bytes_used()->Set(      \
     725             :       static_cast<int>(space()->SizeOfObjects()));
     726             : #define UPDATE_FRAGMENTATION_FOR_SPACE(space)                          \
     727             :   if (space()->CommittedMemory() > 0) {                                \
     728             :     isolate_->counters()->external_fragmentation_##space()->AddSample( \
     729             :         static_cast<int>(100 -                                         \
     730             :                          (space()->SizeOfObjects() * 100.0) /          \
     731             :                              space()->CommittedMemory()));             \
     732             :   }
     733             : #define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \
     734             :   UPDATE_COUNTERS_FOR_SPACE(space)                         \
     735             :   UPDATE_FRAGMENTATION_FOR_SPACE(space)
     736             : 
     737      735210 :   UPDATE_COUNTERS_FOR_SPACE(new_space)
     738     1225350 :   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
     739     1225350 :   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
     740     1225350 :   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
     741      866292 :   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space)
     742             : #undef UPDATE_COUNTERS_FOR_SPACE
     743             : #undef UPDATE_FRAGMENTATION_FOR_SPACE
     744             : #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
     745             : 
     746             : #ifdef DEBUG
     747             :   ReportStatisticsAfterGC();
     748             : #endif  // DEBUG
     749             : 
     750             :   // Remember the last top pointer so that we can later find out
     751             :   // whether we allocated in new space since the last GC.
     752      122535 :   new_space_top_after_last_gc_ = new_space()->top();
     753      122535 :   last_gc_time_ = MonotonicallyIncreasingTimeInMs();
     754             : 
     755             :   {
     756      490140 :     TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE);
     757      245070 :     ReduceNewSpaceSize();
     758      122535 :   }
     759      122535 : }
     760             : 
     761             : 
     762       53346 : void Heap::PreprocessStackTraces() {
     763             :   WeakFixedArray::Iterator iterator(weak_stack_trace_list());
     764             :   FixedArray* elements;
     765       53346 :   while ((elements = iterator.Next<FixedArray>()) != nullptr) {
     766           0 :     for (int j = 1; j < elements->length(); j += 4) {
     767           0 :       Object* maybe_code = elements->get(j + 2);
     768             :       // If GC happens while adding a stack trace to the weak fixed array,
     769             :       // which has been copied into a larger backing store, we may run into
     770             :       // a stack trace that has already been preprocessed. Guard against this.
     771           0 :       if (!maybe_code->IsAbstractCode()) break;
     772             :       AbstractCode* abstract_code = AbstractCode::cast(maybe_code);
     773           0 :       int offset = Smi::cast(elements->get(j + 3))->value();
     774           0 :       int pos = abstract_code->SourcePosition(offset);
     775             :       elements->set(j + 2, Smi::FromInt(pos));
     776             :     }
     777             :   }
     778             :   // We must not compact the weak fixed list here, as we may be in the middle
      779             :   // of writing to it when the GC was triggered. Instead, we reset the root value.
     780             :   set_weak_stack_trace_list(Smi::kZero);
     781       53346 : }
     782             : 
     783             : 
     784             : class GCCallbacksScope {
     785             :  public:
     786             :   explicit GCCallbacksScope(Heap* heap) : heap_(heap) {
     787      246454 :     heap_->gc_callbacks_depth_++;
     788             :   }
     789      246454 :   ~GCCallbacksScope() { heap_->gc_callbacks_depth_--; }
     790             : 
     791             :   bool CheckReenter() { return heap_->gc_callbacks_depth_ == 1; }
     792             : 
     793             :  private:
     794             :   Heap* heap_;
     795             : };
     796             : 
     797             : 
     798         834 : void Heap::HandleGCRequest() {
     799         417 :   if (HighMemoryPressure()) {
     800             :     incremental_marking()->reset_request_type();
     801           6 :     CheckMemoryPressure();
     802         411 :   } else if (incremental_marking()->request_type() ==
     803             :              IncrementalMarking::COMPLETE_MARKING) {
     804             :     incremental_marking()->reset_request_type();
     805             :     CollectAllGarbage(current_gc_flags_,
     806             :                       GarbageCollectionReason::kFinalizeMarkingViaStackGuard,
     807         161 :                       current_gc_callback_flags_);
     808         250 :   } else if (incremental_marking()->request_type() ==
     809         208 :                  IncrementalMarking::FINALIZATION &&
     810         458 :              incremental_marking()->IsMarking() &&
     811         208 :              !incremental_marking()->finalize_marking_completed()) {
     812             :     incremental_marking()->reset_request_type();
     813             :     FinalizeIncrementalMarking(
     814         208 :         GarbageCollectionReason::kFinalizeMarkingViaStackGuard);
     815             :   }
     816         417 : }
     817             : 
     818             : 
     819           0 : void Heap::ScheduleIdleScavengeIfNeeded(int bytes_allocated) {
     820       99851 :   scavenge_job_->ScheduleIdleTaskIfNeeded(this, bytes_allocated);
     821           0 : }
     822             : 
     823        3460 : void Heap::FinalizeIncrementalMarking(GarbageCollectionReason gc_reason) {
     824         692 :   if (FLAG_trace_incremental_marking) {
     825             :     isolate()->PrintWithTimestamp(
     826             :         "[IncrementalMarking] (%s).\n",
     827           0 :         Heap::GarbageCollectionReasonToString(gc_reason));
     828             :   }
     829             : 
     830             :   HistogramTimerScope incremental_marking_scope(
     831         692 :       isolate()->counters()->gc_incremental_marking_finalize());
     832        2076 :   TRACE_EVENT0("v8", "V8.GCIncrementalMarkingFinalize");
     833        2768 :   TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE);
     834             : 
     835             :   {
     836             :     GCCallbacksScope scope(this);
     837         692 :     if (scope.CheckReenter()) {
     838             :       AllowHeapAllocation allow_allocation;
     839        2768 :       TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE);
     840        1384 :       VMState<EXTERNAL> state(isolate_);
     841         692 :       HandleScope handle_scope(isolate_);
     842        1384 :       CallGCPrologueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags);
     843             :     }
     844             :   }
     845         692 :   incremental_marking()->FinalizeIncrementally();
     846             :   {
     847             :     GCCallbacksScope scope(this);
     848         692 :     if (scope.CheckReenter()) {
     849             :       AllowHeapAllocation allow_allocation;
     850        2768 :       TRACE_GC(tracer(), GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE);
     851        1384 :       VMState<EXTERNAL> state(isolate_);
     852         692 :       HandleScope handle_scope(isolate_);
     853        1384 :       CallGCEpilogueCallbacks(kGCTypeIncrementalMarking, kNoGCCallbackFlags);
     854             :     }
     855             :   }
     856         692 : }
     857             : 
     858             : 
     859      177083 : HistogramTimer* Heap::GCTypeTimer(GarbageCollector collector) {
     860      122535 :   if (IsYoungGenerationCollector(collector)) {
     861      122535 :     return isolate_->counters()->gc_scavenger();
     862             :   } else {
     863       53346 :     if (!incremental_marking()->IsStopped()) {
     864        1202 :       if (ShouldReduceMemory()) {
     865          16 :         return isolate_->counters()->gc_finalize_reduce_memory();
     866             :       } else {
     867        2388 :         return isolate_->counters()->gc_finalize();
     868             :       }
     869             :     } else {
     870      104288 :       return isolate_->counters()->gc_compactor();
     871             :     }
     872             :   }
     873             : }
     874             : 
     875       35269 : void Heap::CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
     876             :                              const v8::GCCallbackFlags gc_callback_flags) {
     877             :   // Since we are ignoring the return value, the exact choice of space does
     878             :   // not matter, so long as we do not specify NEW_SPACE, which would not
     879             :   // cause a full GC.
     880             :   set_current_gc_flags(flags);
     881       36453 :   CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
     882             :   set_current_gc_flags(kNoGCFlags);
     883       35269 : }
     884             : 
     885        7192 : void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
     886             :   // Since we are ignoring the return value, the exact choice of space does
     887             :   // not matter, so long as we do not specify NEW_SPACE, which would not
     888             :   // cause a full GC.
     889             :   // Major GC would invoke weak handle callbacks on weakly reachable
     890             :   // handles, but won't collect weakly reachable objects until next
     891             :   // major GC.  Therefore if we collect aggressively and weak handle callback
     892             :   // has been invoked, we rerun major GC to release objects which become
     893             :   // garbage.
     894             :   // Note: as weak callbacks can execute arbitrary code, we cannot
      895             :   // hope that eventually there will be no weak callback invocations.
     896             :   // Therefore stop recollecting after several attempts.
     897        7192 :   if (gc_reason == GarbageCollectionReason::kLastResort) {
     898             :     InvokeOutOfMemoryCallback();
     899             :   }
     900       14384 :   RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GC_AllAvailableGarbage);
     901        7192 :   if (isolate()->concurrent_recompilation_enabled()) {
     902             :     // The optimizing compiler may be unnecessarily holding on to memory.
     903             :     DisallowHeapAllocation no_recursive_gc;
     904             :     isolate()->optimizing_compile_dispatcher()->Flush(
     905        7127 :         OptimizingCompileDispatcher::BlockingBehavior::kDontBlock);
     906             :   }
     907        7192 :   isolate()->ClearSerializerData();
     908             :   set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask);
     909        7192 :   isolate_->compilation_cache()->Clear();
     910             :   const int kMaxNumberOfAttempts = 7;
     911             :   const int kMinNumberOfAttempts = 2;
     912       14384 :   for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
     913       14384 :     if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL,
     914       14384 :                         v8::kGCCallbackFlagCollectAllAvailableGarbage) &&
     915             :         attempt + 1 >= kMinNumberOfAttempts) {
     916             :       break;
     917             :     }
     918             :   }
     919             :   set_current_gc_flags(kNoGCFlags);
     920        7192 :   new_space_->Shrink();
     921             :   UncommitFromSpace();
     922        7192 : }
     923             : 
     924        1406 : void Heap::ReportExternalMemoryPressure() {
     925        1238 :   if (external_memory_ >
     926         619 :       (external_memory_at_last_mark_compact_ + external_memory_hard_limit())) {
     927             :     CollectAllGarbage(
     928             :         kReduceMemoryFootprintMask | kFinalizeIncrementalMarkingMask,
     929             :         GarbageCollectionReason::kExternalMemoryPressure,
     930             :         static_cast<GCCallbackFlags>(kGCCallbackFlagCollectAllAvailableGarbage |
     931             :                                      kGCCallbackFlagCollectAllExternalMemory));
     932         619 :     return;
     933             :   }
     934         503 :   if (incremental_marking()->IsStopped()) {
     935         219 :     if (incremental_marking()->CanBeActivated()) {
     936             :       StartIncrementalMarking(
     937             :           i::Heap::kNoGCFlags, GarbageCollectionReason::kExternalMemoryPressure,
     938             :           static_cast<GCCallbackFlags>(
     939             :               kGCCallbackFlagSynchronousPhantomCallbackProcessing |
     940             :               kGCCallbackFlagCollectAllExternalMemory));
     941             :     } else {
     942             :       CollectAllGarbage(i::Heap::kNoGCFlags,
     943             :                         GarbageCollectionReason::kExternalMemoryPressure,
     944             :                         kGCCallbackFlagSynchronousPhantomCallbackProcessing);
     945             :     }
     946             :   } else {
      947             :     // Incremental marking is turned on and has already been started.
     948             :     const double pressure =
     949         284 :         static_cast<double>(external_memory_ -
     950         284 :                             external_memory_at_last_mark_compact_ -
     951         284 :                             kExternalAllocationSoftLimit) /
     952         284 :         external_memory_hard_limit();
     953             :     DCHECK_GE(1, pressure);
     954             :     const double kMaxStepSizeOnExternalLimit = 25;
     955         284 :     const double deadline = MonotonicallyIncreasingTimeInMs() +
     956         284 :                             pressure * kMaxStepSizeOnExternalLimit;
     957             :     incremental_marking()->AdvanceIncrementalMarking(
     958             :         deadline, IncrementalMarking::GC_VIA_STACK_GUARD,
     959         284 :         IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8);
     960             :   }
     961             : }
     962             : 
     963             : 
     964      122535 : void Heap::EnsureFillerObjectAtTop() {
     965             :   // There may be an allocation memento behind objects in new space. Upon
     966             :   // evacuation of a non-full new space (or if we are on the last page) there
     967             :   // may be uninitialized memory behind top. We fill the remainder of the page
     968             :   // with a filler.
     969      122535 :   Address to_top = new_space_->top();
     970      122535 :   Page* page = Page::FromAddress(to_top - kPointerSize);
     971      122535 :   if (page->Contains(to_top)) {
     972       81025 :     int remaining_in_page = static_cast<int>(page->area_end() - to_top);
     973       81025 :     CreateFillerObjectAt(to_top, remaining_in_page, ClearRecordedSlots::kNo);
     974             :   }
     975      122535 : }
     976             : 
     977      122535 : bool Heap::CollectGarbage(GarbageCollector collector,
     978             :                           GarbageCollectionReason gc_reason,
     979             :                           const char* collector_reason,
     980      435411 :                           const v8::GCCallbackFlags gc_callback_flags) {
     981             :   // The VM is in the GC state until exiting this function.
     982      122535 :   VMState<GC> state(isolate_);
     983      245070 :   RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GC);
     984             : 
     985             : #ifdef DEBUG
     986             :   // Reset the allocation timeout to the GC interval, but make sure to
     987             :   // allow at least a few allocations after a collection. The reason
     988             :   // for this is that we have a lot of allocation sequences and we
     989             :   // assume that a garbage collection will allow the subsequent
     990             :   // allocation attempts to go through.
     991             :   allocation_timeout_ = Max(6, FLAG_gc_interval);
     992             : #endif
     993             : 
     994      122535 :   EnsureFillerObjectAtTop();
     995             : 
     996      191724 :   if (IsYoungGenerationCollector(collector) &&
     997             :       !incremental_marking()->IsStopped()) {
     998         830 :     if (FLAG_trace_incremental_marking) {
     999             :       isolate()->PrintWithTimestamp(
    1000           0 :           "[IncrementalMarking] Scavenge during marking.\n");
    1001             :     }
    1002             :   }
    1003             : 
    1004             :   bool next_gc_likely_to_collect_more = false;
    1005             :   size_t committed_memory_before = 0;
    1006             : 
    1007      122535 :   if (collector == MARK_COMPACTOR) {
    1008       53346 :     committed_memory_before = CommittedOldGenerationMemory();
    1009             :   }
    1010             : 
    1011             :   {
    1012      122535 :     tracer()->Start(collector, gc_reason, collector_reason);
    1013             :     DCHECK(AllowHeapAllocation::IsAllowed());
    1014             :     DisallowHeapAllocation no_allocation_during_gc;
    1015      122535 :     GarbageCollectionPrologue();
    1016             : 
    1017             :     {
    1018      122535 :       HistogramTimer* gc_type_timer = GCTypeTimer(collector);
    1019             :       HistogramTimerScope histogram_timer_scope(gc_type_timer);
    1020      367605 :       TRACE_EVENT0("v8", gc_type_timer->name());
    1021             : 
    1022             :       next_gc_likely_to_collect_more =
    1023      122535 :           PerformGarbageCollection(collector, gc_callback_flags);
    1024             :     }
    1025             : 
    1026      122535 :     GarbageCollectionEpilogue();
    1027      122535 :     if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
    1028       53346 :       isolate()->CheckDetachedContextsAfterGC();
    1029             :     }
    1030             : 
    1031      122535 :     if (collector == MARK_COMPACTOR) {
    1032       53346 :       size_t committed_memory_after = CommittedOldGenerationMemory();
    1033       53346 :       size_t used_memory_after = PromotedSpaceSizeOfObjects();
    1034             :       MemoryReducer::Event event;
    1035       53346 :       event.type = MemoryReducer::kMarkCompact;
    1036       53346 :       event.time_ms = MonotonicallyIncreasingTimeInMs();
    1037             :       // Trigger one more GC if
    1038             :       // - this GC decreased committed memory,
    1039             :       // - there is high fragmentation,
    1040             :       // - there are live detached contexts.
    1041             :       event.next_gc_likely_to_collect_more =
    1042      105309 :           (committed_memory_before > committed_memory_after + MB) ||
    1043      105309 :           HasHighFragmentation(used_memory_after, committed_memory_after) ||
    1044       53346 :           (detached_contexts()->length() > 0);
    1045       53346 :       event.committed_memory = committed_memory_after;
    1046       53346 :       if (deserialization_complete_) {
    1047       53346 :         memory_reducer_->NotifyMarkCompact(event);
    1048             :       }
    1049             :       memory_pressure_level_.SetValue(MemoryPressureLevel::kNone);
    1050             :     }
    1051             : 
    1052      122535 :     tracer()->Stop(collector);
    1053             :   }
    1054             : 
    1055      175881 :   if (collector == MARK_COMPACTOR &&
    1056       53346 :       (gc_callback_flags & (kGCCallbackFlagForced |
    1057             :                             kGCCallbackFlagCollectAllAvailableGarbage)) != 0) {
    1058       25023 :     isolate()->CountUsage(v8::Isolate::kForcedGC);
    1059             :   }
    1060             : 
    1061             :   // Start incremental marking for the next cycle. The heap snapshot
    1062             :   // generator needs incremental marking to stay off after it aborted.
     1063             :   // We do this only for the scavenger to avoid a loop where a mark-compact
     1064             :   // causes another mark-compact.
    1065      191724 :   if (IsYoungGenerationCollector(collector) &&
    1066             :       !ShouldAbortIncrementalMarking()) {
    1067             :     StartIncrementalMarkingIfAllocationLimitIsReached(kNoGCFlags,
    1068       69177 :                                                       kNoGCCallbackFlags);
    1069             :   }
    1070             : 
    1071      122535 :   return next_gc_likely_to_collect_more;
    1072             : }
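
The MemoryReducer event filled in above decides whether one more mark-compact is likely to help: committed memory shrank, fragmentation is high, or detached contexts are still alive. A condensed sketch of that predicate, with HasHighFragmentation reduced to a hypothetical ratio check:

#include <cstddef>
#include <cstdio>

constexpr size_t MB = 1024 * 1024;

// Simplified stand-in for Heap::HasHighFragmentation(used, committed):
// assume "high" means less than half of the committed memory is live.
bool HasHighFragmentation(size_t used, size_t committed) {
  return committed > 2 * used;
}

bool NextGcLikelyToCollectMore(size_t committed_before, size_t committed_after,
                               size_t used_after, int live_detached_contexts) {
  return (committed_before > committed_after + MB) ||
         HasHighFragmentation(used_after, committed_after) ||
         (live_detached_contexts > 0);
}

int main() {
  // The mark-compact released 8 MB of committed memory, so another GC may help.
  std::printf("%d\n", NextGcLikelyToCollectMore(128 * MB, 120 * MB, 100 * MB, 0));
}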
    1073             : 
    1074             : 
    1075       17172 : int Heap::NotifyContextDisposed(bool dependant_context) {
    1076        5720 :   if (!dependant_context) {
    1077          12 :     tracer()->ResetSurvivalEvents();
    1078          12 :     old_generation_size_configured_ = false;
    1079             :     MemoryReducer::Event event;
    1080          12 :     event.type = MemoryReducer::kPossibleGarbage;
    1081          12 :     event.time_ms = MonotonicallyIncreasingTimeInMs();
    1082          12 :     memory_reducer_->NotifyPossibleGarbage(event);
    1083             :   }
    1084        5720 :   if (isolate()->concurrent_recompilation_enabled()) {
    1085             :     // Flush the queued recompilation tasks.
    1086             :     isolate()->optimizing_compile_dispatcher()->Flush(
    1087        5704 :         OptimizingCompileDispatcher::BlockingBehavior::kDontBlock);
    1088             :   }
    1089             :   AgeInlineCaches();
    1090        5720 :   number_of_disposed_maps_ = retained_maps()->Length();
    1091       11440 :   tracer()->AddContextDisposalTime(MonotonicallyIncreasingTimeInMs());
    1092        5720 :   return ++contexts_disposed_;
    1093             : }
    1094             : 
    1095         446 : void Heap::StartIncrementalMarking(int gc_flags,
    1096             :                                    GarbageCollectionReason gc_reason,
    1097        1216 :                                    GCCallbackFlags gc_callback_flags) {
    1098             :   DCHECK(incremental_marking()->IsStopped());
    1099             :   set_current_gc_flags(gc_flags);
    1100        1216 :   current_gc_callback_flags_ = gc_callback_flags;
    1101        1216 :   incremental_marking()->Start(gc_reason);
    1102         446 : }
    1103             : 
    1104     2915858 : void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
    1105     2919433 :     int gc_flags, const GCCallbackFlags gc_callback_flags) {
    1106     2915858 :   if (incremental_marking()->IsStopped()) {
    1107     2901890 :     IncrementalMarkingLimit reached_limit = IncrementalMarkingLimitReached();
    1108     2901885 :     if (reached_limit == IncrementalMarkingLimit::kSoftLimit) {
    1109        3575 :       incremental_marking()->incremental_marking_job()->ScheduleTask(this);
    1110     2898310 :     } else if (reached_limit == IncrementalMarkingLimit::kHardLimit) {
    1111             :       StartIncrementalMarking(gc_flags,
    1112             :                               GarbageCollectionReason::kAllocationLimit,
    1113             :                               gc_callback_flags);
    1114             :     }
    1115             :   }
    1116     2915853 : }
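
The function above distinguishes a soft limit, where a marking job is merely scheduled, from a hard limit, where marking starts immediately. A minimal dispatch sketch; the byte thresholds are hypothetical, the real heap derives them from its allocation limits:

#include <cstddef>
#include <cstdio>

enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };

// Hypothetical thresholds standing in for IncrementalMarkingLimitReached().
IncrementalMarkingLimit LimitReached(size_t old_generation_bytes) {
  if (old_generation_bytes > 96 * 1024 * 1024) return IncrementalMarkingLimit::kHardLimit;
  if (old_generation_bytes > 64 * 1024 * 1024) return IncrementalMarkingLimit::kSoftLimit;
  return IncrementalMarkingLimit::kNoLimit;
}

void MaybeStartMarking(size_t old_generation_bytes) {
  switch (LimitReached(old_generation_bytes)) {
    case IncrementalMarkingLimit::kSoftLimit:
      std::puts("schedule an incremental-marking job");  // deferred start
      break;
    case IncrementalMarkingLimit::kHardLimit:
      std::puts("start incremental marking now");        // immediate start
      break;
    case IncrementalMarkingLimit::kNoLimit:
      break;
  }
}

int main() { MaybeStartMarking(80 * 1024 * 1024); }  // takes the soft-limit path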
    1117             : 
    1118           6 : void Heap::StartIdleIncrementalMarking(GarbageCollectionReason gc_reason) {
    1119           6 :   gc_idle_time_handler_->ResetNoProgressCounter();
    1120             :   StartIncrementalMarking(kReduceMemoryFootprintMask, gc_reason,
    1121             :                           kNoGCCallbackFlags);
    1122           6 : }
    1123             : 
    1124             : 
    1125        9941 : void Heap::MoveElements(FixedArray* array, int dst_index, int src_index,
    1126        9941 :                         int len) {
    1127       19882 :   if (len == 0) return;
    1128             : 
    1129             :   DCHECK(array->map() != fixed_cow_array_map());
    1130        9941 :   Object** dst_objects = array->data_start() + dst_index;
    1131        9941 :   MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize);
    1132       19882 :   FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(this, array, dst_index, len);
    1133             : }
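
MoveElements relies on MemMove handling overlapping source and destination ranges, like std::memmove, and then records the moved slots with a write barrier. A tiny standalone illustration of the overlapping move itself (plain pointers, no write barrier):

#include <cstdio>
#include <cstring>

int main() {
  void* slots[6] = {nullptr, (void*)0x1, (void*)0x2, (void*)0x3, nullptr, nullptr};
  // Shift three elements from index 1 to index 2; the ranges overlap, which
  // memmove (unlike memcpy) handles correctly.
  std::memmove(&slots[2], &slots[1], 3 * sizeof(void*));
  for (void* p : slots) std::printf("%p ", p);
  std::printf("\n");  // (nil) 0x1 0x1 0x2 0x3 (nil)
}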
    1134             : 
    1135             : 
    1136             : #ifdef VERIFY_HEAP
    1137             : // Helper class for verifying the string table.
    1138             : class StringTableVerifier : public ObjectVisitor {
    1139             :  public:
    1140             :   void VisitPointers(HeapObject* host, Object** start, Object** end) override {
    1141             :     // Visit all HeapObject pointers in [start, end).
    1142             :     for (Object** p = start; p < end; p++) {
    1143             :       if ((*p)->IsHeapObject()) {
    1144             :         HeapObject* object = HeapObject::cast(*p);
    1145             :         Isolate* isolate = object->GetIsolate();
    1146             :         // Check that the string is actually internalized.
    1147             :         CHECK(object->IsTheHole(isolate) || object->IsUndefined(isolate) ||
    1148             :               object->IsInternalizedString());
    1149             :       }
    1150             :     }
    1151             :   }
    1152             : };
    1153             : 
    1154             : 
    1155             : static void VerifyStringTable(Heap* heap) {
    1156             :   StringTableVerifier verifier;
    1157             :   heap->string_table()->IterateElements(&verifier);
    1158             : }
    1159             : #endif  // VERIFY_HEAP
    1160             : 
    1161    52177855 : bool Heap::ReserveSpace(Reservation* reservations, List<Address>* maps) {
    1162             :   bool gc_performed = true;
    1163             :   int counter = 0;
    1164             :   static const int kThreshold = 20;
    1165      504024 :   while (gc_performed && counter++ < kThreshold) {
    1166             :     gc_performed = false;
    1167      840028 :     for (int space = NEW_SPACE; space < SerializerDeserializer::kNumberOfSpaces;
    1168             :          space++) {
    1169      840046 :       Reservation* reservation = &reservations[space];
    1170             :       DCHECK_LE(1, reservation->length());
    1171      840046 :       if (reservation->at(0).size == 0) continue;
    1172             :       bool perform_gc = false;
    1173      396952 :       if (space == MAP_SPACE) {
    1174             :         // We allocate each map individually to avoid fragmentation.
    1175             :         maps->Clear();
    1176             :         DCHECK_EQ(1, reservation->length());
    1177      167605 :         int num_maps = reservation->at(0).size / Map::kSize;
    1178   104354212 :         for (int i = 0; i < num_maps; i++) {
    1179             :           // The deserializer will update the skip list.
    1180             :           AllocationResult allocation = map_space()->AllocateRawUnaligned(
    1181    52009499 :               Map::kSize, PagedSpace::IGNORE_SKIP_LIST);
    1182             :           HeapObject* free_space = nullptr;
    1183    52009498 :           if (allocation.To(&free_space)) {
    1184             :             // Mark with a free list node, in case we have a GC before
    1185             :             // deserializing.
    1186    52009498 :             Address free_space_address = free_space->address();
    1187             :             CreateFillerObjectAt(free_space_address, Map::kSize,
    1188    52009498 :                                  ClearRecordedSlots::kNo);
    1189    52009500 :             maps->Add(free_space_address);
    1190             :           } else {
    1191             :             perform_gc = true;
    1192           0 :             break;
    1193             :           }
    1194             :         }
    1195      229345 :       } else if (space == LO_SPACE) {
    1196             :         // Just check that we can allocate during deserialization.
    1197             :         DCHECK_EQ(1, reservation->length());
    1198          72 :         perform_gc = !CanExpandOldGeneration(reservation->at(0).size);
    1199             :       } else {
    1200      809465 :         for (auto& chunk : *reservation) {
    1201             :           AllocationResult allocation;
    1202      350865 :           int size = chunk.size;
    1203             :           DCHECK_LE(static_cast<size_t>(size),
    1204             :                     MemoryAllocator::PageAreaSize(
    1205             :                         static_cast<AllocationSpace>(space)));
    1206      350865 :           if (space == NEW_SPACE) {
    1207             :             allocation = new_space()->AllocateRawUnaligned(size);
    1208             :           } else {
    1209             :             // The deserializer will update the skip list.
    1210             :             allocation = paged_space(space)->AllocateRawUnaligned(
    1211      350511 :                 size, PagedSpace::IGNORE_SKIP_LIST);
    1212             :           }
    1213             :           HeapObject* free_space = nullptr;
    1214      350865 :           if (allocation.To(&free_space)) {
    1215             :             // Mark with a free list node, in case we have a GC before
    1216             :             // deserializing.
    1217      350847 :             Address free_space_address = free_space->address();
    1218             :             CreateFillerObjectAt(free_space_address, size,
    1219      350847 :                                  ClearRecordedSlots::kNo);
    1220             :             DCHECK(space < SerializerDeserializer::kNumberOfPreallocatedSpaces);
    1221      350847 :             chunk.start = free_space_address;
    1222      350847 :             chunk.end = free_space_address + size;
    1223             :           } else {
    1224             :             perform_gc = true;
    1225             :             break;
    1226             :           }
    1227             :         }
    1228             :       }
    1229      396952 :       if (perform_gc) {
    1230          18 :         if (space == NEW_SPACE) {
    1231           0 :           CollectGarbage(NEW_SPACE, GarbageCollectionReason::kDeserializer);
    1232             :         } else {
    1233          18 :           if (counter > 1) {
    1234             :             CollectAllGarbage(
    1235             :                 kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
    1236             :                 GarbageCollectionReason::kDeserializer);
    1237             :           } else {
    1238             :             CollectAllGarbage(kAbortIncrementalMarkingMask,
    1239             :                               GarbageCollectionReason::kDeserializer);
    1240             :           }
    1241             :         }
    1242             :         gc_performed = true;
    1243             :         break;  // Abort for-loop over spaces and retry.
    1244             :       }
    1245             :     }
    1246             :   }
    1247             : 
    1248      168002 :   return !gc_performed;
    1249             : }
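
ReserveSpace retries the whole per-space reservation pass after a garbage collection and gives up after kThreshold attempts. The retry skeleton in isolation, with the reservation pass and the GC abstracted into hypothetical callbacks:

#include <functional>

// Mirror of the loop structure above: try to satisfy every reservation, fall
// back to a GC and retry, bounded by kThreshold attempts.
bool ReserveWithRetries(const std::function<bool()>& try_reserve_all,
                        const std::function<void()>& collect_garbage) {
  static const int kThreshold = 20;
  bool gc_performed = true;
  int counter = 0;
  while (gc_performed && counter++ < kThreshold) {
    gc_performed = false;
    if (!try_reserve_all()) {
      collect_garbage();
      gc_performed = true;  // rerun the whole pass after the GC
    }
  }
  return !gc_performed;  // true when every reservation fit
}

int main() {
  int attempts = 0;
  bool ok = ReserveWithRetries([&] { return ++attempts >= 3; },  // succeeds on pass 3
                               [] { /* pretend to run a full GC */ });
  return ok ? 0 : 1;
}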
    1250             : 
    1251             : 
    1252      122535 : void Heap::EnsureFromSpaceIsCommitted() {
    1253      367605 :   if (new_space_->CommitFromSpaceIfNeeded()) return;
    1254             : 
    1255             :   // Committing memory to from space failed.
    1256             :   // Memory is exhausted and we will die.
    1257           0 :   V8::FatalProcessOutOfMemory("Committing semi space failed.");
    1258             : }
    1259             : 
    1260             : 
    1261      106692 : void Heap::ClearNormalizedMapCaches() {
    1262       53372 :   if (isolate_->bootstrapper()->IsActive() &&
    1263             :       !incremental_marking()->IsMarking()) {
    1264       53346 :     return;
    1265             :   }
    1266             : 
    1267             :   Object* context = native_contexts_list();
    1268      183117 :   while (!context->IsUndefined(isolate())) {
    1269             :     // GC can happen when the context is not fully initialized,
    1270             :     // so the cache can be undefined.
    1271             :     Object* cache =
    1272             :         Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
    1273       76477 :     if (!cache->IsUndefined(isolate())) {
    1274       76477 :       NormalizedMapCache::cast(cache)->Clear();
    1275             :     }
    1276             :     context = Context::cast(context)->next_context_link();
    1277             :   }
    1278             : }
    1279             : 
    1280             : 
    1281      232921 : void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
    1282      245070 :   if (start_new_space_size == 0) return;
    1283             : 
    1284      110386 :   promotion_ratio_ = (static_cast<double>(promoted_objects_size_) /
    1285      110386 :                       static_cast<double>(start_new_space_size) * 100);
    1286             : 
    1287      110386 :   if (previous_semi_space_copied_object_size_ > 0) {
    1288             :     promotion_rate_ =
    1289       78979 :         (static_cast<double>(promoted_objects_size_) /
    1290       78979 :          static_cast<double>(previous_semi_space_copied_object_size_) * 100);
    1291             :   } else {
    1292       31407 :     promotion_rate_ = 0;
    1293             :   }
    1294             : 
    1295             :   semi_space_copied_rate_ =
    1296      110386 :       (static_cast<double>(semi_space_copied_object_size_) /
    1297      110386 :        static_cast<double>(start_new_space_size) * 100);
    1298             : 
    1299      110386 :   double survival_rate = promotion_ratio_ + semi_space_copied_rate_;
    1300      110386 :   tracer()->AddSurvivalRatio(survival_rate);
    1301             : }
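
All three statistics above are bytes relative to a baseline, expressed as percentages. The same arithmetic with hypothetical byte counts, to make the distinction between promotion_ratio_ (against the pre-GC new-space size) and promotion_rate_ (against the previous cycle's copied bytes) concrete:

#include <cstdio>

int main() {
  // Hypothetical sizes, in bytes, for one young-generation GC.
  double start_new_space_size = 8.0 * 1024 * 1024;    // new space before the GC
  double promoted = 1.0 * 1024 * 1024;                 // promoted to the old generation
  double semi_space_copied = 2.0 * 1024 * 1024;        // copied within new space
  double prev_semi_space_copied = 1.5 * 1024 * 1024;   // from the previous cycle

  double promotion_ratio = promoted / start_new_space_size * 100;           // 12.5 %
  double promotion_rate = promoted / prev_semi_space_copied * 100;          // ~66.7 %
  double semi_space_copied_rate = semi_space_copied / start_new_space_size * 100;  // 25 %
  double survival_rate = promotion_ratio + semi_space_copied_rate;          // 37.5 %

  std::printf("ratio=%.1f%% rate=%.1f%% copied=%.1f%% survival=%.1f%%\n",
              promotion_ratio, promotion_rate, semi_space_copied_rate, survival_rate);
}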
    1302             : 
    1303      122535 : bool Heap::PerformGarbageCollection(
    1304      804327 :     GarbageCollector collector, const v8::GCCallbackFlags gc_callback_flags) {
    1305             :   int freed_global_handles = 0;
    1306             : 
    1307      122535 :   if (!IsYoungGenerationCollector(collector)) {
    1308      474297 :     PROFILE(isolate_, CodeMovingGCEvent());
    1309             :   }
    1310             : 
    1311             : #ifdef VERIFY_HEAP
    1312             :   if (FLAG_verify_heap) {
    1313             :     VerifyStringTable(this);
    1314             :   }
    1315             : #endif
    1316             : 
    1317             :   GCType gc_type =
    1318      122535 :       collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
    1319             : 
    1320             :   {
    1321             :     GCCallbacksScope scope(this);
    1322      122535 :     if (scope.CheckReenter()) {
    1323             :       AllowHeapAllocation allow_allocation;
    1324      489996 :       TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
    1325      244998 :       VMState<EXTERNAL> state(isolate_);
    1326      122499 :       HandleScope handle_scope(isolate_);
    1327      244998 :       CallGCPrologueCallbacks(gc_type, kNoGCCallbackFlags);
    1328             :     }
    1329             :   }
    1330             : 
    1331      122535 :   EnsureFromSpaceIsCommitted();
    1332             : 
    1333      122535 :   int start_new_space_size = static_cast<int>(Heap::new_space()->Size());
    1334             : 
    1335             :   {
    1336      122535 :     Heap::PretenuringScope pretenuring_scope(this);
    1337      122535 :     Heap::SkipStoreBufferScope skip_store_buffer_scope(store_buffer_);
    1338             : 
    1339      122535 :     switch (collector) {
    1340             :       case MARK_COMPACTOR:
    1341             :         UpdateOldGenerationAllocationCounter();
    1342             :         // Perform mark-sweep with optional compaction.
    1343       53346 :         MarkCompact();
    1344       53346 :         old_generation_size_configured_ = true;
    1345             :         // This should be updated before PostGarbageCollectionProcessing, which
    1346             :         // can cause another GC. Take into account the objects promoted during
    1347             :         // GC.
    1348             :         old_generation_allocation_counter_at_last_gc_ +=
    1349       53346 :             static_cast<size_t>(promoted_objects_size_);
    1350       53346 :         old_generation_size_at_last_gc_ = PromotedSpaceSizeOfObjects();
    1351       53346 :         break;
    1352             :       case MINOR_MARK_COMPACTOR:
    1353           0 :         MinorMarkCompact();
    1354           0 :         break;
    1355             :       case SCAVENGER:
    1356       69189 :         if ((fast_promotion_mode_ &&
    1357      138378 :              CanExpandOldGeneration(new_space()->Size())) ||
    1358       69189 :             concurrent_marking_->IsTaskPending()) {
    1359             :           tracer()->NotifyYoungGenerationHandling(
    1360           0 :               YoungGenerationHandling::kFastPromotionDuringScavenge);
    1361           0 :           EvacuateYoungGeneration();
    1362             :         } else {
    1363             :           tracer()->NotifyYoungGenerationHandling(
    1364       69189 :               YoungGenerationHandling::kRegularScavenge);
    1365             : 
    1366       69189 :           Scavenge();
    1367             :         }
    1368             :         break;
    1369             :     }
    1370             : 
    1371      245070 :     ProcessPretenuringFeedback();
    1372             :   }
    1373             : 
    1374      122535 :   UpdateSurvivalStatistics(start_new_space_size);
    1375      122535 :   ConfigureInitialOldGenerationSize();
    1376             : 
    1377      122535 :   if (!fast_promotion_mode_ || collector == MARK_COMPACTOR) {
    1378      122535 :     ComputeFastPromotionMode(promotion_ratio_ + semi_space_copied_rate_);
    1379             :   }
    1380             : 
    1381      245070 :   isolate_->counters()->objs_since_last_young()->Set(0);
    1382             : 
    1383      122535 :   gc_post_processing_depth_++;
    1384             :   {
    1385             :     AllowHeapAllocation allow_allocation;
    1386      490140 :     TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);
    1387             :     freed_global_handles =
    1388             :         isolate_->global_handles()->PostGarbageCollectionProcessing(
    1389      367605 :             collector, gc_callback_flags);
    1390             :   }
    1391      122535 :   gc_post_processing_depth_--;
    1392             : 
    1393      245070 :   isolate_->eternal_handles()->PostGarbageCollectionProcessing(this);
    1394             : 
    1395             :   // Update relocatables.
    1396      122535 :   Relocatable::PostGarbageCollectionProcessing(isolate_);
    1397             : 
    1398      122535 :   double gc_speed = tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond();
    1399             :   double mutator_speed =
    1400      122535 :       tracer()->CurrentOldGenerationAllocationThroughputInBytesPerMillisecond();
    1401      122535 :   size_t old_gen_size = PromotedSpaceSizeOfObjects();
    1402      122535 :   if (collector == MARK_COMPACTOR) {
    1403             :     // Register the amount of external allocated memory.
    1404       53346 :     external_memory_at_last_mark_compact_ = external_memory_;
    1405       53346 :     external_memory_limit_ = external_memory_ + kExternalAllocationSoftLimit;
    1406       53346 :     SetOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed);
    1407       69189 :   } else if (HasLowYoungGenerationAllocationRate() &&
    1408             :              old_generation_size_configured_) {
    1409         110 :     DampenOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed);
    1410             :   }
    1411             : 
    1412             :   {
    1413             :     GCCallbacksScope scope(this);
    1414      122535 :     if (scope.CheckReenter()) {
    1415             :       AllowHeapAllocation allow_allocation;
    1416      489996 :       TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
    1417      244998 :       VMState<EXTERNAL> state(isolate_);
    1418      122499 :       HandleScope handle_scope(isolate_);
    1419      244998 :       CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
    1420             :     }
    1421             :   }
    1422             : 
    1423             : #ifdef VERIFY_HEAP
    1424             :   if (FLAG_verify_heap) {
    1425             :     VerifyStringTable(this);
    1426             :   }
    1427             : #endif
    1428             : 
    1429      122535 :   return freed_global_handles > 0;
    1430             : }
    1431             : 
    1432             : 
    1433      123288 : void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
    1434      246576 :   RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GCPrologueCallback);
    1435      246684 :   for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
    1436      123450 :     if (gc_type & gc_prologue_callbacks_[i].gc_type) {
    1437          54 :       if (!gc_prologue_callbacks_[i].pass_isolate) {
    1438             :         v8::GCCallback callback = reinterpret_cast<v8::GCCallback>(
    1439           0 :             gc_prologue_callbacks_[i].callback);
    1440           0 :         callback(gc_type, flags);
    1441             :       } else {
    1442             :         v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
    1443          54 :         gc_prologue_callbacks_[i].callback(isolate, gc_type, flags);
    1444             :       }
    1445             :     }
    1446             :   }
    1447      123288 : }
    1448             : 
    1449             : 
    1450      123288 : void Heap::CallGCEpilogueCallbacks(GCType gc_type,
    1451             :                                    GCCallbackFlags gc_callback_flags) {
    1452      246576 :   RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GCEpilogueCallback);
    1453      246684 :   for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
    1454      123450 :     if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
    1455          54 :       if (!gc_epilogue_callbacks_[i].pass_isolate) {
    1456             :         v8::GCCallback callback = reinterpret_cast<v8::GCCallback>(
    1457           0 :             gc_epilogue_callbacks_[i].callback);
    1458           0 :         callback(gc_type, gc_callback_flags);
    1459             :       } else {
    1460             :         v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
    1461          54 :         gc_epilogue_callbacks_[i].callback(isolate, gc_type, gc_callback_flags);
    1462             :       }
    1463             :     }
    1464             :   }
    1465      123288 : }
    1466             : 
    1467             : 
    1468      160038 : void Heap::MarkCompact() {
    1469       53346 :   PauseAllocationObserversScope pause_observers(this);
    1470             : 
    1471             :   SetGCState(MARK_COMPACT);
    1472             : 
    1473      106692 :   LOG(isolate_, ResourceEvent("markcompact", "begin"));
    1474             : 
    1475       53346 :   uint64_t size_of_objects_before_gc = SizeOfObjects();
    1476             : 
    1477       53346 :   mark_compact_collector()->Prepare();
    1478             : 
    1479       53346 :   ms_count_++;
    1480             : 
    1481       53346 :   MarkCompactPrologue();
    1482             : 
    1483       53346 :   mark_compact_collector()->CollectGarbage();
    1484             : 
    1485      106692 :   LOG(isolate_, ResourceEvent("markcompact", "end"));
    1486             : 
    1487       53346 :   MarkCompactEpilogue();
    1488             : 
    1489       53346 :   if (FLAG_allocation_site_pretenuring) {
    1490       53346 :     EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
    1491       53346 :   }
    1492       53346 : }
    1493             : 
    1494           0 : void Heap::MinorMarkCompact() {
    1495             :   DCHECK(FLAG_minor_mc);
    1496             : 
    1497             :   SetGCState(MINOR_MARK_COMPACT);
    1498           0 :   LOG(isolate_, ResourceEvent("MinorMarkCompact", "begin"));
    1499             : 
    1500           0 :   TRACE_GC(tracer(), GCTracer::Scope::MC_MINOR_MC);
    1501             :   AlwaysAllocateScope always_allocate(isolate());
    1502           0 :   PauseAllocationObserversScope pause_observers(this);
    1503             : 
    1504           0 :   minor_mark_compact_collector()->CollectGarbage();
    1505             : 
    1506           0 :   LOG(isolate_, ResourceEvent("MinorMarkCompact", "end"));
    1507           0 :   SetGCState(NOT_IN_GC);
    1508           0 : }
    1509             : 
    1510      160038 : void Heap::MarkCompactEpilogue() {
    1511      213384 :   TRACE_GC(tracer(), GCTracer::Scope::MC_EPILOGUE);
    1512             :   SetGCState(NOT_IN_GC);
    1513             : 
    1514       53346 :   isolate_->counters()->objs_since_last_full()->Set(0);
    1515             : 
    1516       53346 :   incremental_marking()->Epilogue();
    1517             : 
    1518       53346 :   PreprocessStackTraces();
    1519             :   DCHECK(incremental_marking()->IsStopped());
    1520             : 
    1521      106692 :   mark_compact_collector()->marking_deque()->StopUsing();
    1522       53346 : }
    1523             : 
    1524             : 
    1525      160038 : void Heap::MarkCompactPrologue() {
    1526      213384 :   TRACE_GC(tracer(), GCTracer::Scope::MC_PROLOGUE);
    1527      160038 :   isolate_->context_slot_cache()->Clear();
    1528      106692 :   isolate_->descriptor_lookup_cache()->Clear();
    1529       53346 :   RegExpResultsCache::Clear(string_split_cache());
    1530       53346 :   RegExpResultsCache::Clear(regexp_multiple_cache());
    1531             : 
    1532      106692 :   isolate_->compilation_cache()->MarkCompactPrologue();
    1533             : 
    1534             :   CompletelyClearInstanceofCache();
    1535             : 
    1536       53346 :   FlushNumberStringCache();
    1537      106692 :   ClearNormalizedMapCaches();
    1538       53346 : }
    1539             : 
    1540             : 
    1541      122535 : void Heap::CheckNewSpaceExpansionCriteria() {
    1542      122535 :   if (FLAG_experimental_new_space_growth_heuristic) {
    1543           0 :     if (new_space_->TotalCapacity() < new_space_->MaximumCapacity() &&
    1544           0 :         survived_last_scavenge_ * 100 / new_space_->TotalCapacity() >= 10) {
     1545             :       // Grow the size of new space if there is room to grow, and at least 10%
     1546             :       // of its capacity has survived the last scavenge.
    1547           0 :       new_space_->Grow();
    1548           0 :       survived_since_last_expansion_ = 0;
    1549             :     }
    1550      363640 :   } else if (new_space_->TotalCapacity() < new_space_->MaximumCapacity() &&
    1551      118570 :              survived_since_last_expansion_ > new_space_->TotalCapacity()) {
    1552             :     // Grow the size of new space if there is room to grow, and enough data
    1553             :     // has survived scavenge since the last expansion.
    1554        2012 :     new_space_->Grow();
    1555        2012 :     survived_since_last_expansion_ = 0;
    1556             :   }
    1557      122535 : }
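
Both branches above grow new space only when there is headroom and enough bytes survived: the default heuristic compares cumulative survivors since the last expansion against the current capacity, the experimental one asks whether at least 10% of capacity survived the last scavenge. A condensed sketch with the heap state reduced to plain integers (NewSpaceState and the helper names are illustrative):

#include <cstddef>

struct NewSpaceState {
  size_t total_capacity;
  size_t maximum_capacity;
  size_t survived_last_scavenge;
  size_t survived_since_last_expansion;
};

// Default heuristic from the code above.
bool ShouldGrowNewSpace(const NewSpaceState& s) {
  return s.total_capacity < s.maximum_capacity &&
         s.survived_since_last_expansion > s.total_capacity;
}

// Experimental variant: grow once >= 10% of capacity survived the last scavenge.
bool ShouldGrowNewSpaceExperimental(const NewSpaceState& s) {
  return s.total_capacity < s.maximum_capacity &&
         s.survived_last_scavenge * 100 / s.total_capacity >= 10;
}

int main() {
  NewSpaceState s{4u << 20, 16u << 20, 1u << 20, 5u << 20};
  return ShouldGrowNewSpace(s) ? 0 : 1;  // 5 MB survived > 4 MB capacity: grow
}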
    1558             : 
    1559             : 
    1560       24991 : static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
    1561       74955 :   return heap->InNewSpace(*p) &&
    1562       24991 :          !HeapObject::cast(*p)->map_word().IsForwardingAddress();
    1563             : }
    1564             : 
    1565       69189 : void PromotionQueue::Initialize() {
     1566             :   // The last to-space page may be used for the promotion queue. On a
     1567             :   // promotion conflict, we fall back to the emergency stack.
    1568             :   DCHECK((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize) ==
    1569             :          0);
    1570             :   front_ = rear_ =
    1571      138378 :       reinterpret_cast<struct Entry*>(heap_->new_space()->ToSpaceEnd());
    1572             :   limit_ = reinterpret_cast<struct Entry*>(
    1573             :       Page::FromAllocationAreaAddress(reinterpret_cast<Address>(rear_))
    1574       69189 :           ->area_start());
    1575       69189 :   emergency_stack_ = NULL;
    1576       69189 : }
    1577             : 
    1578       69189 : void PromotionQueue::Destroy() {
    1579             :   DCHECK(is_empty());
    1580       69189 :   delete emergency_stack_;
    1581       69189 :   emergency_stack_ = NULL;
    1582       69189 : }
    1583             : 
    1584         232 : void PromotionQueue::RelocateQueueHead() {
    1585             :   DCHECK(emergency_stack_ == NULL);
    1586             : 
    1587         232 :   Page* p = Page::FromAllocationAreaAddress(reinterpret_cast<Address>(rear_));
    1588             :   struct Entry* head_start = rear_;
    1589             :   struct Entry* head_end =
    1590         232 :       Min(front_, reinterpret_cast<struct Entry*>(p->area_end()));
    1591             : 
    1592             :   int entries_count =
    1593         232 :       static_cast<int>(head_end - head_start) / sizeof(struct Entry);
    1594             : 
    1595         464 :   emergency_stack_ = new List<Entry>(2 * entries_count);
    1596             : 
    1597     1340680 :   while (head_start != head_end) {
    1598     1340216 :     struct Entry* entry = head_start++;
     1599             :     // New-space allocation in SemiSpaceCopyObject marked the region
     1600             :     // overlapping with the promotion queue as uninitialized.
    1601             :     MSAN_MEMORY_IS_INITIALIZED(entry, sizeof(struct Entry));
    1602     1340216 :     emergency_stack_->Add(*entry);
    1603             :   }
    1604         232 :   rear_ = head_end;
    1605         232 : }
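
The promotion queue grows downwards from the end of to-space; when a bump-pointer allocation is about to collide with it, RelocateQueueHead spills the in-place entries into a heap-allocated emergency stack. A self-contained sketch of that idea, with the queue reduced to raw pointers into a local array and the emergency stack to a std::vector (the types and layout are illustrative, not V8's):

#include <cstdint>
#include <cstdio>
#include <vector>

struct Entry { uintptr_t object; int32_t size; };

struct PromotionQueueSketch {
  Entry* front;                        // read end, starts at the space's end
  Entry* rear;                         // write end, grows towards lower addresses
  std::vector<Entry> emergency_stack;  // overflow storage after a collision

  void Push(const Entry& e) {
    if (!emergency_stack.empty()) { emergency_stack.push_back(e); return; }
    *--rear = e;
  }

  bool Empty() const { return emergency_stack.empty() && front == rear; }

  Entry Pop() {
    if (!emergency_stack.empty()) {
      Entry e = emergency_stack.back();
      emergency_stack.pop_back();
      return e;
    }
    return *--front;
  }

  // Called when the allocation pointer is about to run into [rear, front):
  // spill the in-place entries so that memory can be reused for allocation.
  void RelocateHead() {
    for (Entry* p = rear; p != front; ++p) emergency_stack.push_back(*p);
    rear = front;
  }
};

int main() {
  Entry backing[8];
  PromotionQueueSketch q{backing + 8, backing + 8, {}};
  q.Push({0x1000, 32});
  q.Push({0x2000, 64});
  q.RelocateHead();            // pretend the allocator needs this memory now
  q.Push({0x3000, 16});        // lands on the emergency stack
  while (!q.Empty()) std::printf("0x%zx\n", static_cast<size_t>(q.Pop().object));
}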
    1606             : 
    1607             : 
    1608       69189 : class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
    1609             :  public:
    1610       69189 :   explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) {}
    1611             : 
    1612     2211072 :   virtual Object* RetainAs(Object* object) {
    1613     2211072 :     if (!heap_->InFromSpace(object)) {
    1614             :       return object;
    1615             :     }
    1616             : 
    1617             :     MapWord map_word = HeapObject::cast(object)->map_word();
    1618     1359261 :     if (map_word.IsForwardingAddress()) {
    1619      138110 :       return map_word.ToForwardingAddress();
    1620             :     }
    1621             :     return NULL;
    1622             :   }
    1623             : 
    1624             :  private:
    1625             :   Heap* heap_;
    1626             : };
    1627             : 
    1628           0 : void Heap::EvacuateYoungGeneration() {
    1629           0 :   TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_EVACUATE);
    1630             :   if (!FLAG_concurrent_marking) {
    1631             :     DCHECK(fast_promotion_mode_);
    1632             :     DCHECK(CanExpandOldGeneration(new_space()->Size()));
    1633             :   }
    1634             : 
    1635           0 :   mark_compact_collector()->sweeper().EnsureNewSpaceCompleted();
    1636             : 
    1637             :   SetGCState(SCAVENGE);
    1638           0 :   LOG(isolate_, ResourceEvent("scavenge", "begin"));
    1639             : 
    1640             :   // Move pages from new->old generation.
    1641           0 :   PageRange range(new_space()->bottom(), new_space()->top());
    1642           0 :   for (auto it = range.begin(); it != range.end();) {
    1643           0 :     Page* p = (*++it)->prev_page();
    1644           0 :     p->Unlink();
    1645           0 :     Page::ConvertNewToOld(p);
    1646           0 :     if (incremental_marking()->IsMarking())
    1647           0 :       mark_compact_collector()->RecordLiveSlotsOnPage(p);
    1648             :   }
    1649             : 
    1650             :   // Reset new space.
    1651           0 :   if (!new_space()->Rebalance()) {
    1652             :     FatalProcessOutOfMemory("NewSpace::Rebalance");
    1653             :   }
    1654           0 :   new_space()->ResetAllocationInfo();
    1655             :   new_space()->set_age_mark(new_space()->top());
    1656             : 
    1657             :   // Fix up special trackers.
    1658           0 :   external_string_table_.PromoteAllNewSpaceStrings();
     1659             :   // GlobalHandles are updated in PostGarbageCollectionProcessing.
    1660             : 
    1661           0 :   IncrementYoungSurvivorsCounter(new_space()->Size());
    1662           0 :   IncrementPromotedObjectsSize(new_space()->Size());
    1663             :   IncrementSemiSpaceCopiedObjectSize(0);
    1664             : 
    1665           0 :   LOG(isolate_, ResourceEvent("scavenge", "end"));
    1666           0 :   SetGCState(NOT_IN_GC);
    1667           0 : }
    1668             : 
    1669      691890 : void Heap::Scavenge() {
    1670      276756 :   TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
    1671             :   RelocationLock relocation_lock(this);
     1672             :   // There are soft limits in the allocation code, designed to trigger a
     1673             :   // mark-sweep collection by failing allocations. There is no sense in trying
     1674             :   // to trigger one during scavenge: allocation during scavenge should succeed.
    1675             :   AlwaysAllocateScope scope(isolate());
    1676             : 
    1677             :   // Bump-pointer allocations done during scavenge are not real allocations.
    1678             :   // Pause the inline allocation steps.
    1679      138378 :   PauseAllocationObserversScope pause_observers(this);
    1680             : 
    1681       69189 :   mark_compact_collector()->sweeper().EnsureNewSpaceCompleted();
    1682             : 
    1683             :   SetGCState(SCAVENGE);
    1684             : 
    1685             :   // Implements Cheney's copying algorithm
    1686      138378 :   LOG(isolate_, ResourceEvent("scavenge", "begin"));
    1687             : 
    1688             :   // Used for updating survived_since_last_expansion_ at function end.
    1689       69189 :   size_t survived_watermark = PromotedSpaceSizeOfObjects();
    1690             : 
    1691       69189 :   scavenge_collector_->SelectScavengingVisitorsTable();
    1692             : 
    1693             :   // Flip the semispaces.  After flipping, to space is empty, from space has
    1694             :   // live objects.
    1695       69189 :   new_space_->Flip();
    1696       69189 :   new_space_->ResetAllocationInfo();
    1697             : 
    1698             :   // We need to sweep newly copied objects which can be either in the
    1699             :   // to space or promoted to the old generation.  For to-space
    1700             :   // objects, we treat the bottom of the to space as a queue.  Newly
    1701             :   // copied and unswept objects lie between a 'front' mark and the
    1702             :   // allocation pointer.
    1703             :   //
    1704             :   // Promoted objects can go into various old-generation spaces, and
    1705             :   // can be allocated internally in the spaces (from the free list).
    1706             :   // We treat the top of the to space as a queue of addresses of
    1707             :   // promoted objects.  The addresses of newly promoted and unswept
    1708             :   // objects lie between a 'front' mark and a 'rear' mark that is
    1709             :   // updated as a side effect of promoting an object.
    1710             :   //
    1711             :   // There is guaranteed to be enough room at the top of the to space
    1712             :   // for the addresses of promoted objects: every object promoted
    1713             :   // frees up its size in bytes from the top of the new space, and
    1714             :   // objects are at least one pointer in size.
    1715       69189 :   Address new_space_front = new_space_->ToSpaceStart();
    1716       69189 :   promotion_queue_.Initialize();
    1717             : 
    1718             :   RootScavengeVisitor root_scavenge_visitor(this);
    1719             : 
    1720             :   isolate()->global_handles()->IdentifyWeakUnmodifiedObjects(
    1721       69189 :       &IsUnmodifiedHeapObject);
    1722             : 
    1723             :   {
    1724             :     // Copy roots.
    1725      276756 :     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_ROOTS);
    1726      138378 :     IterateRoots(&root_scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
    1727             :   }
    1728             : 
    1729             :   {
    1730             :     // Copy objects reachable from the old generation.
    1731      276756 :     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS);
    1732             :     RememberedSet<OLD_TO_NEW>::Iterate(
    1733             :         this, SYNCHRONIZED, [this](Address addr) {
    1734             :           return Scavenger::CheckAndScavengeObject(this, addr);
    1735    80386363 :         });
    1736             : 
    1737             :     RememberedSet<OLD_TO_NEW>::IterateTyped(
    1738             :         this, SYNCHRONIZED,
    1739             :         [this](SlotType type, Address host_addr, Address addr) {
    1740             :           return UpdateTypedSlotHelper::UpdateTypedSlot(
    1741             :               isolate(), type, addr, [this](Object** addr) {
     1742             :                 // We expect that objects referenced by code are long-lived.
    1743             :                 // If we do not force promotion, then we need to clear
    1744             :                 // old_to_new slots in dead code objects after mark-compact.
    1745             :                 return Scavenger::CheckAndScavengeObject(
    1746             :                     this, reinterpret_cast<Address>(addr));
    1747      474480 :               });
    1748      543669 :         });
    1749             :   }
    1750             : 
    1751             :   {
    1752      276756 :     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_WEAK);
    1753       69189 :     IterateEncounteredWeakCollections(&root_scavenge_visitor);
    1754             :   }
    1755             : 
    1756             :   {
    1757             :     // Copy objects reachable from the code flushing candidates list.
    1758      276756 :     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES);
    1759      138378 :     MarkCompactCollector* collector = mark_compact_collector();
    1760       69189 :     if (collector->is_code_flushing_enabled()) {
    1761       69189 :       collector->code_flusher()->VisitListHeads(&root_scavenge_visitor);
    1762             :       collector->code_flusher()
    1763       69189 :           ->IteratePointersToFromSpace<StaticScavengeVisitor>();
    1764       69189 :     }
    1765             :   }
    1766             : 
    1767             :   {
    1768      276756 :     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE);
    1769      138378 :     new_space_front = DoScavenge(new_space_front);
    1770             :   }
    1771             : 
    1772             :   isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
    1773       69189 :       &IsUnscavengedHeapObject);
    1774             : 
    1775             :   isolate()
    1776             :       ->global_handles()
    1777             :       ->IterateNewSpaceWeakUnmodifiedRoots<
    1778             :           GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(
    1779       69189 :           &root_scavenge_visitor);
    1780       69189 :   new_space_front = DoScavenge(new_space_front);
    1781             : 
    1782             :   UpdateNewSpaceReferencesInExternalStringTable(
    1783       69189 :       &UpdateNewSpaceReferenceInExternalStringTableEntry);
    1784             : 
    1785       69189 :   promotion_queue_.Destroy();
    1786             : 
    1787       69189 :   incremental_marking()->UpdateMarkingDequeAfterScavenge();
    1788             : 
    1789             :   ScavengeWeakObjectRetainer weak_object_retainer(this);
    1790             :   ProcessYoungWeakReferences(&weak_object_retainer);
    1791             : 
    1792             :   DCHECK(new_space_front == new_space_->top());
    1793             : 
    1794             :   // Set age mark.
    1795       69189 :   new_space_->set_age_mark(new_space_->top());
    1796             : 
    1797       69189 :   ArrayBufferTracker::FreeDeadInNewSpace(this);
    1798             : 
    1799             :   // Update how much has survived scavenge.
    1800             :   DCHECK_GE(PromotedSpaceSizeOfObjects(), survived_watermark);
    1801      138378 :   IncrementYoungSurvivorsCounter(PromotedSpaceSizeOfObjects() +
    1802      138378 :                                  new_space_->Size() - survived_watermark);
    1803             : 
    1804             :   // Scavenger may find new wrappers by iterating objects promoted onto a black
    1805             :   // page.
    1806       69189 :   local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
    1807             : 
    1808      138378 :   LOG(isolate_, ResourceEvent("scavenge", "end"));
    1809             : 
    1810       69189 :   SetGCState(NOT_IN_GC);
    1811       69189 : }
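
The long comment inside Scavenge describes Cheney's algorithm: after the semispace flip, everything between a scan pointer and the to-space allocation top is copied but not yet scanned, and scanning may copy more objects and advance the top. A heavily simplified, self-contained sketch of that loop; the Node layout, the forwarding field, and the fixed-size semispaces are illustrative, not V8's object model:

#include <cstdio>
#include <vector>

struct Node {
  Node* fields[2];  // outgoing pointers
  Node* forwarded;  // non-null once this object has been copied to to-space
};

struct SemiSpace {
  std::vector<Node> slab;  // backing storage, pre-sized so pointers stay stable
  size_t top = 0;          // bump-pointer allocation index
  explicit SemiSpace(size_t n) : slab(n) {}
  Node* Allocate() { return &slab[top++]; }
};

// Copy |obj| into |to| unless it was copied already; return its new address.
Node* Evacuate(Node* obj, SemiSpace* to) {
  if (obj == nullptr) return nullptr;
  if (obj->forwarded != nullptr) return obj->forwarded;
  Node* copy = to->Allocate();
  *copy = *obj;
  copy->forwarded = nullptr;
  obj->forwarded = copy;  // leave a forwarding pointer behind in from-space
  return copy;
}

// Cheney scan: objects between |scan| and to-space top are copied but
// unscanned; scanning them may evacuate more objects and advance top.
void ScavengeSketch(std::vector<Node*>* roots, SemiSpace* to) {
  for (Node*& root : *roots) root = Evacuate(root, to);
  for (size_t scan = 0; scan < to->top; ++scan) {
    Node* obj = &to->slab[scan];
    for (Node*& field : obj->fields) field = Evacuate(field, to);
  }
}

int main() {
  SemiSpace from(8), to(8);
  Node* a = from.Allocate();
  Node* b = from.Allocate();
  Node* dead = from.Allocate();
  *a = {{b, nullptr}, nullptr};
  *b = {{a, nullptr}, nullptr};  // a cycle, handled by the forwarding pointers
  *dead = {{nullptr, nullptr}, nullptr};
  std::vector<Node*> roots = {a};
  ScavengeSketch(&roots, &to);
  std::printf("live copies: %zu\n", to.top);  // 2: |dead| was not copied
  (void)dead;
}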
    1812             : 
    1813      122535 : void Heap::ComputeFastPromotionMode(double survival_rate) {
    1814             :   const size_t survived_in_new_space =
    1815      245070 :       survived_last_scavenge_ * 100 / new_space_->Capacity();
    1816             :   fast_promotion_mode_ =
    1817      245010 :       !FLAG_optimize_for_size && FLAG_fast_promotion_new_space &&
    1818      122535 :       !ShouldReduceMemory() && new_space_->IsAtMaximumCapacity() &&
    1819      122535 :       survived_in_new_space >= kMinPromotedPercentForFastPromotionMode;
    1820      122535 :   if (FLAG_trace_gc_verbose) {
    1821             :     PrintIsolate(
    1822             :         isolate(), "Fast promotion mode: %s survival rate: %" PRIuS "%%\n",
    1823           0 :         fast_promotion_mode_ ? "true" : "false", survived_in_new_space);
    1824             :   }
    1825      122535 : }
    1826             : 
    1827        1873 : String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
    1828             :                                                                 Object** p) {
    1829        1873 :   MapWord first_word = HeapObject::cast(*p)->map_word();
    1830             : 
    1831        1873 :   if (!first_word.IsForwardingAddress()) {
    1832             :     // Unreachable external string can be finalized.
    1833         343 :     String* string = String::cast(*p);
    1834         343 :     if (!string->IsExternalString()) {
    1835             :       // Original external string has been internalized.
    1836             :       DCHECK(string->IsThinString());
    1837             :       return NULL;
    1838             :     }
    1839             :     heap->FinalizeExternalString(string);
    1840             :     return NULL;
    1841             :   }
    1842             : 
    1843             :   // String is still reachable.
    1844        1530 :   String* string = String::cast(first_word.ToForwardingAddress());
    1845        1530 :   if (string->IsThinString()) string = ThinString::cast(string)->actual();
    1846             :   // Internalization can replace external strings with non-external strings.
    1847        1530 :   return string->IsExternalString() ? string : nullptr;
    1848             : }
    1849             : 
    1850             : 
    1851      122535 : void Heap::UpdateNewSpaceReferencesInExternalStringTable(
    1852             :     ExternalStringTableUpdaterCallback updater_func) {
    1853      245070 :   if (external_string_table_.new_space_strings_.is_empty()) return;
    1854             : 
    1855             :   Object** start = &external_string_table_.new_space_strings_[0];
    1856         235 :   Object** end = start + external_string_table_.new_space_strings_.length();
    1857             :   Object** last = start;
    1858             : 
    1859        3820 :   for (Object** p = start; p < end; ++p) {
    1860        3585 :     String* target = updater_func(this, p);
    1861             : 
    1862        3585 :     if (target == NULL) continue;
    1863             : 
    1864             :     DCHECK(target->IsExternalString());
    1865             : 
    1866        3242 :     if (InNewSpace(target)) {
    1867             :       // String is still in new space.  Update the table entry.
    1868        1635 :       *last = target;
    1869        1635 :       ++last;
    1870             :     } else {
    1871             :       // String got promoted.  Move it to the old string list.
    1872             :       external_string_table_.AddOldString(target);
    1873             :     }
    1874             :   }
    1875             : 
    1876             :   DCHECK(last <= end);
    1877         235 :   external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
    1878             : }
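
The loop above compacts the new-space string list in place: entries whose target is still in new space are kept via the last pointer, promoted targets move to the old-space list, and finalized entries are dropped by shrinking to last - start. A generic sketch of that partition-and-shrink pattern over plain vectors; the element type and the still_young predicate are stand-ins:

#include <cstdio>
#include <vector>

// Partition |young| in place: keep entries for which still_young() holds, move
// the rest into |old|, and drop entries equal to 0 (modelling finalized ones).
template <typename T, typename StillYoung>
void CompactYoungList(std::vector<T>* young, std::vector<T>* old,
                      StillYoung still_young) {
  size_t last = 0;
  for (size_t i = 0; i < young->size(); ++i) {
    T target = (*young)[i];
    if (!target) continue;           // dead entry: skip it
    if (still_young(target)) {
      (*young)[last++] = target;     // keep, compacted towards the front
    } else {
      old->push_back(target);        // "promoted": track it in the old list
    }
  }
  young->resize(last);               // analogous to ShrinkNewStrings()
}

int main() {
  std::vector<int> young = {1, 0, 150, 7};  // 0 models a finalized entry
  std::vector<int> old;
  CompactYoungList(&young, &old, [](int id) { return id < 100; });
  std::printf("young=%zu old=%zu\n", young.size(), old.size());  // young=2 old=1
}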
    1879             : 
    1880             : 
    1881       53346 : void Heap::UpdateReferencesInExternalStringTable(
    1882             :     ExternalStringTableUpdaterCallback updater_func) {
    1883             :   // Update old space string references.
    1884       53346 :   if (external_string_table_.old_space_strings_.length() > 0) {
    1885             :     Object** start = &external_string_table_.old_space_strings_[0];
    1886       53274 :     Object** end = start + external_string_table_.old_space_strings_.length();
    1887       53274 :     for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
    1888             :   }
    1889             : 
    1890       53346 :   UpdateNewSpaceReferencesInExternalStringTable(updater_func);
    1891       53346 : }
    1892             : 
    1893             : 
    1894       53346 : void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
    1895             :   ProcessNativeContexts(retainer);
    1896             :   ProcessAllocationSites(retainer);
    1897       53346 : }
    1898             : 
    1899             : 
    1900           0 : void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
    1901             :   ProcessNativeContexts(retainer);
    1902           0 : }
    1903             : 
    1904             : 
    1905      122535 : void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
    1906      122535 :   Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
    1907             :   // Update the head of the list of contexts.
    1908             :   set_native_contexts_list(head);
    1909           0 : }
    1910             : 
    1911             : 
    1912       53346 : void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
    1913             :   Object* allocation_site_obj =
    1914       53346 :       VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
    1915             :   set_allocation_sites_list(allocation_site_obj);
    1916           0 : }
    1917             : 
    1918      160038 : void Heap::ProcessWeakListRoots(WeakObjectRetainer* retainer) {
    1919      106692 :   set_native_contexts_list(retainer->RetainAs(native_contexts_list()));
    1920      106692 :   set_allocation_sites_list(retainer->RetainAs(allocation_sites_list()));
    1921       53346 : }
    1922             : 
    1923          60 : void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
    1924             :   DisallowHeapAllocation no_allocation_scope;
    1925             :   Object* cur = allocation_sites_list();
    1926             :   bool marked = false;
    1927        4031 :   while (cur->IsAllocationSite()) {
    1928             :     AllocationSite* casted = AllocationSite::cast(cur);
    1929        3911 :     if (casted->GetPretenureMode() == flag) {
    1930           0 :       casted->ResetPretenureDecision();
    1931             :       casted->set_deopt_dependent_code(true);
    1932             :       marked = true;
    1933             :       RemoveAllocationSitePretenuringFeedback(casted);
    1934             :     }
    1935             :     cur = casted->weak_next();
    1936             :   }
    1937          60 :   if (marked) isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
    1938          60 : }
    1939             : 
    1940             : 
    1941       53346 : void Heap::EvaluateOldSpaceLocalPretenuring(
    1942             :     uint64_t size_of_objects_before_gc) {
    1943       53346 :   uint64_t size_of_objects_after_gc = SizeOfObjects();
    1944             :   double old_generation_survival_rate =
    1945       53346 :       (static_cast<double>(size_of_objects_after_gc) * 100) /
    1946       53346 :       static_cast<double>(size_of_objects_before_gc);
    1947             : 
    1948       53346 :   if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) {
     1949             :     // Too many objects died in the old generation; wrongly pretenured
     1950             :     // allocation sites may be the cause. We have to deopt all dependent
     1951             :     // code registered in the allocation sites to re-evaluate our
     1952             :     // pretenuring decisions.
    1953          60 :     ResetAllAllocationSitesDependentCode(TENURED);
    1954          60 :     if (FLAG_trace_pretenuring) {
    1955             :       PrintF(
    1956             :           "Deopt all allocation sites dependent code due to low survival "
    1957             :           "rate in the old generation %f\n",
    1958           0 :           old_generation_survival_rate);
    1959             :     }
    1960             :   }
    1961       53346 : }
    1962             : 
    1963             : 
    1964           6 : void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
    1965             :   DisallowHeapAllocation no_allocation;
    1966             :   // All external strings are listed in the external string table.
    1967             : 
    1968           0 :   class ExternalStringTableVisitorAdapter : public RootVisitor {
    1969             :    public:
    1970             :     explicit ExternalStringTableVisitorAdapter(
    1971             :         v8::ExternalResourceVisitor* visitor)
    1972           6 :         : visitor_(visitor) {}
    1973           6 :     virtual void VisitRootPointers(Root root, Object** start, Object** end) {
    1974         126 :       for (Object** p = start; p < end; p++) {
    1975             :         DCHECK((*p)->IsExternalString());
    1976             :         visitor_->VisitExternalString(
    1977         240 :             Utils::ToLocal(Handle<String>(String::cast(*p))));
    1978             :       }
    1979           6 :     }
    1980             : 
    1981             :    private:
    1982             :     v8::ExternalResourceVisitor* visitor_;
    1983             :   } external_string_table_visitor(visitor);
    1984             : 
    1985           6 :   external_string_table_.IterateAll(&external_string_table_visitor);
    1986           6 : }
    1987             : 
    1988      138378 : Address Heap::DoScavenge(Address new_space_front) {
    1989      141232 :   do {
    1990             :     SemiSpace::AssertValidRange(new_space_front, new_space_->top());
    1991             :     // The addresses new_space_front and new_space_.top() define a
    1992             :     // queue of unprocessed copied objects.  Process them until the
    1993             :     // queue is empty.
    1994   141312678 :     while (new_space_front != new_space_->top()) {
    1995    70515107 :       if (!Page::IsAlignedToPageSize(new_space_front)) {
    1996    70508953 :         HeapObject* object = HeapObject::FromAddress(new_space_front);
    1997    70508953 :         new_space_front +=
    1998    70508953 :             StaticScavengeVisitor::IterateBody(object->map(), object);
    1999             :       } else {
    2000             :         new_space_front = Page::FromAllocationAreaAddress(new_space_front)
    2001             :                               ->next_page()
    2002        6154 :                               ->area_start();
    2003             :       }
    2004             :     }
    2005             : 
    2006             :     // Promote and process all the to-be-promoted objects.
    2007             :     {
    2008    36347544 :       while (!promotion_queue()->is_empty()) {
    2009             :         HeapObject* target;
    2010             :         int32_t size;
    2011             :         bool was_marked_black;
    2012    36206312 :         promotion_queue()->remove(&target, &size, &was_marked_black);
    2013             : 
    2014             :         // A promoted object might already have been partially visited
    2015             :         // during old-space pointer iteration. Thus we search specifically
    2016             :         // for pointers into the from-semispace instead of looking for
    2017             :         // pointers into new space.
    2018             :         DCHECK(!target->IsMap());
    2019             : 
    2020             :         IterateAndScavengePromotedObject(target, static_cast<int>(size),
    2021    36206312 :                                          was_marked_black);
    2022             :       }
    2023             :     }
    2024             : 
    2025             :     // Take another spin if there are now unswept objects in new space
    2026             :     // (there are currently no more unswept promoted objects).
    2027      141232 :   } while (new_space_front != new_space_->top());
    2028             : 
    2029      138378 :   return new_space_front;
    2030             : }
    2031             : 
    2032             : 
    2033             : STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) ==
    2034             :               0);  // NOLINT
    2035             : STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) ==
    2036             :               0);  // NOLINT
    2037             : #ifdef V8_HOST_ARCH_32_BIT
    2038             : STATIC_ASSERT((HeapNumber::kValueOffset & kDoubleAlignmentMask) !=
    2039             :               0);  // NOLINT
    2040             : #endif
    2041             : 
    2042             : 
    2043          18 : int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) {
    2044          18 :   switch (alignment) {
    2045             :     case kWordAligned:
    2046             :       return 0;
    2047             :     case kDoubleAligned:
    2048             :     case kDoubleUnaligned:
    2049             :       return kDoubleSize - kPointerSize;
    2050             :     default:
    2051           0 :       UNREACHABLE();
    2052             :   }
    2053             :   return 0;
    2054             : }
    2055             : 
    2056             : 
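                     : // Returns the number of filler bytes required in front of |address| so that
                     : // an object allocated there satisfies |alignment|.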
    2057    15526662 : int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) {
    2058             :   intptr_t offset = OffsetFrom(address);
    2059    15526662 :   if (alignment == kDoubleAligned && (offset & kDoubleAlignmentMask) != 0)
    2060             :     return kPointerSize;
    2061             :   if (alignment == kDoubleUnaligned && (offset & kDoubleAlignmentMask) == 0)
    2062             :     return kDoubleSize - kPointerSize;  // No fill if double is always aligned.
    2063             :   return 0;
    2064             : }
    2065             : 
    2066             : 
    2067           0 : HeapObject* Heap::PrecedeWithFiller(HeapObject* object, int filler_size) {
    2068           0 :   CreateFillerObjectAt(object->address(), filler_size, ClearRecordedSlots::kNo);
    2069           0 :   return HeapObject::FromAddress(object->address() + filler_size);
    2070             : }
    2071             : 
    2072             : 
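                     : // Given a raw allocation of |allocation_size| bytes, places filler objects
                     : // before and/or after |object| so that an object of |object_size| bytes ends
                     : // up at the requested alignment.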
    2073           0 : HeapObject* Heap::AlignWithFiller(HeapObject* object, int object_size,
    2074             :                                   int allocation_size,
    2075             :                                   AllocationAlignment alignment) {
    2076           0 :   int filler_size = allocation_size - object_size;
    2077             :   DCHECK(filler_size > 0);
    2078           0 :   int pre_filler = GetFillToAlign(object->address(), alignment);
    2079           0 :   if (pre_filler) {
    2080           0 :     object = PrecedeWithFiller(object, pre_filler);
    2081           0 :     filler_size -= pre_filler;
    2082             :   }
    2083           0 :   if (filler_size)
    2084           0 :     CreateFillerObjectAt(object->address() + object_size, filler_size,
    2085           0 :                          ClearRecordedSlots::kNo);
    2086           0 :   return object;
    2087             : }
    2088             : 
    2089             : 
    2090           0 : HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {
    2091           0 :   return AlignWithFiller(object, size - kPointerSize, size, kDoubleAligned);
    2092             : }
    2093             : 
    2094             : 
    2095      131363 : void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
    2096      131363 :   ArrayBufferTracker::RegisterNew(this, buffer);
    2097      131363 : }
    2098             : 
    2099             : 
    2100        3483 : void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
    2101        3483 :   ArrayBufferTracker::Unregister(this, buffer);
    2102        3483 : }
    2103             : 
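                     : // Once survival events have been recorded, rescales the initial
                     : // old-generation allocation limit in proportion to the average survival
                     : // ratio.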
    2104      235487 : void Heap::ConfigureInitialOldGenerationSize() {
    2105      179011 :   if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
    2106             :     old_generation_allocation_limit_ =
    2107             :         Max(MinimumAllocationLimitGrowingStep(),
    2108             :             static_cast<size_t>(
    2109      112952 :                 static_cast<double>(old_generation_allocation_limit_) *
    2110      169428 :                 (tracer()->AverageSurvivalRatio() / 100)));
    2111             :   }
    2112      122535 : }
    2113             : 
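                     : // Allocates a map in which several fields are left uninitialized; they are
                     : // filled in later by FinalizePartialMap() once the required roots exist.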
    2114         215 : AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
    2115             :                                           int instance_size) {
    2116             :   Object* result = nullptr;
    2117         215 :   AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE);
    2118         215 :   if (!allocation.To(&result)) return allocation;
    2119             : 
    2120             :   // Map::cast cannot be used due to uninitialized map field.
    2121             :   reinterpret_cast<Map*>(result)->set_map(
    2122         215 :       reinterpret_cast<Map*>(root(kMetaMapRootIndex)));
    2123             :   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
    2124             :   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
    2125             :   // Initialize to only containing tagged fields.
    2126             :   reinterpret_cast<Map*>(result)->set_visitor_id(
    2127         215 :       StaticVisitorBase::GetVisitorId(instance_type, instance_size, false));
    2128             :   if (FLAG_unbox_double_fields) {
    2129             :     reinterpret_cast<Map*>(result)
    2130         215 :         ->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
    2131             :   }
    2132             :   reinterpret_cast<Map*>(result)->clear_unused();
    2133             :   reinterpret_cast<Map*>(result)
    2134             :       ->set_inobject_properties_or_constructor_function_index(0);
    2135             :   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
    2136             :   reinterpret_cast<Map*>(result)->set_bit_field(0);
    2137             :   reinterpret_cast<Map*>(result)->set_bit_field2(0);
    2138             :   int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
    2139             :                    Map::OwnsDescriptors::encode(true) |
    2140             :                    Map::ConstructionCounter::encode(Map::kNoSlackTracking);
    2141             :   reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
    2142         215 :   reinterpret_cast<Map*>(result)->set_weak_cell_cache(Smi::kZero);
    2143         215 :   return result;
    2144             : }
    2145             : 
    2146             : 
    2147    31726380 : AllocationResult Heap::AllocateMap(InstanceType instance_type,
    2148             :                                    int instance_size,
    2149   190358204 :                                    ElementsKind elements_kind) {
    2150             :   HeapObject* result = nullptr;
    2151    31726380 :   AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE);
    2152    31726392 :   if (!allocation.To(&result)) return allocation;
    2153             : 
    2154    31726385 :   isolate()->counters()->maps_created()->Increment();
    2155             :   result->set_map_no_write_barrier(meta_map());
    2156             :   Map* map = Map::cast(result);
    2157             :   map->set_instance_type(instance_type);
    2158    31726382 :   map->set_prototype(null_value(), SKIP_WRITE_BARRIER);
    2159    31726365 :   map->set_constructor_or_backpointer(null_value(), SKIP_WRITE_BARRIER);
    2160             :   map->set_instance_size(instance_size);
    2161             :   map->clear_unused();
    2162             :   map->set_inobject_properties_or_constructor_function_index(0);
    2163    31726361 :   map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
    2164             :   map->set_dependent_code(DependentCode::cast(empty_fixed_array()),
    2165    31726344 :                           SKIP_WRITE_BARRIER);
    2166    31726343 :   map->set_weak_cell_cache(Smi::kZero);
    2167    31726356 :   map->set_raw_transitions(Smi::kZero);
    2168             :   map->set_unused_property_fields(0);
    2169    31726370 :   map->set_instance_descriptors(empty_descriptor_array());
    2170             :   if (FLAG_unbox_double_fields) {
    2171    31726370 :     map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
    2172             :   }
    2173             :   // Must be called only after |instance_type|, |instance_size| and
    2174             :   // |layout_descriptor| are set.
    2175             :   map->set_visitor_id(Heap::GetStaticVisitorIdForMap(map));
    2176             :   map->set_bit_field(0);
    2177             :   map->set_bit_field2(1 << Map::kIsExtensible);
    2178             :   int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
    2179             :                    Map::OwnsDescriptors::encode(true) |
    2180             :                    Map::ConstructionCounter::encode(Map::kNoSlackTracking);
    2181             :   map->set_bit_field3(bit_field3);
    2182             :   map->set_elements_kind(elements_kind);
    2183             :   map->set_new_target_is_base(true);
    2184             : 
    2185    31726377 :   return map;
    2186             : }
    2187             : 
    2188             : 
    2189      239747 : AllocationResult Heap::AllocateFillerObject(int size, bool double_align,
    2190             :                                             AllocationSpace space) {
    2191             :   HeapObject* obj = nullptr;
    2192             :   {
    2193      239747 :     AllocationAlignment align = double_align ? kDoubleAligned : kWordAligned;
    2194      239747 :     AllocationResult allocation = AllocateRaw(size, space, align);
    2195      239747 :     if (!allocation.To(&obj)) return allocation;
    2196             :   }
    2197             : #ifdef DEBUG
    2198             :   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
    2199             :   DCHECK(chunk->owner()->identity() == space);
    2200             : #endif
    2201      204486 :   CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
    2202      204486 :   return obj;
    2203             : }
    2204             : 
    2205             : 
    2206             : const Heap::StringTypeTable Heap::string_type_table[] = {
    2207             : #define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
    2208             :   { type, size, k##camel_name##MapRootIndex }             \
    2209             :   ,
    2210             :     STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
    2211             : #undef STRING_TYPE_ELEMENT
    2212             : };
    2213             : 
    2214             : 
    2215             : const Heap::ConstantStringTable Heap::constant_string_table[] = {
    2216             :     {"", kempty_stringRootIndex},
    2217             : #define CONSTANT_STRING_ELEMENT(name, contents) \
    2218             :   { contents, k##name##RootIndex }              \
    2219             :   ,
    2220             :     INTERNALIZED_STRING_LIST(CONSTANT_STRING_ELEMENT)
    2221             : #undef CONSTANT_STRING_ELEMENT
    2222             : };
    2223             : 
    2224             : 
    2225             : const Heap::StructTable Heap::struct_table[] = {
    2226             : #define STRUCT_TABLE_ELEMENT(NAME, Name, name)        \
    2227             :   { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex } \
    2228             :   ,
    2229             :     STRUCT_LIST(STRUCT_TABLE_ELEMENT)
    2230             : #undef STRUCT_TABLE_ELEMENT
    2231             : };
    2232             : 
    2233             : namespace {
    2234             : 
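                     : // Fills in the fields that AllocatePartialMap had to leave uninitialized
                     : // because the empty arrays and oddballs did not exist yet at that point.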
    2235        1075 : void FinalizePartialMap(Heap* heap, Map* map) {
    2236         215 :   map->set_code_cache(heap->empty_fixed_array());
    2237         215 :   map->set_dependent_code(DependentCode::cast(heap->empty_fixed_array()));
    2238         215 :   map->set_raw_transitions(Smi::kZero);
    2239         215 :   map->set_instance_descriptors(heap->empty_descriptor_array());
    2240             :   if (FLAG_unbox_double_fields) {
    2241         215 :     map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
    2242             :   }
    2243         215 :   map->set_prototype(heap->null_value());
    2244         215 :   map->set_constructor_or_backpointer(heap->null_value());
    2245         215 : }
    2246             : 
    2247             : }  // namespace
    2248             : 
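                     : // Creates the root maps. The meta map and a handful of other maps are first
                     : // allocated in a partial state, because the objects their remaining fields
                     : // point to cannot be allocated until these maps exist.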
    2249         645 : bool Heap::CreateInitialMaps() {
    2250             :   HeapObject* obj = nullptr;
    2251             :   {
    2252          43 :     AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize);
    2253          43 :     if (!allocation.To(&obj)) return false;
    2254             :   }
    2255             :   // Map::cast cannot be used due to uninitialized map field.
    2256             :   Map* new_meta_map = reinterpret_cast<Map*>(obj);
    2257             :   set_meta_map(new_meta_map);
    2258          43 :   new_meta_map->set_map(new_meta_map);
    2259             : 
    2260             :   {  // Partial map allocation
    2261             : #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name)                \
    2262             :   {                                                                          \
    2263             :     Map* map;                                                                \
    2264             :     if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
    2265             :     set_##field_name##_map(map);                                             \
    2266             :   }
    2267             : 
    2268          86 :     ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
    2269             :     fixed_array_map()->set_elements_kind(FAST_HOLEY_ELEMENTS);
    2270          86 :     ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, undefined);
    2271          86 :     ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, null);
    2272          86 :     ALLOCATE_PARTIAL_MAP(ODDBALL_TYPE, Oddball::kSize, the_hole);
    2273             : 
    2274             : #undef ALLOCATE_PARTIAL_MAP
    2275             :   }
    2276             : 
    2277             :   // Allocate the empty array.
    2278             :   {
    2279          43 :     AllocationResult allocation = AllocateEmptyFixedArray();
    2280          43 :     if (!allocation.To(&obj)) return false;
    2281             :   }
    2282             :   set_empty_fixed_array(FixedArray::cast(obj));
    2283             : 
    2284             :   {
    2285          43 :     AllocationResult allocation = Allocate(null_map(), OLD_SPACE);
    2286          43 :     if (!allocation.To(&obj)) return false;
    2287             :   }
    2288             :   set_null_value(Oddball::cast(obj));
    2289             :   Oddball::cast(obj)->set_kind(Oddball::kNull);
    2290             : 
    2291             :   {
    2292          43 :     AllocationResult allocation = Allocate(undefined_map(), OLD_SPACE);
    2293          43 :     if (!allocation.To(&obj)) return false;
    2294             :   }
    2295             :   set_undefined_value(Oddball::cast(obj));
    2296             :   Oddball::cast(obj)->set_kind(Oddball::kUndefined);
    2297             :   DCHECK(!InNewSpace(undefined_value()));
    2298             :   {
    2299          43 :     AllocationResult allocation = Allocate(the_hole_map(), OLD_SPACE);
    2300          43 :     if (!allocation.To(&obj)) return false;
    2301             :   }
    2302             :   set_the_hole_value(Oddball::cast(obj));
    2303             :   Oddball::cast(obj)->set_kind(Oddball::kTheHole);
    2304             : 
    2305             :   // Set preliminary exception sentinel value before actually initializing it.
    2306             :   set_exception(null_value());
    2307             : 
    2308             :   // Allocate the empty descriptor array.
    2309             :   {
    2310          43 :     AllocationResult allocation = AllocateEmptyFixedArray();
    2311          43 :     if (!allocation.To(&obj)) return false;
    2312             :   }
    2313             :   set_empty_descriptor_array(DescriptorArray::cast(obj));
    2314             : 
    2315             :   // Fix the instance_descriptors for the existing maps.
    2316          43 :   FinalizePartialMap(this, meta_map());
    2317          43 :   FinalizePartialMap(this, fixed_array_map());
    2318          43 :   FinalizePartialMap(this, undefined_map());
    2319             :   undefined_map()->set_is_undetectable();
    2320          43 :   FinalizePartialMap(this, null_map());
    2321             :   null_map()->set_is_undetectable();
    2322          43 :   FinalizePartialMap(this, the_hole_map());
    2323             : 
    2324             :   {  // Map allocation
    2325             : #define ALLOCATE_MAP(instance_type, size, field_name)               \
    2326             :   {                                                                 \
    2327             :     Map* map;                                                       \
    2328             :     if (!AllocateMap((instance_type), size).To(&map)) return false; \
    2329             :     set_##field_name##_map(map);                                    \
    2330             :   }
    2331             : 
    2332             : #define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
    2333             :   ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)
    2334             : 
    2335             : #define ALLOCATE_PRIMITIVE_MAP(instance_type, size, field_name, \
    2336             :                                constructor_function_index)      \
    2337             :   {                                                             \
    2338             :     ALLOCATE_MAP((instance_type), (size), field_name);          \
    2339             :     field_name##_map()->SetConstructorFunctionIndex(            \
    2340             :         (constructor_function_index));                          \
    2341             :   }
    2342             : 
    2343          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, fixed_cow_array)
    2344             :     fixed_cow_array_map()->set_elements_kind(FAST_HOLEY_ELEMENTS);
    2345             :     DCHECK_NE(fixed_array_map(), fixed_cow_array_map());
    2346             : 
    2347          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info)
    2348          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_info)
    2349          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, feedback_vector)
    2350          86 :     ALLOCATE_PRIMITIVE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number,
    2351             :                            Context::NUMBER_FUNCTION_INDEX)
    2352          86 :     ALLOCATE_MAP(MUTABLE_HEAP_NUMBER_TYPE, HeapNumber::kSize,
    2353             :                  mutable_heap_number)
    2354          86 :     ALLOCATE_PRIMITIVE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol,
    2355             :                            Context::SYMBOL_FUNCTION_INDEX)
    2356          86 :     ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign)
    2357             : 
    2358          86 :     ALLOCATE_PRIMITIVE_MAP(ODDBALL_TYPE, Oddball::kSize, boolean,
    2359             :                            Context::BOOLEAN_FUNCTION_INDEX);
    2360          86 :     ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized);
    2361          86 :     ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker);
    2362          86 :     ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, exception);
    2363          86 :     ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception);
    2364          86 :     ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, optimized_out);
    2365          86 :     ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, stale_register);
    2366             : 
    2367          86 :     ALLOCATE_MAP(JS_PROMISE_CAPABILITY_TYPE, JSPromiseCapability::kSize,
    2368             :                  js_promise_capability);
    2369             : 
    2370         989 :     for (unsigned i = 0; i < arraysize(string_type_table); i++) {
    2371         946 :       const StringTypeTable& entry = string_type_table[i];
    2372             :       {
    2373         946 :         AllocationResult allocation = AllocateMap(entry.type, entry.size);
    2374         946 :         if (!allocation.To(&obj)) return false;
    2375             :       }
    2376             :       Map* map = Map::cast(obj);
    2377             :       map->SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
    2378             :       // Mark cons string maps as unstable, because their objects can change
    2379             :       // maps during GC.
    2380        1892 :       if (StringShape(entry.type).IsCons()) map->mark_unstable();
    2381         946 :       roots_[entry.index] = map;
    2382             :     }
    2383             : 
    2384             :     {  // Create a separate external one byte string map for native sources.
    2385             :       AllocationResult allocation =
    2386             :           AllocateMap(SHORT_EXTERNAL_ONE_BYTE_STRING_TYPE,
    2387          43 :                       ExternalOneByteString::kShortSize);
    2388          43 :       if (!allocation.To(&obj)) return false;
    2389             :       Map* map = Map::cast(obj);
    2390             :       map->SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
    2391             :       set_native_source_string_map(map);
    2392             :     }
    2393             : 
    2394          86 :     ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
    2395             :     fixed_double_array_map()->set_elements_kind(FAST_HOLEY_DOUBLE_ELEMENTS);
    2396          86 :     ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array)
    2397          86 :     ALLOCATE_VARSIZE_MAP(BYTECODE_ARRAY_TYPE, bytecode_array)
    2398          86 :     ALLOCATE_VARSIZE_MAP(FREE_SPACE_TYPE, free_space)
    2399             : 
    2400             : #define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \
    2401             :   ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, fixed_##type##_array)
    2402             : 
    2403         774 :     TYPED_ARRAYS(ALLOCATE_FIXED_TYPED_ARRAY_MAP)
    2404             : #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP
    2405             : 
    2406          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements)
    2407             : 
    2408          86 :     ALLOCATE_VARSIZE_MAP(CODE_TYPE, code)
    2409             : 
    2410          86 :     ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell)
    2411          86 :     ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
    2412          86 :     ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
    2413          86 :     ALLOCATE_MAP(CELL_TYPE, Cell::kSize, no_closures_cell)
    2414          86 :     ALLOCATE_MAP(CELL_TYPE, Cell::kSize, one_closure_cell)
    2415          86 :     ALLOCATE_MAP(CELL_TYPE, Cell::kSize, many_closures_cell)
    2416          86 :     ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
    2417          86 :     ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
    2418             : 
    2419          86 :     ALLOCATE_VARSIZE_MAP(TRANSITION_ARRAY_TYPE, transition_array)
    2420             : 
    2421         946 :     for (unsigned i = 0; i < arraysize(struct_table); i++) {
    2422         903 :       const StructTable& entry = struct_table[i];
    2423             :       Map* map;
    2424        1806 :       if (!AllocateMap(entry.type, entry.size).To(&map)) return false;
    2425         903 :       roots_[entry.index] = map;
    2426             :     }
    2427             : 
    2428          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, hash_table)
    2429          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, ordered_hash_table)
    2430          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, unseeded_number_dictionary)
    2431             : 
    2432          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, function_context)
    2433          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, catch_context)
    2434          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, with_context)
    2435          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, debug_evaluate_context)
    2436          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, block_context)
    2437          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_context)
    2438          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, eval_context)
    2439          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, script_context)
    2440          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, script_context_table)
    2441             : 
    2442          86 :     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, native_context)
    2443             :     native_context_map()->set_dictionary_map(true);
    2444             :     native_context_map()->set_visitor_id(
    2445             :         StaticVisitorBase::kVisitNativeContext);
    2446             : 
    2447          86 :     ALLOCATE_MAP(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kAlignedSize,
    2448             :                  shared_function_info)
    2449             : 
    2450          86 :     ALLOCATE_MAP(JS_MESSAGE_OBJECT_TYPE, JSMessageObject::kSize, message_object)
    2451          86 :     ALLOCATE_MAP(JS_OBJECT_TYPE, JSObject::kHeaderSize + kPointerSize, external)
    2452             :     external_map()->set_is_extensible(false);
    2453             : #undef ALLOCATE_PRIMITIVE_MAP
    2454             : #undef ALLOCATE_VARSIZE_MAP
    2455             : #undef ALLOCATE_MAP
    2456             :   }
    2457             : 
    2458             :   {
    2459          43 :     AllocationResult allocation = AllocateEmptyScopeInfo();
    2460          43 :     if (!allocation.To(&obj)) return false;
    2461             :   }
    2462             : 
    2463             :   set_empty_scope_info(ScopeInfo::cast(obj));
    2464             :   {
    2465          43 :     AllocationResult allocation = Allocate(boolean_map(), OLD_SPACE);
    2466          43 :     if (!allocation.To(&obj)) return false;
    2467             :   }
    2468             :   set_true_value(Oddball::cast(obj));
    2469             :   Oddball::cast(obj)->set_kind(Oddball::kTrue);
    2470             : 
    2471             :   {
    2472          43 :     AllocationResult allocation = Allocate(boolean_map(), OLD_SPACE);
    2473          43 :     if (!allocation.To(&obj)) return false;
    2474             :   }
    2475             :   set_false_value(Oddball::cast(obj));
    2476             :   Oddball::cast(obj)->set_kind(Oddball::kFalse);
    2477             : 
    2478             :   {  // Empty arrays
    2479             :     {
    2480             :       ByteArray* byte_array;
    2481          86 :       if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false;
    2482             :       set_empty_byte_array(byte_array);
    2483             :     }
    2484             : 
    2485             : #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \
    2486             :   {                                                                     \
    2487             :     FixedTypedArrayBase* obj;                                           \
    2488             :     if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \
    2489             :       return false;                                                     \
    2490             :     set_empty_fixed_##type##_array(obj);                                \
    2491             :   }
    2492             : 
    2493         387 :     TYPED_ARRAYS(ALLOCATE_EMPTY_FIXED_TYPED_ARRAY)
    2494             : #undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY
    2495             :   }
    2496             :   DCHECK(!InNewSpace(empty_fixed_array()));
    2497          43 :   return true;
    2498             : }
    2499             : 
    2500    36576407 : AllocationResult Heap::AllocateHeapNumber(MutableMode mode,
    2501       71663 :                                           PretenureFlag pretenure) {
    2502             :   // Statically ensure that it is safe to allocate heap numbers in paged
    2503             :   // spaces.
    2504             :   int size = HeapNumber::kSize;
    2505             :   STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize);
    2506             : 
    2507             :   AllocationSpace space = SelectSpace(pretenure);
    2508             : 
    2509             :   HeapObject* result = nullptr;
    2510             :   {
    2511    36576407 :     AllocationResult allocation = AllocateRaw(size, space, kDoubleUnaligned);
    2512    36576407 :     if (!allocation.To(&result)) return allocation;
    2513             :   }
    2514             : 
    2515    36560994 :   Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map();
    2516             :   HeapObject::cast(result)->set_map_no_write_barrier(map);
    2517    36560994 :   return result;
    2518             : }
    2519             : 
    2520    30418506 : AllocationResult Heap::AllocateCell(Object* value) {
    2521             :   int size = Cell::kSize;
    2522             :   STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
    2523             : 
    2524             :   HeapObject* result = nullptr;
    2525             :   {
    2526    15209239 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    2527    15209268 :     if (!allocation.To(&result)) return allocation;
    2528             :   }
    2529             :   result->set_map_no_write_barrier(cell_map());
    2530    15209267 :   Cell::cast(result)->set_value(value);
    2531    15209258 :   return result;
    2532             : }
    2533             : 
    2534    35417742 : AllocationResult Heap::AllocatePropertyCell() {
    2535             :   int size = PropertyCell::kSize;
    2536             :   STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize);
    2537             : 
    2538             :   HeapObject* result = nullptr;
    2539     8854434 :   AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    2540     8854436 :   if (!allocation.To(&result)) return allocation;
    2541             : 
    2542             :   result->set_map_no_write_barrier(global_property_cell_map());
    2543             :   PropertyCell* cell = PropertyCell::cast(result);
    2544             :   cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
    2545     8854436 :                            SKIP_WRITE_BARRIER);
    2546             :   cell->set_property_details(PropertyDetails(Smi::kZero));
    2547     8854436 :   cell->set_value(the_hole_value());
    2548     8854434 :   return result;
    2549             : }
    2550             : 
    2551             : 
    2552   118102869 : AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
    2553             :   int size = WeakCell::kSize;
    2554             :   STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize);
    2555             :   HeapObject* result = nullptr;
    2556             :   {
    2557    39367606 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    2558    39367640 :     if (!allocation.To(&result)) return allocation;
    2559             :   }
    2560             :   result->set_map_no_write_barrier(weak_cell_map());
    2561    39367637 :   WeakCell::cast(result)->initialize(value);
    2562             :   WeakCell::cast(result)->clear_next(the_hole_value());
    2563    39367619 :   return result;
    2564             : }
    2565             : 
    2566             : 
    2567     3173725 : AllocationResult Heap::AllocateTransitionArray(int capacity) {
    2568             :   DCHECK(capacity > 0);
    2569             :   HeapObject* raw_array = nullptr;
    2570             :   {
    2571      634745 :     AllocationResult allocation = AllocateRawFixedArray(capacity, TENURED);
    2572      634745 :     if (!allocation.To(&raw_array)) return allocation;
    2573             :   }
    2574             :   raw_array->set_map_no_write_barrier(transition_array_map());
    2575             :   TransitionArray* array = TransitionArray::cast(raw_array);
    2576             :   array->set_length(capacity);
    2577      634745 :   MemsetPointer(array->data_start(), undefined_value(), capacity);
    2578             :   // Transition arrays are tenured. When black allocation is on we have to
    2579             :   // add the transition array to the list of encountered_transition_arrays.
    2580      634745 :   if (incremental_marking()->black_allocation()) {
    2581             :     array->set_next_link(encountered_transition_arrays(),
    2582             :                          UPDATE_WEAK_WRITE_BARRIER);
    2583             :     set_encountered_transition_arrays(array);
    2584             :   } else {
    2585             :     array->set_next_link(undefined_value(), SKIP_WRITE_BARRIER);
    2586             :   }
    2587      634745 :   return array;
    2588             : }
    2589             : 
    2590          43 : bool Heap::CreateApiObjects() {
    2591             :   HandleScope scope(isolate());
    2592          86 :   set_message_listeners(*TemplateList::New(isolate(), 2));
    2593             :   HeapObject* obj = nullptr;
    2594             :   {
    2595          43 :     AllocationResult allocation = AllocateStruct(INTERCEPTOR_INFO_TYPE);
    2596          43 :     if (!allocation.To(&obj)) return false;
    2597             :   }
    2598             :   InterceptorInfo* info = InterceptorInfo::cast(obj);
    2599             :   info->set_flags(0);
    2600             :   set_noop_interceptor_info(info);
    2601          43 :   return true;
    2602             : }
    2603             : 
    2604             : 
    2605          43 : void Heap::CreateJSEntryStub() {
    2606             :   JSEntryStub stub(isolate(), StackFrame::ENTRY);
    2607          86 :   set_js_entry_code(*stub.GetCode());
    2608          43 : }
    2609             : 
    2610             : 
    2611          43 : void Heap::CreateJSConstructEntryStub() {
    2612             :   JSEntryStub stub(isolate(), StackFrame::ENTRY_CONSTRUCT);
    2613          86 :   set_js_construct_entry_code(*stub.GetCode());
    2614          43 : }
    2615             : 
    2616             : 
    2617          43 : void Heap::CreateFixedStubs() {
    2618             :   // Here we create roots for fixed stubs. They are needed at GC
    2619             :   // for cooking and uncooking (check out frames.cc).
    2620             :   // This eliminates the need for doing dictionary lookups in the
    2621             :   // stub cache for these stubs.
    2622             :   HandleScope scope(isolate());
    2623             : 
    2624             :   // Create stubs that should be there, so we don't unexpectedly have to
    2625             :   // create them if we need them during the creation of another stub.
    2626             :   // Stub creation mixes raw pointers and handles in an unsafe manner so
    2627             :   // we cannot create stubs while we are creating stubs.
    2628          43 :   CodeStub::GenerateStubsAheadOfTime(isolate());
    2629             : 
    2630             :   // MacroAssembler::Abort calls (usually enabled with --debug-code) depend on
    2631             :   // CEntryStub, so we need to call GenerateStubsAheadOfTime before JSEntryStub
    2632             :   // is created.
    2633             : 
    2634             :   // gcc-4.4 has a problem generating correct code for the following snippet:
    2635             :   // {  JSEntryStub stub;
    2636             :   //    js_entry_code_ = *stub.GetCode();
    2637             :   // }
    2638             :   // {  JSConstructEntryStub stub;
    2639             :   //    js_construct_entry_code_ = *stub.GetCode();
    2640             :   // }
    2641             :   // To work around the problem, make separate functions without inlining.
    2642          43 :   Heap::CreateJSEntryStub();
    2643          43 :   Heap::CreateJSConstructEntryStub();
    2644          43 : }
    2645             : 
    2646             : 
    2647         301 : void Heap::CreateInitialObjects() {
    2648             :   HandleScope scope(isolate());
    2649             :   Factory* factory = isolate()->factory();
    2650             : 
    2651             :   // The -0 value must be set before NewNumber works.
    2652             :   set_minus_zero_value(*factory->NewHeapNumber(-0.0, IMMUTABLE, TENURED));
    2653             :   DCHECK(std::signbit(minus_zero_value()->Number()) != 0);
    2654             : 
    2655             :   set_nan_value(*factory->NewHeapNumber(
    2656             :       std::numeric_limits<double>::quiet_NaN(), IMMUTABLE, TENURED));
    2657             :   set_hole_nan_value(
    2658             :       *factory->NewHeapNumberFromBits(kHoleNanInt64, IMMUTABLE, TENURED));
    2659             :   set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, IMMUTABLE, TENURED));
    2660             :   set_minus_infinity_value(
    2661             :       *factory->NewHeapNumber(-V8_INFINITY, IMMUTABLE, TENURED));
    2662             : 
    2663             :   // Allocate initial string table.
    2664          86 :   set_string_table(*StringTable::New(isolate(), kInitialStringTableSize));
    2665             : 
    2667             : 
    2668             :   // Finish initializing oddballs after creating the string table.
    2669             :   Oddball::Initialize(isolate(), factory->undefined_value(), "undefined",
    2670          43 :                       factory->nan_value(), "undefined", Oddball::kUndefined);
    2671             : 
    2672             :   // Initialize the null_value.
    2673             :   Oddball::Initialize(isolate(), factory->null_value(), "null",
    2674          43 :                       handle(Smi::kZero, isolate()), "object", Oddball::kNull);
    2675             : 
    2676             :   // Initialize the_hole_value.
    2677             :   Oddball::Initialize(isolate(), factory->the_hole_value(), "hole",
    2678             :                       factory->hole_nan_value(), "undefined",
    2679          43 :                       Oddball::kTheHole);
    2680             : 
    2681             :   // Initialize the true_value.
    2682             :   Oddball::Initialize(isolate(), factory->true_value(), "true",
    2683             :                       handle(Smi::FromInt(1), isolate()), "boolean",
    2684          43 :                       Oddball::kTrue);
    2685             : 
    2686             :   // Initialize the false_value.
    2687             :   Oddball::Initialize(isolate(), factory->false_value(), "false",
    2688             :                       handle(Smi::kZero, isolate()), "boolean",
    2689          43 :                       Oddball::kFalse);
    2690             : 
    2691             :   set_uninitialized_value(
    2692             :       *factory->NewOddball(factory->uninitialized_map(), "uninitialized",
    2693             :                            handle(Smi::FromInt(-1), isolate()), "undefined",
    2694          86 :                            Oddball::kUninitialized));
    2695             : 
    2696             :   set_arguments_marker(
    2697             :       *factory->NewOddball(factory->arguments_marker_map(), "arguments_marker",
    2698             :                            handle(Smi::FromInt(-4), isolate()), "undefined",
    2699          86 :                            Oddball::kArgumentsMarker));
    2700             : 
    2701             :   set_termination_exception(*factory->NewOddball(
    2702             :       factory->termination_exception_map(), "termination_exception",
    2703          86 :       handle(Smi::FromInt(-3), isolate()), "undefined", Oddball::kOther));
    2704             : 
    2705             :   set_exception(*factory->NewOddball(factory->exception_map(), "exception",
    2706             :                                      handle(Smi::FromInt(-5), isolate()),
    2707          86 :                                      "undefined", Oddball::kException));
    2708             : 
    2709             :   set_optimized_out(*factory->NewOddball(factory->optimized_out_map(),
    2710             :                                          "optimized_out",
    2711             :                                          handle(Smi::FromInt(-6), isolate()),
    2712          86 :                                          "undefined", Oddball::kOptimizedOut));
    2713             : 
    2714             :   set_stale_register(
    2715             :       *factory->NewOddball(factory->stale_register_map(), "stale_register",
    2716             :                            handle(Smi::FromInt(-7), isolate()), "undefined",
    2717          86 :                            Oddball::kStaleRegister));
    2718             : 
    2719        8041 :   for (unsigned i = 0; i < arraysize(constant_string_table); i++) {
    2720             :     Handle<String> str =
    2721        7998 :         factory->InternalizeUtf8String(constant_string_table[i].contents);
    2722       15996 :     roots_[constant_string_table[i].index] = *str;
    2723             :   }
    2724             : 
    2725             :   // Create the code_stubs dictionary. The initial size is set to avoid
    2726             :   // expanding the dictionary during bootstrapping.
    2727          86 :   set_code_stubs(*UnseededNumberDictionary::New(isolate(), 128));
    2728             : 
    2729             :   set_instanceof_cache_function(Smi::kZero);
    2730             :   set_instanceof_cache_map(Smi::kZero);
    2731             :   set_instanceof_cache_answer(Smi::kZero);
    2732             : 
    2733             :   {
    2734             :     HandleScope scope(isolate());
    2735             : #define SYMBOL_INIT(name)                                              \
    2736             :   {                                                                    \
    2737             :     Handle<String> name##d = factory->NewStringFromStaticChars(#name); \
    2738             :     Handle<Symbol> symbol(isolate()->factory()->NewPrivateSymbol());   \
    2739             :     symbol->set_name(*name##d);                                        \
    2740             :     roots_[k##name##RootIndex] = *symbol;                              \
    2741             :   }
    2742        2967 :     PRIVATE_SYMBOL_LIST(SYMBOL_INIT)
    2743             : #undef SYMBOL_INIT
    2744             :   }
    2745             : 
    2746             :   {
    2747             :     HandleScope scope(isolate());
    2748             : #define SYMBOL_INIT(name, description)                                      \
    2749             :   Handle<Symbol> name = factory->NewSymbol();                               \
    2750             :   Handle<String> name##d = factory->NewStringFromStaticChars(#description); \
    2751             :   name->set_name(*name##d);                                                 \
    2752             :   roots_[k##name##RootIndex] = *name;
    2753         903 :     PUBLIC_SYMBOL_LIST(SYMBOL_INIT)
    2754             : #undef SYMBOL_INIT
    2755             : 
    2756             : #define SYMBOL_INIT(name, description)                                      \
    2757             :   Handle<Symbol> name = factory->NewSymbol();                               \
    2758             :   Handle<String> name##d = factory->NewStringFromStaticChars(#description); \
    2759             :   name->set_is_well_known_symbol(true);                                     \
    2760             :   name->set_name(*name##d);                                                 \
    2761             :   roots_[k##name##RootIndex] = *name;
    2762         301 :     WELL_KNOWN_SYMBOL_LIST(SYMBOL_INIT)
    2763             : #undef SYMBOL_INIT
    2764             :   }
    2765             : 
    2766             :   Handle<NameDictionary> empty_properties_dictionary =
    2767          43 :       NameDictionary::NewEmpty(isolate(), TENURED);
    2768          43 :   empty_properties_dictionary->SetRequiresCopyOnCapacityChange();
    2769             :   set_empty_properties_dictionary(*empty_properties_dictionary);
    2770             : 
    2771             :   set_public_symbol_table(*empty_properties_dictionary);
    2772             :   set_api_symbol_table(*empty_properties_dictionary);
    2773             :   set_api_private_symbol_table(*empty_properties_dictionary);
    2774             : 
    2775             :   set_number_string_cache(
    2776          86 :       *factory->NewFixedArray(kInitialNumberStringCacheSize * 2, TENURED));
    2777             : 
    2778             :   // Allocate cache for single character one byte strings.
    2779             :   set_single_character_string_cache(
    2780          86 :       *factory->NewFixedArray(String::kMaxOneByteCharCode + 1, TENURED));
    2781             : 
    2782             :   // Allocate cache for string split and regexp-multiple.
    2783             :   set_string_split_cache(*factory->NewFixedArray(
    2784          86 :       RegExpResultsCache::kRegExpResultsCacheSize, TENURED));
    2785             :   set_regexp_multiple_cache(*factory->NewFixedArray(
    2786          86 :       RegExpResultsCache::kRegExpResultsCacheSize, TENURED));
    2787             : 
    2788          86 :   set_undefined_cell(*factory->NewCell(factory->undefined_value()));
    2789             : 
    2790             :   // The microtask queue uses the empty fixed array as a sentinel for "empty".
    2791             :   // The number of queued microtasks is stored in Isolate::pending_microtask_count().
    2792             :   set_microtask_queue(empty_fixed_array());
    2793             : 
    2794             :   {
    2795             :     Handle<FixedArray> empty_sloppy_arguments_elements =
    2796          43 :         factory->NewFixedArray(2, TENURED);
    2797          43 :     empty_sloppy_arguments_elements->set_map(sloppy_arguments_elements_map());
    2798             :     set_empty_sloppy_arguments_elements(*empty_sloppy_arguments_elements);
    2799             :   }
    2800             : 
    2801             :   {
    2802          43 :     Handle<WeakCell> cell = factory->NewWeakCell(factory->undefined_value());
    2803             :     set_empty_weak_cell(*cell);
    2804             :     cell->clear();
    2805             :   }
    2806             : 
    2807             :   set_detached_contexts(empty_fixed_array());
    2808             :   set_retained_maps(ArrayList::cast(empty_fixed_array()));
    2809             : 
    2810             :   set_weak_object_to_code_table(
    2811             :       *WeakHashTable::New(isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY,
    2812          86 :                           TENURED));
    2813             : 
    2814             :   set_weak_new_space_object_to_code_list(
    2815          86 :       ArrayList::cast(*(factory->NewFixedArray(16, TENURED))));
    2816             :   weak_new_space_object_to_code_list()->SetLength(0);
    2817             : 
    2818             :   set_code_coverage_list(undefined_value());
    2819             : 
    2820             :   set_script_list(Smi::kZero);
    2821             : 
    2822             :   Handle<SeededNumberDictionary> slow_element_dictionary =
    2823          43 :       SeededNumberDictionary::NewEmpty(isolate(), TENURED);
    2824             :   slow_element_dictionary->set_requires_slow_elements();
    2825             :   set_empty_slow_element_dictionary(*slow_element_dictionary);
    2826             : 
    2827          86 :   set_materialized_objects(*factory->NewFixedArray(0, TENURED));
    2828             : 
    2829             :   // Handling of script id generation is in Heap::NextScriptId().
    2830             :   set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId));
    2831             :   set_next_template_serial_number(Smi::kZero);
    2832             : 
    2833             :   // Allocate the empty script.
    2834          43 :   Handle<Script> script = factory->NewScript(factory->empty_string());
    2835             :   script->set_type(Script::TYPE_NATIVE);
    2836             :   set_empty_script(*script);
    2837             : 
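                     :   // Protector cells start out in the valid state; they are invalidated later
                     :   // when the fast path they guard can no longer be relied upon.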
    2838          43 :   Handle<PropertyCell> cell = factory->NewPropertyCell();
    2839          43 :   cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
    2840             :   set_array_protector(*cell);
    2841             : 
    2842          43 :   cell = factory->NewPropertyCell();
    2843          43 :   cell->set_value(the_hole_value());
    2844             :   set_empty_property_cell(*cell);
    2845             : 
    2846          43 :   cell = factory->NewPropertyCell();
    2847          43 :   cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
    2848             :   set_array_iterator_protector(*cell);
    2849             : 
    2850             :   Handle<Cell> is_concat_spreadable_cell = factory->NewCell(
    2851          43 :       handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
    2852             :   set_is_concat_spreadable_protector(*is_concat_spreadable_cell);
    2853             : 
    2854             :   Handle<Cell> species_cell = factory->NewCell(
    2855          43 :       handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
    2856             :   set_species_protector(*species_cell);
    2857             : 
    2858          43 :   cell = factory->NewPropertyCell();
    2859          43 :   cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
    2860             :   set_string_length_protector(*cell);
    2861             : 
    2862             :   Handle<Cell> fast_array_iteration_cell = factory->NewCell(
    2863          43 :       handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
    2864             :   set_fast_array_iteration_protector(*fast_array_iteration_cell);
    2865             : 
    2866          43 :   cell = factory->NewPropertyCell();
    2867          43 :   cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
    2868             :   set_array_buffer_neutering_protector(*cell);
    2869             : 
    2870             :   set_serialized_templates(empty_fixed_array());
    2871             :   set_serialized_global_proxy_sizes(empty_fixed_array());
    2872             : 
    2873             :   set_weak_stack_trace_list(Smi::kZero);
    2874             : 
    2875             :   set_noscript_shared_function_infos(Smi::kZero);
    2876             : 
    2877             :   // Initialize context slot cache.
    2878         129 :   isolate_->context_slot_cache()->Clear();
    2879             : 
    2880             :   // Initialize descriptor cache.
    2881          86 :   isolate_->descriptor_lookup_cache()->Clear();
    2882             : 
    2883             :   // Initialize compilation cache.
    2884          86 :   isolate_->compilation_cache()->Clear();
    2885             : 
    2886             :   // Finish creating JSPromiseCapabilityMap
    2887             :   {
    2888             :     // TODO(caitp): This initialization can be removed once PromiseCapability
    2889             :     // object is no longer used by builtins implemented in JavaScript.
    2890             :     Handle<Map> map = factory->js_promise_capability_map();
    2891             :     map->set_inobject_properties_or_constructor_function_index(3);
    2892             : 
    2893          43 :     Map::EnsureDescriptorSlack(map, 3);
    2894             : 
    2895             :     PropertyAttributes attrs =
    2896             :         static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
    2897             :     {  // promise
    2898             :       Descriptor d = Descriptor::DataField(factory->promise_string(),
    2899             :                                            JSPromiseCapability::kPromiseIndex,
    2900          43 :                                            attrs, Representation::Tagged());
    2901          43 :       map->AppendDescriptor(&d);
    2902             :     }
    2903             : 
    2904             :     {  // resolve
    2905             :       Descriptor d = Descriptor::DataField(factory->resolve_string(),
    2906             :                                            JSPromiseCapability::kResolveIndex,
    2907          43 :                                            attrs, Representation::Tagged());
    2908          43 :       map->AppendDescriptor(&d);
    2909             :     }
    2910             : 
    2911             :     {  // reject
    2912             :       Descriptor d = Descriptor::DataField(factory->reject_string(),
    2913             :                                            JSPromiseCapability::kRejectIndex,
    2914          43 :                                            attrs, Representation::Tagged());
    2915          43 :       map->AppendDescriptor(&d);
    2916             :     }
    2917             : 
    2918             :     map->set_is_extensible(false);
    2919             :     set_js_promise_capability_map(*map);
    2920             :   }
    2921          43 : }
    2922             : 
    2923        2448 : bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
    2924    10134430 :   switch (root_index) {
    2925             :     case kNumberStringCacheRootIndex:
    2926             :     case kInstanceofCacheFunctionRootIndex:
    2927             :     case kInstanceofCacheMapRootIndex:
    2928             :     case kInstanceofCacheAnswerRootIndex:
    2929             :     case kCodeStubsRootIndex:
    2930             :     case kScriptListRootIndex:
    2931             :     case kMaterializedObjectsRootIndex:
    2932             :     case kMicrotaskQueueRootIndex:
    2933             :     case kDetachedContextsRootIndex:
    2934             :     case kWeakObjectToCodeTableRootIndex:
    2935             :     case kWeakNewSpaceObjectToCodeListRootIndex:
    2936             :     case kRetainedMapsRootIndex:
    2937             :     case kCodeCoverageListRootIndex:
    2938             :     case kNoScriptSharedFunctionInfosRootIndex:
    2939             :     case kWeakStackTraceListRootIndex:
    2940             :     case kSerializedTemplatesRootIndex:
    2941             :     case kSerializedGlobalProxySizesRootIndex:
    2942             :     case kPublicSymbolTableRootIndex:
    2943             :     case kApiSymbolTableRootIndex:
    2944             :     case kApiPrivateSymbolTableRootIndex:
    2945             : // Smi values
    2946             : #define SMI_ENTRY(type, name, Name) case k##Name##RootIndex:
    2947             :       SMI_ROOT_LIST(SMI_ENTRY)
    2948             : #undef SMI_ENTRY
    2949             :     // String table
    2950             :     case kStringTableRootIndex:
    2951             :       return true;
    2952             : 
    2953             :     default:
    2954        2328 :       return false;
    2955             :   }
    2956             : }
    2957             : 
    2958    10131982 : bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) {
    2959    19872543 :   return !RootCanBeWrittenAfterInitialization(root_index) &&
    2960    10131982 :          !InNewSpace(root(root_index));
    2961             : }
    2962             : 
    2963       66645 : bool Heap::IsUnmodifiedHeapObject(Object** p) {
    2964       66645 :   Object* object = *p;
    2965       66645 :   if (object->IsSmi()) return false;
    2966             :   HeapObject* heap_object = HeapObject::cast(object);
    2967       66645 :   if (!object->IsJSObject()) return false;
    2968             :   JSObject* js_object = JSObject::cast(object);
    2969       49011 :   if (!js_object->WasConstructedFromApiFunction()) return false;
    2970           7 :   Object* maybe_constructor = js_object->map()->GetConstructor();
    2971           7 :   if (!maybe_constructor->IsJSFunction()) return false;
    2972             :   JSFunction* constructor = JSFunction::cast(maybe_constructor);
    2973           7 :   if (js_object->elements()->length() != 0) return false;
    2974             : 
    2975           7 :   return constructor->initial_map() == heap_object->map();
    2976             : }
    2977             : 
    2978    42897637 : int Heap::FullSizeNumberStringCacheLength() {
    2979             :   // Compute the size of the number string cache based on the max newspace size.
    2980             :   // The number string cache has a minimum size based on twice the initial cache
    2981             :   // size to ensure that it is bigger after being made 'full size'.
    2982    42897637 :   size_t number_string_cache_size = max_semi_space_size_ / 512;
    2983             :   number_string_cache_size =
    2984             :       Max(static_cast<size_t>(kInitialNumberStringCacheSize * 2),
    2985             :           Min<size_t>(0x4000u, number_string_cache_size));
    2986             :   // There is a string and a number per entry so the length is twice the number
    2987             :   // of entries.
    2988    42897637 :   return static_cast<int>(number_string_cache_size * 2);
    2989             : }
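
The length computed above is just a clamped function of the maximum semi-space size: at least twice the initial cache size, at most 0x4000 entries, and two array slots per entry. A minimal standalone sketch of that arithmetic, using a hypothetical value of 128 entries as a stand-in for Heap::kInitialNumberStringCacheSize:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    // Hypothetical stand-in for Heap::kInitialNumberStringCacheSize.
    constexpr size_t kInitialNumberStringCacheSize = 128;

    int FullSizeNumberStringCacheLength(size_t max_semi_space_size) {
      size_t entries = max_semi_space_size / 512;
      entries = std::max(kInitialNumberStringCacheSize * 2,
                         std::min<size_t>(0x4000u, entries));
      // One string and one number per entry, so the backing array is twice
      // as long as the number of entries.
      return static_cast<int>(entries * 2);
    }

    int main() {
      // An 8 MB semi-space hits the 0x4000-entry cap, i.e. 32768 array slots.
      std::printf("%d\n", FullSizeNumberStringCacheLength(8u * 1024 * 1024));
    }
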
    2990             : 
    2991             : 
    2992   338543714 : void Heap::FlushNumberStringCache() {
    2993             :   // Flush the number to string cache.
    2994             :   int len = number_string_cache()->length();
    2995   338543714 :   for (int i = 0; i < len; i++) {
    2996   338490368 :     number_string_cache()->set_undefined(i);
    2997             :   }
    2998       53346 : }
    2999             : 
    3000             : 
    3001         774 : Map* Heap::MapForFixedTypedArray(ExternalArrayType array_type) {
    3002       17705 :   return Map::cast(roots_[RootIndexForFixedTypedArray(array_type)]);
    3003             : }
    3004             : 
    3005             : 
    3006       17705 : Heap::RootListIndex Heap::RootIndexForFixedTypedArray(
    3007             :     ExternalArrayType array_type) {
    3008       17705 :   switch (array_type) {
    3009             : #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
    3010             :   case kExternal##Type##Array:                                  \
    3011             :     return kFixed##Type##ArrayMapRootIndex;
    3012             : 
    3013        1312 :     TYPED_ARRAYS(ARRAY_TYPE_TO_ROOT_INDEX)
    3014             : #undef ARRAY_TYPE_TO_ROOT_INDEX
    3015             : 
    3016             :     default:
    3017           0 :       UNREACHABLE();
    3018             :       return kUndefinedValueRootIndex;
    3019             :   }
    3020             : }
    3021             : 
    3022             : 
    3023        3886 : Heap::RootListIndex Heap::RootIndexForEmptyFixedTypedArray(
    3024             :     ElementsKind elementsKind) {
    3025        3886 :   switch (elementsKind) {
    3026             : #define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
    3027             :   case TYPE##_ELEMENTS:                                           \
    3028             :     return kEmptyFixed##Type##ArrayRootIndex;
    3029             : 
    3030         429 :     TYPED_ARRAYS(ELEMENT_KIND_TO_ROOT_INDEX)
    3031             : #undef ELEMENT_KIND_TO_ROOT_INDEX
    3032             :     default:
    3033           0 :       UNREACHABLE();
    3034             :       return kUndefinedValueRootIndex;
    3035             :   }
    3036             : }
    3037             : 
    3038             : 
    3039           0 : FixedTypedArrayBase* Heap::EmptyFixedTypedArrayForMap(Map* map) {
    3040             :   return FixedTypedArrayBase::cast(
    3041        3886 :       roots_[RootIndexForEmptyFixedTypedArray(map->elements_kind())]);
    3042             : }
    3043             : 
    3044             : 
    3045     4373615 : AllocationResult Heap::AllocateForeign(Address address,
    3046     4373615 :                                        PretenureFlag pretenure) {
    3047             :   // Statically ensure that it is safe to allocate foreigns in paged spaces.
    3048             :   STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
    3049     4373615 :   AllocationSpace space = (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
    3050             :   Foreign* result = nullptr;
    3051     4373615 :   AllocationResult allocation = Allocate(foreign_map(), space);
    3052     4373618 :   if (!allocation.To(&result)) return allocation;
    3053             :   result->set_foreign_address(address);
    3054     4373610 :   return result;
    3055             : }
    3056             : 
    3057             : 
    3058    13109834 : AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
    3059     6554914 :   if (length < 0 || length > ByteArray::kMaxLength) {
    3060             :     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
    3061             :   }
    3062             :   int size = ByteArray::SizeFor(length);
    3063             :   AllocationSpace space = SelectSpace(pretenure);
    3064             :   HeapObject* result = nullptr;
    3065             :   {
    3066     6554924 :     AllocationResult allocation = AllocateRaw(size, space);
    3067     6554943 :     if (!allocation.To(&result)) return allocation;
    3068             :   }
    3069             : 
    3070             :   result->set_map_no_write_barrier(byte_array_map());
    3071             :   ByteArray::cast(result)->set_length(length);
    3072     6554920 :   return result;
    3073             : }
    3074             : 
    3075             : 
    3076     2103822 : AllocationResult Heap::AllocateBytecodeArray(int length,
    3077             :                                              const byte* const raw_bytecodes,
    3078             :                                              int frame_size,
    3079             :                                              int parameter_count,
    3080     6311493 :                                              FixedArray* constant_pool) {
    3081     2103822 :   if (length < 0 || length > BytecodeArray::kMaxLength) {
    3082             :     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
    3083             :   }
    3084             :   // Bytecode array is pretenured, so the constant pool array should be too.
    3085             :   DCHECK(!InNewSpace(constant_pool));
    3086             : 
    3087             :   int size = BytecodeArray::SizeFor(length);
    3088             :   HeapObject* result = nullptr;
    3089             :   {
    3090     2103822 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    3091     2103831 :     if (!allocation.To(&result)) return allocation;
    3092             :   }
    3093             : 
    3094             :   result->set_map_no_write_barrier(bytecode_array_map());
    3095             :   BytecodeArray* instance = BytecodeArray::cast(result);
    3096             :   instance->set_length(length);
    3097             :   instance->set_frame_size(frame_size);
    3098             :   instance->set_parameter_count(parameter_count);
    3099     2103831 :   instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget());
    3100             :   instance->set_osr_loop_nesting_level(0);
    3101             :   instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge);
    3102     2103825 :   instance->set_constant_pool(constant_pool);
    3103     2103830 :   instance->set_handler_table(empty_fixed_array());
    3104     2103832 :   instance->set_source_position_table(empty_byte_array());
    3105     2103831 :   CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length);
    3106             : 
    3107     2103829 :   return result;
    3108             : }
    3109             : 
    3110   135335259 : HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
    3111             :                                        ClearRecordedSlots mode) {
    3112   135335259 :   if (size == 0) return nullptr;
    3113   134655478 :   HeapObject* filler = HeapObject::FromAddress(addr);
    3114   134655478 :   if (size == kPointerSize) {
    3115             :     filler->set_map_no_write_barrier(
    3116             :         reinterpret_cast<Map*>(root(kOnePointerFillerMapRootIndex)));
    3117   131282910 :   } else if (size == 2 * kPointerSize) {
    3118             :     filler->set_map_no_write_barrier(
    3119             :         reinterpret_cast<Map*>(root(kTwoPointerFillerMapRootIndex)));
    3120             :   } else {
    3121             :     DCHECK_GT(size, 2 * kPointerSize);
    3122             :     filler->set_map_no_write_barrier(
    3123             :         reinterpret_cast<Map*>(root(kFreeSpaceMapRootIndex)));
    3124             :     FreeSpace::cast(filler)->nobarrier_set_size(size);
    3125             :   }
    3126   134655478 :   if (mode == ClearRecordedSlots::kYes) {
    3127    16130249 :     ClearRecordedSlotRange(addr, addr + size);
    3128             :   }
    3129             : 
    3130             :   // At this point, we may be deserializing the heap from a snapshot, and
    3131             :   // the maps may not have been created yet, in which case they are NULL.
    3132             :   DCHECK((filler->map() == NULL && !deserialization_complete_) ||
    3133             :          filler->map()->IsMap());
    3134   134658412 :   return filler;
    3135             : }
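
The filler written above is chosen purely by gap size: a one-word filler map, a two-word filler map, or a FreeSpace object that additionally records its own size. A compilable sketch of that three-way dispatch; the enum is illustrative, the real code stores map pointers:

    #include <cassert>

    constexpr int kPointerSize = sizeof(void*);  // assumption: word-sized pointers

    enum class FillerKind { kOnePointer, kTwoPointer, kFreeSpace };

    // Mirrors the size-based dispatch in CreateFillerObjectAt: the two smallest
    // gaps get fixed-size filler maps, anything larger becomes a FreeSpace that
    // also stores `size` so heap iteration can step over it.
    FillerKind FillerForSize(int size) {
      assert(size > 0 && size % kPointerSize == 0);
      if (size == kPointerSize) return FillerKind::kOnePointer;
      if (size == 2 * kPointerSize) return FillerKind::kTwoPointer;
      return FillerKind::kFreeSpace;
    }
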
    3136             : 
    3137             : 
    3138      628946 : bool Heap::CanMoveObjectStart(HeapObject* object) {
    3139      318973 :   if (!FLAG_move_object_start) return false;
    3140             : 
    3141             :   // Sampling heap profiler may have a reference to the object.
    3142      637946 :   if (isolate()->heap_profiler()->is_sampling_allocations()) return false;
    3143             : 
    3144      309973 :   Address address = object->address();
    3145             : 
    3146      309973 :   if (lo_space()->Contains(object)) return false;
    3147             : 
    3148             :   // We can move the object start if the page was already swept.
    3149      309952 :   return Page::FromAddress(address)->SweepingDone();
    3150             : }
    3151             : 
    3152           6 : bool Heap::IsImmovable(HeapObject* object) {
    3153           6 :   MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
    3154           6 :   return chunk->NeverEvacuate() || chunk->owner()->identity() == LO_SPACE;
    3155             : }
    3156             : 
    3157   137374347 : void Heap::AdjustLiveBytes(HeapObject* object, int by) {
    3158             :   // As long as the inspected object is black and we are currently not iterating
    3159             :   // the heap using HeapIterator, we can update the live byte count. We cannot
    3160             :   // update while using HeapIterator because the iterator is temporarily
    3161             :   // marking the whole object graph, without updating live bytes.
    3162    45791551 :   if (lo_space()->Contains(object)) {
    3163             :     lo_space()->AdjustLiveBytes(by);
    3164    91582675 :   } else if (!in_heap_iterator() &&
    3165    90689840 :              !mark_compact_collector()->sweeping_in_progress() &&
    3166             :              ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
    3167             :     DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
    3168       88601 :     MarkingState::Internal(object).IncrementLiveBytes(by);
    3169             :   }
    3170    45791553 : }
    3171             : 
    3172             : 
    3173      308217 : FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
    3174      308217 :                                          int elements_to_trim) {
    3175      308217 :   CHECK_NOT_NULL(object);
    3176             :   DCHECK(CanMoveObjectStart(object));
    3177             :   DCHECK(!object->IsFixedTypedArrayBase());
    3178             :   DCHECK(!object->IsByteArray());
    3179             :   const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
    3180      308217 :   const int bytes_to_trim = elements_to_trim * element_size;
    3181             :   Map* map = object->map();
    3182             : 
    3183             :   // For now this trick is only applied to objects in new and paged space.
    3184             :   // In large object space the object's start must coincide with the chunk
    3185             :   // and thus the trick is just not applicable.
    3186             :   DCHECK(!lo_space()->Contains(object));
    3187             :   DCHECK(object->map() != fixed_cow_array_map());
    3188             : 
    3189             :   STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
    3190             :   STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
    3191             :   STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
    3192             : 
    3193             :   const int len = object->length();
    3194             :   DCHECK(elements_to_trim <= len);
    3195             : 
    3196             :   // Calculate location of new array start.
    3197      308217 :   Address old_start = object->address();
    3198      308217 :   Address new_start = old_start + bytes_to_trim;
    3199             : 
    3200             :   // Transfer the mark bits to their new location if the object is not within
    3201             :   // a black area.
    3202      308403 :   if (!incremental_marking()->black_allocation() ||
    3203             :       !Marking::IsBlack(ObjectMarking::MarkBitFrom(
    3204             :           HeapObject::FromAddress(new_start),
    3205         186 :           MarkingState::Internal(HeapObject::FromAddress(new_start))))) {
    3206             :     IncrementalMarking::TransferMark(this, object,
    3207      308031 :                                      HeapObject::FromAddress(new_start));
    3208             :   }
    3209             : 
    3210             :   // Technically in new space this write might be omitted (except for
    3211             :   // debug mode which iterates through the heap), but to be safe
    3212             :   // we still do it.
    3213      308217 :   CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);
    3214             : 
    3215             :   // Initialize header of the trimmed array. Since left trimming is only
    3216             :   // performed on pages which are not concurrently swept, creating a filler
    3217             :   // object does not require synchronization.
    3218             :   Object** former_start = HeapObject::RawField(object, 0);
    3219             :   int new_start_index = elements_to_trim * (element_size / kPointerSize);
    3220      308217 :   former_start[new_start_index] = map;
    3221      616434 :   former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);
    3222             : 
    3223             :   FixedArrayBase* new_object =
    3224      308217 :       FixedArrayBase::cast(HeapObject::FromAddress(new_start));
    3225             : 
    3226             :   // Maintain consistency of live bytes during incremental marking
    3227      308217 :   AdjustLiveBytes(new_object, -bytes_to_trim);
    3228             : 
    3229             :   // Remove recorded slots for the new map and length offset.
    3230      308217 :   ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
    3231             :   ClearRecordedSlot(new_object, HeapObject::RawField(
    3232      308217 :                                     new_object, FixedArrayBase::kLengthOffset));
    3233             : 
    3234             :   // Notify the heap profiler of change in object layout.
    3235      308217 :   OnMoveEvent(new_object, object, new_object->Size());
    3236      308217 :   return new_object;
    3237             : }
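
The left trim above never copies the payload: the first elements_to_trim words (starting at the old map slot) become filler, and a fresh map/length header is written immediately in front of the surviving elements. A simplified model of that header relocation on a plain word buffer, with integer tags standing in for real map pointers:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Fake FixedArray layout, one word per slot: [map, length, e0, e1, ...].
    constexpr intptr_t kArrayMapTag = 0x11;
    constexpr intptr_t kFillerTag = 0x22;

    // Trims the first `elements_to_trim` elements in place and returns the word
    // index at which the array now starts.
    size_t LeftTrim(std::vector<intptr_t>& words, size_t elements_to_trim) {
      const intptr_t map = words[0];
      const intptr_t old_length = words[1];
      assert(static_cast<intptr_t>(elements_to_trim) <= old_length);
      // The object start moves forward by `elements_to_trim` words, so the first
      // `elements_to_trim` words (old header included) are overwritten by filler.
      for (size_t i = 0; i < elements_to_trim; ++i) words[i] = kFillerTag;
      // The new header overwrites the last trimmed slots, right in front of the
      // surviving elements, just like former_start[new_start_index] above.
      const size_t new_start = elements_to_trim;
      words[new_start] = map;
      words[new_start + 1] = old_length - static_cast<intptr_t>(elements_to_trim);
      return new_start;
    }

    int main() {
      std::vector<intptr_t> arr = {kArrayMapTag, 4, 10, 20, 30, 40};  // length 4
      size_t start = LeftTrim(arr, 2);
      assert(arr[start] == kArrayMapTag && arr[start + 1] == 2);
      assert(arr[start + 2] == 30 && arr[start + 3] == 40);  // e2, e3 survive
    }
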
    3238             : 
    3239    48869933 : void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
    3240             :   const int len = object->length();
    3241             :   DCHECK_LE(elements_to_trim, len);
    3242             :   DCHECK_GE(elements_to_trim, 0);
    3243             : 
    3244             :   int bytes_to_trim;
    3245    18131342 :   if (object->IsFixedTypedArrayBase()) {
    3246             :     InstanceType type = object->map()->instance_type();
    3247             :     bytes_to_trim =
    3248             :         FixedTypedArrayBase::TypedArraySize(type, len) -
    3249         146 :         FixedTypedArrayBase::TypedArraySize(type, len - elements_to_trim);
    3250    18131268 :   } else if (object->IsByteArray()) {
    3251           0 :     int new_size = ByteArray::SizeFor(len - elements_to_trim);
    3252           0 :     bytes_to_trim = ByteArray::SizeFor(len) - new_size;
    3253             :     DCHECK_GE(bytes_to_trim, 0);
    3254             :   } else {
    3255             :     const int element_size =
    3256             :         object->IsFixedArray() ? kPointerSize : kDoubleSize;
    3257    18131269 :     bytes_to_trim = elements_to_trim * element_size;
    3258             :   }
    3259             : 
    3260             : 
    3261             :   // For now this trick is only applied to objects in new and paged space.
    3262             :   DCHECK(object->map() != fixed_cow_array_map());
    3263             : 
    3264    18131342 :   if (bytes_to_trim == 0) {
    3265             :     // No need to create a filler or update live byte counters; just initialize
    3266             :     // the header of the trimmed array.
    3267     2761995 :     object->synchronized_set_length(len - elements_to_trim);
    3268    18131342 :     return;
    3269             :   }
    3270             : 
    3271             :   // Calculate location of new array end.
    3272    15369347 :   Address old_end = object->address() + object->Size();
    3273    15369347 :   Address new_end = old_end - bytes_to_trim;
    3274             : 
    3275             :   // Technically in new space this write might be omitted (except for
    3276             :   // debug mode which iterates through the heap), but to be safe
    3277             :   // we still do it.
    3278             :   // We do not create a filler for objects in large object space.
    3279             :   // TODO(hpayer): We should shrink the large object page if the size
    3280             :   // of the object changed significantly.
    3281    15369347 :   if (!lo_space()->Contains(object)) {
    3282             :     HeapObject* filler =
    3283    15369247 :         CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
    3284             :     DCHECK_NOT_NULL(filler);
    3285             :     // Clear the mark bits of the black area that belongs now to the filler.
    3286             :     // This is an optimization. The sweeper will release black fillers anyway.
    3287    15388911 :     if (incremental_marking()->black_allocation() &&
    3288             :         ObjectMarking::IsBlackOrGrey(filler, MarkingState::Internal(filler))) {
    3289             :       Page* page = Page::FromAddress(new_end);
    3290             :       MarkingState::Internal(page).bitmap()->ClearRange(
    3291             :           page->AddressToMarkbitIndex(new_end),
    3292         374 :           page->AddressToMarkbitIndex(new_end + bytes_to_trim));
    3293             :     }
    3294             :   }
    3295             : 
    3296             :   // Initialize header of the trimmed array. We are storing the new length
    3297             :   // using a release store after creating a filler for the left-over space to
    3298             :   // avoid races with the sweeper thread.
    3299    15369346 :   object->synchronized_set_length(len - elements_to_trim);
    3300             : 
    3301             :   // Maintain consistency of live bytes during incremental marking
    3302    15369346 :   AdjustLiveBytes(object, -bytes_to_trim);
    3303             : 
    3304             :   // Notify the heap profiler of change in object layout. The array may not be
    3305             :   // moved during GC, and size has to be adjusted nevertheless.
    3306    15369347 :   HeapProfiler* profiler = isolate()->heap_profiler();
    3307    15369347 :   if (profiler->is_tracking_allocations()) {
    3308           6 :     profiler->UpdateObjectSizeEvent(object->address(), object->Size());
    3309             :   }
    3310             : }
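
For right trimming, the number of bytes to release depends on the object kind: typed arrays and byte arrays trim by the difference of their two aligned object sizes (which can be zero, hence the early return above), while plain fixed arrays trim exactly elements_to_trim * element_size bytes. A sketch of the byte-array case with a hypothetical header size, showing how alignment can swallow a small trim:

    #include <cassert>

    constexpr int kPointerSize = sizeof(void*);
    constexpr int kByteArrayHeaderSize = 2 * kPointerSize;  // assumption

    constexpr int AlignUp(int value, int alignment) {
      return (value + alignment - 1) & ~(alignment - 1);
    }

    // Analogue of ByteArray::SizeFor: header plus payload, pointer-aligned.
    constexpr int ByteArraySizeFor(int length) {
      return AlignUp(kByteArrayHeaderSize + length, kPointerSize);
    }

    // Analogue of the ByteArray branch above: trim by the difference of the two
    // rounded object sizes, which may be zero because of alignment padding.
    constexpr int ByteArrayBytesToTrim(int length, int elements_to_trim) {
      return ByteArraySizeFor(length) - ByteArraySizeFor(length - elements_to_trim);
    }

    int main() {
      // With 8-byte words: trimming one byte off a 10-byte array frees nothing,
      // trimming two bytes frees a whole word.
      assert(ByteArrayBytesToTrim(10, 1) == 0 || kPointerSize != 8);
      assert(ByteArrayBytesToTrim(10, 2) == 8 || kPointerSize != 8);
    }
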
    3311             : 
    3312             : 
    3313       15830 : AllocationResult Heap::AllocateFixedTypedArrayWithExternalPointer(
    3314             :     int length, ExternalArrayType array_type, void* external_pointer,
    3315             :     PretenureFlag pretenure) {
    3316             :   int size = FixedTypedArrayBase::kHeaderSize;
    3317             :   AllocationSpace space = SelectSpace(pretenure);
    3318             :   HeapObject* result = nullptr;
    3319             :   {
    3320       15830 :     AllocationResult allocation = AllocateRaw(size, space);
    3321       15830 :     if (!allocation.To(&result)) return allocation;
    3322             :   }
    3323             : 
    3324             :   result->set_map_no_write_barrier(MapForFixedTypedArray(array_type));
    3325             :   FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(result);
    3326       15830 :   elements->set_base_pointer(Smi::kZero, SKIP_WRITE_BARRIER);
    3327             :   elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER);
    3328             :   elements->set_length(length);
    3329       15830 :   return elements;
    3330             : }
    3331             : 
    3332        1101 : static void ForFixedTypedArray(ExternalArrayType array_type, int* element_size,
    3333             :                                ElementsKind* element_kind) {
    3334        1101 :   switch (array_type) {
    3335             : #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    3336             :   case kExternal##Type##Array:                          \
    3337             :     *element_size = size;                               \
    3338             :     *element_kind = TYPE##_ELEMENTS;                    \
    3339             :     return;
    3340             : 
    3341          74 :     TYPED_ARRAYS(TYPED_ARRAY_CASE)
    3342             : #undef TYPED_ARRAY_CASE
    3343             : 
    3344             :     default:
    3345           0 :       *element_size = 0;               // Bogus
    3346           0 :       *element_kind = UINT8_ELEMENTS;  // Bogus
    3347           0 :       UNREACHABLE();
    3348             :   }
    3349             : }
    3350             : 
    3351             : 
    3352        1101 : AllocationResult Heap::AllocateFixedTypedArray(int length,
    3353             :                                                ExternalArrayType array_type,
    3354             :                                                bool initialize,
    3355             :                                                PretenureFlag pretenure) {
    3356             :   int element_size;
    3357             :   ElementsKind elements_kind;
    3358        1101 :   ForFixedTypedArray(array_type, &element_size, &elements_kind);
    3359        1101 :   int size = OBJECT_POINTER_ALIGN(length * element_size +
    3360             :                                   FixedTypedArrayBase::kDataOffset);
    3361             :   AllocationSpace space = SelectSpace(pretenure);
    3362             : 
    3363             :   HeapObject* object = nullptr;
    3364             :   AllocationResult allocation = AllocateRaw(
    3365             :       size, space,
    3366        1101 :       array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned);
    3367        1101 :   if (!allocation.To(&object)) return allocation;
    3368             : 
    3369             :   object->set_map_no_write_barrier(MapForFixedTypedArray(array_type));
    3370             :   FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object);
    3371        1101 :   elements->set_base_pointer(elements, SKIP_WRITE_BARRIER);
    3372             :   elements->set_external_pointer(
    3373        2202 :       ExternalReference::fixed_typed_array_base_data_offset().address(),
    3374             :       SKIP_WRITE_BARRIER);
    3375             :   elements->set_length(length);
    3376        1101 :   if (initialize) memset(elements->DataPtr(), 0, elements->DataSize());
    3377        1101 :   return elements;
    3378             : }
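
The requested size above is the typed-array header plus length * element_size, rounded up to the allocation word size, with Float64 arrays additionally asking for double alignment. A standalone sketch of the size computation; the header size used here is a placeholder, not the real FixedTypedArrayBase::kDataOffset:

    #include <cstddef>

    constexpr size_t kPointerSize = sizeof(void*);
    constexpr size_t kDataOffset = 4 * kPointerSize;  // placeholder header size

    // Analogue of OBJECT_POINTER_ALIGN: round up to the word size.
    constexpr size_t ObjectPointerAlign(size_t size) {
      return (size + kPointerSize - 1) & ~(kPointerSize - 1);
    }

    // Allocation size of a fixed typed array with `length` elements of
    // `element_size` bytes each, as in AllocateFixedTypedArray above.
    constexpr size_t FixedTypedArraySize(size_t length, size_t element_size) {
      return ObjectPointerAlign(length * element_size + kDataOffset);
    }

    static_assert(FixedTypedArraySize(3, 1) % kPointerSize == 0,
                  "allocated sizes stay pointer-aligned");
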
    3379             : 
    3380             : 
    3381     5111487 : AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
    3382             :   DCHECK(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
    3383     2555739 :   AllocationResult allocation = AllocateRaw(object_size, CODE_SPACE);
    3384             : 
    3385             :   HeapObject* result = nullptr;
    3386     2555752 :   if (!allocation.To(&result)) return allocation;
    3387     2555748 :   if (immovable) {
    3388           6 :     Address address = result->address();
    3389             :     MemoryChunk* chunk = MemoryChunk::FromAddress(address);
    3390             :     // Code objects which should stay at a fixed address are allocated either
    3391             :     // in the first page of code space (objects on the first page of each space
    3392             :     // are never moved), in large object space, or (during snapshot creation)
    3393             :     // by marking the containing page as immovable.
    3394          12 :     if (!Heap::IsImmovable(result) &&
    3395           6 :         !code_space_->FirstPage()->Contains(address)) {
    3396           6 :       if (isolate()->serializer_enabled()) {
    3397             :         chunk->MarkNeverEvacuate();
    3398             :       } else {
    3399             :         // Discard the first code allocation, which was on a page where it could
    3400             :         // be moved.
    3401             :         CreateFillerObjectAt(result->address(), object_size,
    3402           0 :                              ClearRecordedSlots::kNo);
    3403           0 :         allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
    3404           0 :         if (!allocation.To(&result)) return allocation;
    3405           0 :         OnAllocationEvent(result, object_size);
    3406             :       }
    3407             :     }
    3408             :   }
    3409             : 
    3410             :   result->set_map_no_write_barrier(code_map());
    3411             :   Code* code = Code::cast(result);
    3412             :   DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment));
    3413             :   DCHECK(!memory_allocator()->code_range()->valid() ||
    3414             :          memory_allocator()->code_range()->contains(code->address()) ||
    3415             :          object_size <= code_space()->AreaSize());
    3416     2555748 :   code->set_gc_metadata(Smi::kZero);
    3417     2555723 :   code->set_ic_age(global_ic_age_);
    3418     2555723 :   return code;
    3419             : }
    3420             : 
    3421             : 
    3422      333342 : AllocationResult Heap::CopyCode(Code* code) {
    3423             :   AllocationResult allocation;
    3424             : 
    3425             :   HeapObject* result = nullptr;
    3426             :   // Allocate an object the same size as the code object.
    3427      166681 :   int obj_size = code->Size();
    3428      166681 :   allocation = AllocateRaw(obj_size, CODE_SPACE);
    3429      166681 :   if (!allocation.To(&result)) return allocation;
    3430             : 
    3431             :   // Copy code object.
    3432      166661 :   Address old_addr = code->address();
    3433      166661 :   Address new_addr = result->address();
    3434             :   CopyBlock(new_addr, old_addr, obj_size);
    3435             :   Code* new_code = Code::cast(result);
    3436             : 
    3437             :   // Relocate the copy.
    3438             :   DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
    3439             :   DCHECK(!memory_allocator()->code_range()->valid() ||
    3440             :          memory_allocator()->code_range()->contains(code->address()) ||
    3441             :          obj_size <= code_space()->AreaSize());
    3442      166661 :   new_code->Relocate(new_addr - old_addr);
    3443             :   // We have to iterate over the object and process its pointers when black
    3444             :   // allocation is on.
    3445      166661 :   incremental_marking()->IterateBlackObject(new_code);
    3446             :   // Record all references to embedded objects in the new code object.
    3447      166661 :   RecordWritesIntoCode(new_code);
    3448      166661 :   return new_code;
    3449             : }
    3450             : 
    3451       18980 : AllocationResult Heap::CopyBytecodeArray(BytecodeArray* bytecode_array) {
    3452             :   int size = BytecodeArray::SizeFor(bytecode_array->length());
    3453             :   HeapObject* result = nullptr;
    3454             :   {
    3455        9490 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    3456        9490 :     if (!allocation.To(&result)) return allocation;
    3457             :   }
    3458             : 
    3459             :   result->set_map_no_write_barrier(bytecode_array_map());
    3460             :   BytecodeArray* copy = BytecodeArray::cast(result);
    3461             :   copy->set_length(bytecode_array->length());
    3462             :   copy->set_frame_size(bytecode_array->frame_size());
    3463             :   copy->set_parameter_count(bytecode_array->parameter_count());
    3464        9490 :   copy->set_constant_pool(bytecode_array->constant_pool());
    3465        9490 :   copy->set_handler_table(bytecode_array->handler_table());
    3466        9490 :   copy->set_source_position_table(bytecode_array->source_position_table());
    3467             :   copy->set_interrupt_budget(bytecode_array->interrupt_budget());
    3468             :   copy->set_osr_loop_nesting_level(bytecode_array->osr_loop_nesting_level());
    3469             :   copy->set_bytecode_age(bytecode_array->bytecode_age());
    3470        9490 :   bytecode_array->CopyBytecodesTo(copy);
    3471        9490 :   return copy;
    3472             : }
    3473             : 
    3474     7485931 : void Heap::InitializeAllocationMemento(AllocationMemento* memento,
    3475     7485931 :                                        AllocationSite* allocation_site) {
    3476             :   memento->set_map_no_write_barrier(allocation_memento_map());
    3477             :   DCHECK(allocation_site->map() == allocation_site_map());
    3478     7485931 :   memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
    3479     7485930 :   if (FLAG_allocation_site_pretenuring) {
    3480             :     allocation_site->IncrementMementoCreateCount();
    3481             :   }
    3482     7485930 : }
    3483             : 
    3484             : 
    3485   136561227 : AllocationResult Heap::Allocate(Map* map, AllocationSpace space,
    3486             :                                 AllocationSite* allocation_site) {
    3487             :   DCHECK(gc_state_ == NOT_IN_GC);
    3488             :   DCHECK(map->instance_type() != MAP_TYPE);
    3489             :   int size = map->instance_size();
    3490   136561227 :   if (allocation_site != NULL) {
    3491      362735 :     size += AllocationMemento::kSize;
    3492             :   }
    3493             :   HeapObject* result = nullptr;
    3494   136561227 :   AllocationResult allocation = AllocateRaw(size, space);
    3495   136561222 :   if (!allocation.To(&result)) return allocation;
    3496             :   // No need for write barrier since object is white and map is in old space.
    3497             :   result->set_map_no_write_barrier(map);
    3498   136559602 :   if (allocation_site != NULL) {
    3499             :     AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
    3500      362731 :         reinterpret_cast<Address>(result) + map->instance_size());
    3501      362731 :     InitializeAllocationMemento(alloc_memento, allocation_site);
    3502             :   }
    3503   136559602 :   return result;
    3504             : }
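
When an allocation site is supplied, the memento is not a separate allocation: the raw request grows by AllocationMemento::kSize and the memento is placed directly behind the object, at result + map->instance_size(). A small sketch of that layout using illustrative sizes (both constants are assumptions, not the real V8 values):

    #include <cassert>
    #include <cstdint>

    constexpr int kObjectSize = 32;             // assumption: map->instance_size()
    constexpr int kAllocationMementoSize = 16;  // assumption: AllocationMemento::kSize

    int main() {
      // One raw allocation covers both the object and its trailing memento.
      alignas(8) uint8_t raw[kObjectSize + kAllocationMementoSize] = {};
      uint8_t* object = raw;
      uint8_t* memento = object + kObjectSize;  // memento starts where the object ends
      assert(memento + kAllocationMementoSize == raw + sizeof(raw));
      (void)memento;
    }
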
    3505             : 
    3506             : 
    3507    47770739 : void Heap::InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
    3508             :                                      Map* map) {
    3509    47770739 :   obj->set_properties(properties);
    3510    47770746 :   obj->initialize_elements();
    3511             :   // TODO(1240798): Initialize the object's body using valid initial values
    3512             :   // according to the object's initial map.  For example, if the map's
    3513             :   // instance type is JS_ARRAY_TYPE, the length field should be initialized
    3514             :   // to a number (e.g. Smi::kZero) and the elements initialized to a
    3515             :   // fixed array (e.g. Heap::empty_fixed_array()).  Currently, the object
    3516             :   // verification code has to cope with (temporarily) invalid objects.  See
    3517             :   // for example, JSArray::JSArrayVerify).
    3518    47770748 :   InitializeJSObjectBody(obj, map, JSObject::kHeaderSize);
    3519    47770762 : }
    3520             : 
    3521             : 
    3522    74697920 : void Heap::InitializeJSObjectBody(JSObject* obj, Map* map, int start_offset) {
    3523   148579623 :   if (start_offset == map->instance_size()) return;
    3524             :   DCHECK_LT(start_offset, map->instance_size());
    3525             : 
    3526             :   // We cannot always fill with one_pointer_filler_map because objects
    3527             :   // created from API functions expect their embedder fields to be initialized
    3528             :   // with undefined_value.
    3529             :   // Pre-allocated fields need to be initialized with undefined_value as well
    3530             :   // so that object accesses before the constructor completes (e.g. in the
    3531             :   // debugger) will not cause a crash.
    3532             : 
    3533             :   // In case of Array subclassing the |map| could already be transitioned
    3534             :   // to different elements kind from the initial map on which we track slack.
    3535             :   bool in_progress = map->IsInobjectSlackTrackingInProgress();
    3536             :   Object* filler;
    3537    45730476 :   if (in_progress) {
    3538             :     filler = one_pointer_filler_map();
    3539             :   } else {
    3540             :     filler = undefined_value();
    3541             :   }
    3542    45730476 :   obj->InitializeBody(map, start_offset, Heap::undefined_value(), filler);
    3543    45730477 :   if (in_progress) {
    3544      408109 :     map->FindRootMap()->InobjectSlackTrackingStep();
    3545             :   }
    3546             : }
    3547             : 
    3548             : 
    3549    47557367 : AllocationResult Heap::AllocateJSObjectFromMap(
    3550    47557367 :     Map* map, PretenureFlag pretenure, AllocationSite* allocation_site) {
    3551             :   // JSFunctions should be allocated using AllocateFunction to be
    3552             :   // properly initialized.
    3553             :   DCHECK(map->instance_type() != JS_FUNCTION_TYPE);
    3554             : 
    3555             :   // Both types of global objects should be allocated using
    3556             :   // AllocateGlobalObject to be properly initialized.
    3557             :   DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
    3558             : 
    3559             :   // Allocate the backing storage for the properties.
    3560             :   FixedArray* properties = empty_fixed_array();
    3561             : 
    3562             :   // Allocate the JSObject.
    3563             :   AllocationSpace space = SelectSpace(pretenure);
    3564             :   JSObject* js_obj = nullptr;
    3565    47557367 :   AllocationResult allocation = Allocate(map, space, allocation_site);
    3566    47557349 :   if (!allocation.To(&js_obj)) return allocation;
    3567             : 
    3568             :   // Initialize the JSObject.
    3569    47556909 :   InitializeJSObjectFromMap(js_obj, properties, map);
    3570             :   DCHECK(js_obj->HasFastElements() || js_obj->HasFixedTypedArrayElements() ||
    3571             :          js_obj->HasFastStringWrapperElements() ||
    3572             :          js_obj->HasFastArgumentsElements());
    3573    47556932 :   return js_obj;
    3574             : }
    3575             : 
    3576             : 
    3577    16809958 : AllocationResult Heap::AllocateJSObject(JSFunction* constructor,
    3578             :                                         PretenureFlag pretenure,
    3579             :                                         AllocationSite* allocation_site) {
    3580             :   DCHECK(constructor->has_initial_map());
    3581             : 
    3582             :   // Allocate the object based on the constructors initial map.
    3583             :   AllocationResult allocation = AllocateJSObjectFromMap(
    3584    16809958 :       constructor->initial_map(), pretenure, allocation_site);
    3585             : #ifdef DEBUG
    3586             :   // Make sure result is NOT a global object if valid.
    3587             :   HeapObject* obj = nullptr;
    3588             :   DCHECK(!allocation.To(&obj) || !obj->IsJSGlobalObject());
    3589             : #endif
    3590    16809958 :   return allocation;
    3591             : }
    3592             : 
    3593             : 
    3594    11466386 : AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
    3595             :   // Make the clone.
    3596             :   Map* map = source->map();
    3597             : 
    3598             :   // We can only clone regexps, normal objects, api objects, errors or arrays.
    3599             :   // Copying anything else will break invariants.
    3600     9565835 :   CHECK(map->instance_type() == JS_REGEXP_TYPE ||
    3601             :         map->instance_type() == JS_OBJECT_TYPE ||
    3602             :         map->instance_type() == JS_ERROR_TYPE ||
    3603             :         map->instance_type() == JS_ARRAY_TYPE ||
    3604             :         map->instance_type() == JS_API_OBJECT_TYPE ||
    3605             :         map->instance_type() == JS_SPECIAL_API_OBJECT_TYPE);
    3606             : 
    3607             :   int object_size = map->instance_size();
    3608             :   HeapObject* clone = nullptr;
    3609             : 
    3610             :   DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type()));
    3611             : 
    3612             :   int adjusted_object_size =
    3613     9565835 :       site != NULL ? object_size + AllocationMemento::kSize : object_size;
    3614     9565835 :   AllocationResult allocation = AllocateRaw(adjusted_object_size, NEW_SPACE);
    3615     9565836 :   if (!allocation.To(&clone)) return allocation;
    3616             : 
    3617             :   SLOW_DCHECK(InNewSpace(clone));
    3618             :   // Since we know the clone is allocated in new space, we can copy
    3619             :   // the contents without worrying about updating the write barrier.
    3620     9565649 :   CopyBlock(clone->address(), source->address(), object_size);
    3621             : 
    3622     9565646 :   if (site != NULL) {
    3623             :     AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
    3624     7123200 :         reinterpret_cast<Address>(clone) + object_size);
    3625     7123200 :     InitializeAllocationMemento(alloc_memento, site);
    3626             :   }
    3627             : 
    3628             :   SLOW_DCHECK(JSObject::cast(clone)->GetElementsKind() ==
    3629             :               source->GetElementsKind());
    3630             :   FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
    3631             :   FixedArray* properties = FixedArray::cast(source->properties());
    3632             :   // Update elements if necessary.
    3633     9565645 :   if (elements->length() > 0) {
    3634             :     FixedArrayBase* elem = nullptr;
    3635             :     {
    3636             :       AllocationResult allocation;
    3637     1900551 :       if (elements->map() == fixed_cow_array_map()) {
    3638      641994 :         allocation = FixedArray::cast(elements);
    3639     1258557 :       } else if (source->HasFastDoubleElements()) {
    3640      309500 :         allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
    3641             :       } else {
    3642      949057 :         allocation = CopyFixedArray(FixedArray::cast(elements));
    3643             :       }
    3644     1900551 :       if (!allocation.To(&elem)) return allocation;
    3645             :     }
    3646     1900508 :     JSObject::cast(clone)->set_elements(elem, SKIP_WRITE_BARRIER);
    3647             :   }
    3648             :   // Update properties if necessary.
    3649     9565602 :   if (properties->length() > 0) {
    3650             :     FixedArray* prop = nullptr;
    3651             :     {
    3652        1265 :       AllocationResult allocation = CopyFixedArray(properties);
    3653        1265 :       if (!allocation.To(&prop)) return allocation;
    3654             :     }
    3655        1263 :     JSObject::cast(clone)->set_properties(prop, SKIP_WRITE_BARRIER);
    3656             :   }
    3657             :   // Return the new clone.
    3658     9565600 :   return clone;
    3659             : }
    3660             : 
    3661             : 
    3662             : static inline void WriteOneByteData(Vector<const char> vector, uint8_t* chars,
    3663             :                                     int len) {
    3664             :   // Only works for one byte strings.
    3665             :   DCHECK(vector.length() == len);
    3666             :   MemCopy(chars, vector.start(), len);
    3667             : }
    3668             : 
    3669         601 : static inline void WriteTwoByteData(Vector<const char> vector, uint16_t* chars,
    3670             :                                     int len) {
    3671         601 :   const uint8_t* stream = reinterpret_cast<const uint8_t*>(vector.start());
    3672         601 :   size_t stream_length = vector.length();
    3673       30343 :   while (stream_length != 0) {
    3674       29742 :     size_t consumed = 0;
    3675       29742 :     uint32_t c = unibrow::Utf8::ValueOf(stream, stream_length, &consumed);
    3676             :     DCHECK(c != unibrow::Utf8::kBadChar);
    3677             :     DCHECK(consumed <= stream_length);
    3678       29742 :     stream_length -= consumed;
    3679       29742 :     stream += consumed;
    3680       29742 :     if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
    3681           0 :       len -= 2;
    3682           0 :       if (len < 0) break;
    3683           0 :       *chars++ = unibrow::Utf16::LeadSurrogate(c);
    3684           0 :       *chars++ = unibrow::Utf16::TrailSurrogate(c);
    3685             :     } else {
    3686       29742 :       len -= 1;
    3687       29742 :       if (len < 0) break;
    3688       29742 :       *chars++ = c;
    3689             :     }
    3690             :   }
    3691             :   DCHECK(stream_length == 0);
    3692             :   DCHECK(len == 0);
    3693         601 : }
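
The loop above walks the UTF-8 stream and emits UTF-16 code units, splitting any code point above U+FFFF into a lead/trail surrogate pair. The surrogate encoding itself is standard and can be shown without V8's unibrow helpers; a minimal sketch:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Appends the UTF-16 encoding of an already-decoded code point.
    void AppendUtf16(std::vector<uint16_t>* out, uint32_t code_point) {
      if (code_point <= 0xFFFF) {
        out->push_back(static_cast<uint16_t>(code_point));  // single code unit
      } else {
        // Code points above the BMP become a surrogate pair, as in the
        // c > kMaxNonSurrogateCharCode branch above.
        uint32_t v = code_point - 0x10000;
        out->push_back(static_cast<uint16_t>(0xD800 + (v >> 10)));    // lead
        out->push_back(static_cast<uint16_t>(0xDC00 + (v & 0x3FF)));  // trail
      }
    }

    int main() {
      std::vector<uint16_t> units;
      AppendUtf16(&units, 0x1F600);  // U+1F600 needs two code units
      assert(units.size() == 2 && units[0] == 0xD83D && units[1] == 0xDE00);
    }
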
    3694             : 
    3695             : 
    3696             : static inline void WriteOneByteData(String* s, uint8_t* chars, int len) {
    3697             :   DCHECK(s->length() == len);
    3698     1867127 :   String::WriteToFlat(s, chars, 0, len);
    3699             : }
    3700             : 
    3701             : 
    3702             : static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) {
    3703             :   DCHECK(s->length() == len);
    3704       26310 :   String::WriteToFlat(s, chars, 0, len);
    3705             : }
    3706             : 
    3707             : 
    3708             : template <bool is_one_byte, typename T>
    3709     1894037 : AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
    3710     1893436 :                                                       uint32_t hash_field) {
    3711             :   DCHECK(chars >= 0);
    3712             :   // Compute map and object size.
    3713             :   int size;
    3714             :   Map* map;
    3715             : 
    3716             :   DCHECK_LE(0, chars);
    3717             :   DCHECK_GE(String::kMaxLength, chars);
    3718             :   if (is_one_byte) {
    3719             :     map = one_byte_internalized_string_map();
    3720             :     size = SeqOneByteString::SizeFor(chars);
    3721             :   } else {
    3722             :     map = internalized_string_map();
    3723             :     size = SeqTwoByteString::SizeFor(chars);
    3724             :   }
    3725             : 
    3726             :   // Allocate string.
    3727             :   HeapObject* result = nullptr;
    3728             :   {
    3729     1894037 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    3730     1894038 :     if (!allocation.To(&result)) return allocation;
    3731             :   }
    3732             : 
    3733             :   result->set_map_no_write_barrier(map);
    3734             :   // Set length and hash fields of the allocated string.
    3735             :   String* answer = String::cast(result);
    3736             :   answer->set_length(chars);
    3737             :   answer->set_hash_field(hash_field);
    3738             : 
    3739             :   DCHECK_EQ(size, answer->Size());
    3740             : 
    3741             :   if (is_one_byte) {
    3742     1867127 :     WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars);
    3743             :   } else {
    3744       27512 :     WriteTwoByteData(t, SeqTwoByteString::cast(answer)->GetChars(), chars);
    3745             :   }
    3746     1894037 :   return answer;
    3747             : }
    3748             : 
    3749             : 
    3750             : // Need explicit instantiations.
    3751             : template AllocationResult Heap::AllocateInternalizedStringImpl<true>(String*,
    3752             :                                                                      int,
    3753             :                                                                      uint32_t);
    3754             : template AllocationResult Heap::AllocateInternalizedStringImpl<false>(String*,
    3755             :                                                                       int,
    3756             :                                                                       uint32_t);
    3757             : template AllocationResult Heap::AllocateInternalizedStringImpl<false>(
    3758             :     Vector<const char>, int, uint32_t);
    3759             : 
    3760             : 
    3761   144513538 : AllocationResult Heap::AllocateRawOneByteString(int length,
    3762   144508988 :                                                 PretenureFlag pretenure) {
    3763             :   DCHECK_LE(0, length);
    3764             :   DCHECK_GE(String::kMaxLength, length);
    3765             :   int size = SeqOneByteString::SizeFor(length);
    3766             :   DCHECK(size <= SeqOneByteString::kMaxSize);
    3767             :   AllocationSpace space = SelectSpace(pretenure);
    3768             : 
    3769             :   HeapObject* result = nullptr;
    3770             :   {
    3771   144513538 :     AllocationResult allocation = AllocateRaw(size, space);
    3772   144513568 :     if (!allocation.To(&result)) return allocation;
    3773             :   }
    3774             : 
    3775             :   // Partially initialize the object.
    3776             :   result->set_map_no_write_barrier(one_byte_string_map());
    3777             :   String::cast(result)->set_length(length);
    3778             :   String::cast(result)->set_hash_field(String::kEmptyHashField);
    3779             :   DCHECK_EQ(size, HeapObject::cast(result)->Size());
    3780             : 
    3781   144508988 :   return result;
    3782             : }
    3783             : 
    3784             : 
    3785    40502449 : AllocationResult Heap::AllocateRawTwoByteString(int length,
    3786    40501368 :                                                 PretenureFlag pretenure) {
    3787             :   DCHECK_LE(0, length);
    3788             :   DCHECK_GE(String::kMaxLength, length);
    3789             :   int size = SeqTwoByteString::SizeFor(length);
    3790             :   DCHECK(size <= SeqTwoByteString::kMaxSize);
    3791             :   AllocationSpace space = SelectSpace(pretenure);
    3792             : 
    3793             :   HeapObject* result = nullptr;
    3794             :   {
    3795    40502449 :     AllocationResult allocation = AllocateRaw(size, space);
    3796    40502449 :     if (!allocation.To(&result)) return allocation;
    3797             :   }
    3798             : 
    3799             :   // Partially initialize the object.
    3800             :   result->set_map_no_write_barrier(string_map());
    3801             :   String::cast(result)->set_length(length);
    3802             :   String::cast(result)->set_hash_field(String::kEmptyHashField);
    3803             :   DCHECK_EQ(size, HeapObject::cast(result)->Size());
    3804    40501368 :   return result;
    3805             : }
    3806             : 
    3807             : 
    3808         172 : AllocationResult Heap::AllocateEmptyFixedArray() {
    3809             :   int size = FixedArray::SizeFor(0);
    3810             :   HeapObject* result = nullptr;
    3811             :   {
    3812          86 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    3813          86 :     if (!allocation.To(&result)) return allocation;
    3814             :   }
    3815             :   // Initialize the object.
    3816             :   result->set_map_no_write_barrier(fixed_array_map());
    3817             :   FixedArray::cast(result)->set_length(0);
    3818          86 :   return result;
    3819             : }
    3820             : 
    3821          86 : AllocationResult Heap::AllocateEmptyScopeInfo() {
    3822             :   int size = FixedArray::SizeFor(0);
    3823             :   HeapObject* result = nullptr;
    3824             :   {
    3825          43 :     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    3826          43 :     if (!allocation.To(&result)) return allocation;
    3827             :   }
    3828             :   // Initialize the object.
    3829             :   result->set_map_no_write_barrier(scope_info_map());
    3830             :   FixedArray::cast(result)->set_length(0);
    3831          43 :   return result;
    3832             : }
    3833             : 
    3834           0 : AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) {
    3835           0 :   if (!InNewSpace(src)) {
    3836           0 :     return src;
    3837             :   }
    3838             : 
    3839             :   int len = src->length();
    3840             :   HeapObject* obj = nullptr;
    3841             :   {
    3842           0 :     AllocationResult allocation = AllocateRawFixedArray(len, TENURED);
    3843           0 :     if (!allocation.To(&obj)) return allocation;
    3844             :   }
    3845             :   obj->set_map_no_write_barrier(fixed_array_map());
    3846             :   FixedArray* result = FixedArray::cast(obj);
    3847             :   result->set_length(len);
    3848             : 
    3849             :   // Copy the content.
    3850             :   DisallowHeapAllocation no_gc;
    3851           0 :   WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
    3852           0 :   for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
    3853             : 
    3854             :   // TODO(mvstanton): The map is set twice because of protection against calling
    3855             :   // set() on a COW FixedArray. Issue v8:3221 was created to track this; once it
    3856             :   // is resolved we might be able to remove this whole method.
    3857             :   HeapObject::cast(obj)->set_map_no_write_barrier(fixed_cow_array_map());
    3858           0 :   return result;
    3859             : }
    3860             : 
    3861             : 
    3862           0 : AllocationResult Heap::AllocateEmptyFixedTypedArray(
    3863             :     ExternalArrayType array_type) {
    3864         387 :   return AllocateFixedTypedArray(0, array_type, false, TENURED);
    3865             : }
    3866             : 
    3867             : 
    3868     5714464 : AllocationResult Heap::CopyFixedArrayAndGrow(FixedArray* src, int grow_by,
    3869    11428562 :                                              PretenureFlag pretenure) {
    3870             :   int old_len = src->length();
    3871     5714464 :   int new_len = old_len + grow_by;
    3872             :   DCHECK(new_len >= old_len);
    3873             :   HeapObject* obj = nullptr;
    3874             :   {
    3875     5714464 :     AllocationResult allocation = AllocateRawFixedArray(new_len, pretenure);
    3876     5714465 :     if (!allocation.To(&obj)) return allocation;
    3877             :   }
    3878             : 
    3879             :   obj->set_map_no_write_barrier(fixed_array_map());
    3880             :   FixedArray* result = FixedArray::cast(obj);
    3881             :   result->set_length(new_len);
    3882             : 
    3883             :   // Copy the content.
    3884             :   DisallowHeapAllocation no_gc;
    3885     5714281 :   WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
    3886    82610828 :   for (int i = 0; i < old_len; i++) result->set(i, src->get(i), mode);
    3887     5714281 :   MemsetPointer(result->data_start() + old_len, undefined_value(), grow_by);
    3888     5714281 :   return result;
    3889             : }
    3890             : 
    3891     3539679 : AllocationResult Heap::CopyFixedArrayUpTo(FixedArray* src, int new_len,
    3892     3539637 :                                           PretenureFlag pretenure) {
    3893     3539688 :   if (new_len == 0) return empty_fixed_array();
    3894             : 
    3895             :   DCHECK_LE(new_len, src->length());
    3896             : 
    3897             :   HeapObject* obj = nullptr;
    3898             :   {
    3899     3539670 :     AllocationResult allocation = AllocateRawFixedArray(new_len, pretenure);
    3900     3539670 :     if (!allocation.To(&obj)) return allocation;
    3901             :   }
    3902             :   obj->set_map_no_write_barrier(fixed_array_map());
    3903             : 
    3904             :   FixedArray* result = FixedArray::cast(obj);
    3905             :   result->set_length(new_len);
    3906             : 
    3907             :   // Copy the content.
    3908             :   DisallowHeapAllocation no_gc;
    3909     3539628 :   WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
    3910    22184388 :   for (int i = 0; i < new_len; i++) result->set(i, src->get(i), mode);
    3911     3539628 :   return result;
    3912             : }
    3913             : 
    3914     1584952 : AllocationResult Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
    3915             :   int len = src->length();
    3916             :   HeapObject* obj = nullptr;
    3917             :   {
    3918     1584952 :     AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED);
    3919     1584952 :     if (!allocation.To(&obj)) return allocation;
    3920             :   }
    3921             :   obj->set_map_no_write_barrier(map);
    3922             : 
    3923             :   FixedArray* result = FixedArray::cast(obj);
    3924             :   DisallowHeapAllocation no_gc;
    3925     1584897 :   WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
    3926             : 
    3927             :   // Eliminate the write barrier if possible.
    3928     1584897 :   if (mode == SKIP_WRITE_BARRIER) {
    3929             :     CopyBlock(obj->address() + kPointerSize, src->address() + kPointerSize,
    3930     1575100 :               FixedArray::SizeFor(len) - kPointerSize);
    3931     1575100 :     return obj;
    3932             :   }
    3933             : 
    3934             :   // Slow case: Just copy the content one-by-one.
    3935             :   result->set_length(len);
    3936    10013309 :   for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
    3937        9797 :   return result;
    3938             : }
    3939             : 
    3940             : 
    3941      314298 : AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
    3942             :                                                    Map* map) {
    3943             :   int len = src->length();
    3944             :   HeapObject* obj = nullptr;
    3945             :   {
    3946      314298 :     AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED);
    3947      314298 :     if (!allocation.To(&obj)) return allocation;
    3948             :   }
    3949             :   obj->set_map_no_write_barrier(map);
    3950             :   CopyBlock(obj->address() + FixedDoubleArray::kLengthOffset,
    3951             :             src->address() + FixedDoubleArray::kLengthOffset,
    3952      314275 :             FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
    3953      314275 :   return obj;
    3954             : }
    3955             : 
    3956             : 
    3957   104035854 : AllocationResult Heap::AllocateRawFixedArray(int length,
    3958             :                                              PretenureFlag pretenure) {
    3959   104035854 :   if (length < 0 || length > FixedArray::kMaxLength) {
    3960             :     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
    3961             :   }
    3962             :   int size = FixedArray::SizeFor(length);
    3963             :   AllocationSpace space = SelectSpace(pretenure);
    3964             : 
    3965   104035981 :   AllocationResult result = AllocateRaw(size, space);
    3966   104035887 :   if (!result.IsRetry() && size > kMaxRegularHeapObjectSize &&
    3967             :       FLAG_use_marking_progress_bar) {
    3968             :     MemoryChunk* chunk =
    3969        8709 :         MemoryChunk::FromAddress(result.ToObjectChecked()->address());
    3970             :     chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR);
    3971             :   }
    3972   104035887 :   return result;
    3973             : }
    3974             : 
    3975             : 
    3976    94362484 : AllocationResult Heap::AllocateFixedArrayWithFiller(int length,
    3977             :                                                     PretenureFlag pretenure,
    3978    94356120 :                                                     Object* filler) {
    3979             :   DCHECK(length >= 0);
    3980             :   DCHECK(empty_fixed_array()->IsFixedArray());
    3981    99068177 :   if (length == 0) return empty_fixed_array();
    3982             : 
    3983             :   DCHECK(!InNewSpace(filler));
    3984             :   HeapObject* result = nullptr;
    3985             :   {
    3986    89656791 :     AllocationResult allocation = AllocateRawFixedArray(length, pretenure);
    3987    89656823 :     if (!allocation.To(&result)) return allocation;
    3988             :   }
    3989             : 
    3990             :   result->set_map_no_write_barrier(fixed_array_map());
    3991             :   FixedArray* array = FixedArray::cast(result);
    3992             :   array->set_length(length);
    3993    89650427 :   MemsetPointer(array->data_start(), filler, length);
    3994    89650469 :   return array;
    3995             : }
    3996             : 
    3997             : 
    3998    88080081 : AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
    3999    88080081 :   return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
    4000             : }
    4001             : 
    4002             : 
    4003     5810006 : AllocationResult Heap::AllocateUninitializedFixedArray(int length) {
    4004     2905345 :   if (length == 0) return empty_fixed_array();
    4005             : 
    4006             :   HeapObject* obj = nullptr;
    4007             :   {
    4008     2905287 :     AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED);
    4009     2905287 :     if (!allocation.To(&obj)) return allocation;
    4010             :   }
    4011             : 
    4012             :   obj->set_map_no_write_barrier(fixed_array_map());
    4013             :   FixedArray::cast(obj)->set_length(length);
    4014     2904661 :   return obj;
    4015             : }
    4016             : 
    4017             : 
    4018      903008 : AllocationResult Heap::AllocateUninitializedFixedDoubleArray(
    4019      902965 :     int length, PretenureFlag pretenure) {
    4020      903008 :   if (length == 0) return empty_fixed_array();
    4021             : 
    4022             :   HeapObject* elements = nullptr;
    4023      903008 :   AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure);
    4024      903008 :   if (!allocation.To(&elements)) return allocation;
    4025             : 
    4026             :   elements->set_map_no_write_barrier(fixed_double_array_map());
    4027             :   FixedDoubleArray::cast(elements)->set_length(length);
    4028      902965 :   return elements;
    4029             : }
    4030             : 
    4031             : 
    4032     1217306 : AllocationResult Heap::AllocateRawFixedDoubleArray(int length,
    4033             :                                                    PretenureFlag pretenure) {
    4034     1217306 :   if (length < 0 || length > FixedDoubleArray::kMaxLength) {
    4035             :     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
    4036             :   }
    4037             :   int size = FixedDoubleArray::SizeFor(length);
    4038             :   AllocationSpace space = SelectSpace(pretenure);
    4039             : 
    4040             :   HeapObject* object = nullptr;
    4041             :   {
    4042     1217306 :     AllocationResult allocation = AllocateRaw(size, space, kDoubleAligned);
    4043     1217306 :     if (!allocation.To(&object)) return allocation;
    4044             :   }
    4045             : 
    4046     1217240 :   return object;
    4047             : }
    4048             : 
    4049             : 
    4050     1311920 : AllocationResult Heap::AllocateSymbol() {
    4051             :   // Statically ensure that it is safe to allocate symbols in paged spaces.
    4052             :   STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);
    4053             : 
    4054             :   HeapObject* result = nullptr;
    4055      437306 :   AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_SPACE);
    4056      437307 :   if (!allocation.To(&result)) return allocation;
    4057             : 
    4058             :   result->set_map_no_write_barrier(symbol_map());
    4059             : 
    4060             :   // Generate a random hash value.
    4061      437307 :   int hash = isolate()->GenerateIdentityHash(Name::kHashBitMask);
    4062             : 
    4063             :   Symbol::cast(result)
    4064      437307 :       ->set_hash_field(Name::kIsNotArrayIndexMask | (hash << Name::kHashShift));
    4065      437307 :   Symbol::cast(result)->set_name(undefined_value());
    4066             :   Symbol::cast(result)->set_flags(0);
    4067             : 
    4068             :   DCHECK(!Symbol::cast(result)->is_private());
    4069      437307 :   return result;
    4070             : }
    4071             : 
    4072             : 
    4073    33074572 : AllocationResult Heap::AllocateStruct(InstanceType type) {
    4074             :   Map* map;
    4075    16537286 :   switch (type) {
    4076             : #define MAKE_CASE(NAME, Name, name) \
    4077             :   case NAME##_TYPE:                 \
    4078             :     map = name##_map();             \
    4079             :     break;
    4080    16537286 :     STRUCT_LIST(MAKE_CASE)
    4081             : #undef MAKE_CASE
    4082             :     default:
    4083           0 :       UNREACHABLE();
    4084             :       return exception();
    4085             :   }
    4086             :   int size = map->instance_size();
    4087             :   Struct* result = nullptr;
    4088             :   {
    4089    16537286 :     AllocationResult allocation = Allocate(map, OLD_SPACE);
    4090    16537278 :     if (!allocation.To(&result)) return allocation;
    4091             :   }
    4092             :   result->InitializeBody(size);
    4093    16537277 :   return result;
    4094             : }
    4095             : 
    4096             : 
    4097       24569 : void Heap::MakeHeapIterable() {
    4098       24569 :   mark_compact_collector()->EnsureSweepingCompleted();
    4099           0 : }
    4100             : 
    4101             : 
    4102             : static double ComputeMutatorUtilization(double mutator_speed, double gc_speed) {
    4103             :   const double kMinMutatorUtilization = 0.0;
    4104             :   const double kConservativeGcSpeedInBytesPerMillisecond = 200000;
    4105       69199 :   if (mutator_speed == 0) return kMinMutatorUtilization;
    4106       66483 :   if (gc_speed == 0) gc_speed = kConservativeGcSpeedInBytesPerMillisecond;
    4107             :   // Derivation:
    4108             :   // mutator_utilization = mutator_time / (mutator_time + gc_time)
    4109             :   // mutator_time = 1 / mutator_speed
    4110             :   // gc_time = 1 / gc_speed
    4111             :   // mutator_utilization = (1 / mutator_speed) /
    4112             :   //                       (1 / mutator_speed + 1 / gc_speed)
    4113             :   // mutator_utilization = gc_speed / (mutator_speed + gc_speed)
    4114       66483 :   return gc_speed / (mutator_speed + gc_speed);
    4115             : }
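// Worked example (illustrative, not from the V8 sources): with a mutator
// allocating at 1 MB/ms and a GC that can process 9 MB/ms, the formula above
// gives
//   mutator_utilization = 9 / (1 + 9) = 0.9,
// i.e. the mutator keeps roughly 90% of the wall-clock time. When the GC speed
// is unknown (0), the conservative 200000 bytes/ms figure above is used instead.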
    4116             : 
    4117             : 
    4118      138398 : double Heap::YoungGenerationMutatorUtilization() {
    4119             :   double mutator_speed = static_cast<double>(
    4120       69199 :       tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond());
    4121             :   double gc_speed =
    4122       69199 :       tracer()->ScavengeSpeedInBytesPerMillisecond(kForSurvivedObjects);
    4123             :   double result = ComputeMutatorUtilization(mutator_speed, gc_speed);
    4124       69199 :   if (FLAG_trace_mutator_utilization) {
    4125             :     isolate()->PrintWithTimestamp(
    4126             :         "Young generation mutator utilization = %.3f ("
    4127             :         "mutator_speed=%.f, gc_speed=%.f)\n",
    4128           0 :         result, mutator_speed, gc_speed);
    4129             :   }
    4130       69199 :   return result;
    4131             : }
    4132             : 
    4133             : 
    4134           0 : double Heap::OldGenerationMutatorUtilization() {
    4135             :   double mutator_speed = static_cast<double>(
    4136           0 :       tracer()->OldGenerationAllocationThroughputInBytesPerMillisecond());
    4137             :   double gc_speed = static_cast<double>(
    4138           0 :       tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond());
    4139             :   double result = ComputeMutatorUtilization(mutator_speed, gc_speed);
    4140           0 :   if (FLAG_trace_mutator_utilization) {
    4141             :     isolate()->PrintWithTimestamp(
    4142             :         "Old generation mutator utilization = %.3f ("
    4143             :         "mutator_speed=%.f, gc_speed=%.f)\n",
    4144           0 :         result, mutator_speed, gc_speed);
    4145             :   }
    4146           0 :   return result;
    4147             : }
    4148             : 
    4149             : 
    4150           0 : bool Heap::HasLowYoungGenerationAllocationRate() {
    4151             :   const double high_mutator_utilization = 0.993;
    4152       69199 :   return YoungGenerationMutatorUtilization() > high_mutator_utilization;
    4153             : }
    4154             : 
    4155             : 
    4156           0 : bool Heap::HasLowOldGenerationAllocationRate() {
    4157             :   const double high_mutator_utilization = 0.993;
    4158           0 :   return OldGenerationMutatorUtilization() > high_mutator_utilization;
    4159             : }
    4160             : 
    4161             : 
    4162          10 : bool Heap::HasLowAllocationRate() {
    4163          10 :   return HasLowYoungGenerationAllocationRate() &&
    4164          10 :          HasLowOldGenerationAllocationRate();
    4165             : }
    4166             : 
    4167             : 
    4168           0 : bool Heap::HasHighFragmentation() {
    4169           0 :   size_t used = PromotedSpaceSizeOfObjects();
    4170           0 :   size_t committed = CommittedOldGenerationMemory();
    4171           0 :   return HasHighFragmentation(used, committed);
    4172             : }
    4173             : 
    4174           0 : bool Heap::HasHighFragmentation(size_t used, size_t committed) {
    4175             :   const size_t kSlack = 16 * MB;
    4176             :   // Fragmentation is high if committed > 2 * used + kSlack.
    4177             :   // Rewrite the expression to avoid overflow.
    4178             :   DCHECK_GE(committed, used);
    4179       51963 :   return committed - used > used + kSlack;
    4180             : }
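// Worked example (illustrative, not from the V8 sources): with kSlack = 16 MB,
// used = 100 MB, and committed = 230 MB, the check reads 230 - 100 = 130 > 116,
// so fragmentation is considered high; with committed = 210 MB it is not
// (110 <= 116). Both forms are equivalent to committed > 2 * used + kSlack.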
    4181             : 
    4182      489998 : bool Heap::ShouldOptimizeForMemoryUsage() {
    4183      489804 :   return FLAG_optimize_for_size || isolate()->IsIsolateInBackground() ||
    4184      734900 :          HighMemoryPressure() || IsLowMemoryDevice();
    4185             : }
    4186             : 
    4187           0 : void Heap::ActivateMemoryReducerIfNeeded() {
    4188             :   // Activate memory reducer when switching to background if
    4189             :   // - there has been no mark-compact since the start.
    4190             :   // - the committed memory can potentially be reduced.
    4191             :   // 2 pages each for the old, code, and map spaces + 1 page for new space.
    4192             :   const int kMinCommittedMemory = 7 * Page::kPageSize;
    4193           0 :   if (ms_count_ == 0 && CommittedMemory() > kMinCommittedMemory &&
    4194           0 :       isolate()->IsIsolateInBackground()) {
    4195             :     MemoryReducer::Event event;
    4196           0 :     event.type = MemoryReducer::kPossibleGarbage;
    4197           0 :     event.time_ms = MonotonicallyIncreasingTimeInMs();
    4198           0 :     memory_reducer_->NotifyPossibleGarbage(event);
    4199             :   }
    4200           0 : }
    4201             : 
    4202      244398 : void Heap::ReduceNewSpaceSize() {
    4203             :   // TODO(ulan): Unify this constant with the similar constant in
    4204             :   // GCIdleTimeHandler once the change is merged to 4.5.
    4205             :   static const size_t kLowAllocationThroughput = 1000;
    4206             :   const double allocation_throughput =
    4207      122535 :       tracer()->CurrentAllocationThroughputInBytesPerMillisecond();
    4208             : 
    4209      245070 :   if (FLAG_predictable) return;
    4210             : 
    4211      243726 :   if (ShouldReduceMemory() ||
    4212       99335 :       ((allocation_throughput != 0) &&
    4213             :        (allocation_throughput < kLowAllocationThroughput))) {
    4214       17201 :     new_space_->Shrink();
    4215             :     UncommitFromSpace();
    4216             :   }
    4217             : }
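// Note (illustrative, not from the V8 sources): kLowAllocationThroughput is
// expressed in bytes per millisecond, so the threshold of 1000 corresponds to
// roughly 1 MB/s. Below that allocation rate, or whenever ShouldReduceMemory()
// holds, the new space is shrunk and its from-space is uncommitted.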
    4218             : 
    4219        5652 : void Heap::FinalizeIncrementalMarkingIfComplete(
    4220       20116 :     GarbageCollectionReason gc_reason) {
    4221       16956 :   if (incremental_marking()->IsMarking() &&
    4222       10828 :       (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
    4223        4616 :        (!incremental_marking()->finalize_marking_completed() &&
    4224        4616 :         mark_compact_collector()->marking_deque()->IsEmpty() &&
    4225           0 :         local_embedder_heap_tracer()->ShouldFinalizeIncrementalMarking()))) {
    4226         476 :     FinalizeIncrementalMarking(gc_reason);
    4227       15024 :   } else if (incremental_marking()->IsComplete() ||
    4228        4672 :              (mark_compact_collector()->marking_deque()->IsEmpty() &&
    4229             :               local_embedder_heap_tracer()
    4230           0 :                   ->ShouldFinalizeIncrementalMarking())) {
    4231             :     CollectAllGarbage(current_gc_flags_, gc_reason);
    4232             :   }
    4233        5652 : }
    4234             : 
    4235          20 : bool Heap::TryFinalizeIdleIncrementalMarking(
    4236          52 :     double idle_time_in_ms, GarbageCollectionReason gc_reason) {
    4237          20 :   size_t size_of_objects = static_cast<size_t>(SizeOfObjects());
    4238             :   double final_incremental_mark_compact_speed_in_bytes_per_ms =
    4239          20 :       tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
    4240          60 :   if (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
    4241           0 :       (!incremental_marking()->finalize_marking_completed() &&
    4242           0 :        mark_compact_collector()->marking_deque()->IsEmpty() &&
    4243           0 :        local_embedder_heap_tracer()->ShouldFinalizeIncrementalMarking() &&
    4244             :        gc_idle_time_handler_->ShouldDoOverApproximateWeakClosure(
    4245           0 :            idle_time_in_ms))) {
    4246           8 :     FinalizeIncrementalMarking(gc_reason);
    4247           8 :     return true;
    4248          24 :   } else if (incremental_marking()->IsComplete() ||
    4249           0 :              (mark_compact_collector()->marking_deque()->IsEmpty() &&
    4250             :               local_embedder_heap_tracer()
    4251           0 :                   ->ShouldFinalizeIncrementalMarking() &&
    4252             :               gc_idle_time_handler_->ShouldDoFinalIncrementalMarkCompact(
    4253             :                   idle_time_in_ms, size_of_objects,
    4254           0 :                   final_incremental_mark_compact_speed_in_bytes_per_ms))) {
    4255             :     CollectAllGarbage(current_gc_flags_, gc_reason);
    4256          12 :     return true;
    4257             :   }
    4258             :   return false;
    4259             : }
    4260             : 
    4261      139937 : void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
    4262             :   // TODO(hpayer): We do not have to iterate reservations on black objects
    4263             :   // for marking. We just have to execute the special visiting side effect
    4264             :   // code that adds objects to global data structures, e.g. for array buffers.
    4265             : 
    4266      107263 :   if (incremental_marking()->black_allocation()) {
    4267             :     // Iterate black objects in old space, code space, map space, and large
    4268             :     // object space for side effects.
    4269          68 :     for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
    4270          68 :       const Heap::Reservation& res = reservations[i];
    4271         204 :       for (auto& chunk : res) {
    4272          68 :         Address addr = chunk.start;
    4273       32810 :         while (addr < chunk.end) {
    4274       32674 :           HeapObject* obj = HeapObject::FromAddress(addr);
    4275             :           // There might be grey objects due to black to grey transitions in
    4276             :           // incremental marking. E.g. see VisitNativeContextIncremental.
    4277             :           DCHECK(
    4278             :               ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)));
    4279       32674 :           if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) {
    4280       32674 :             incremental_marking()->IterateBlackObject(obj);
    4281             :           }
    4282       32674 :           addr += obj->Size();
    4283             :         }
    4284             :       }
    4285             :     }
    4286             :   }
    4287      107263 : }
    4288             : 
    4289    13694922 : void Heap::NotifyObjectLayoutChange(HeapObject* object,
    4290    13689156 :                                     const DisallowHeapAllocation&) {
    4291    27384077 :   if (FLAG_incremental_marking && incremental_marking()->IsMarking()) {
    4292       91312 :     incremental_marking()->MarkGrey(this, object);
    4293             :   }
    4294             : #ifdef VERIFY_HEAP
    4295             :   DCHECK(pending_layout_change_object_ == nullptr);
    4296             :   pending_layout_change_object_ = object;
    4297             : #endif
    4298    13694921 : }
    4299             : 
    4300             : #ifdef VERIFY_HEAP
    4301             : void Heap::VerifyObjectLayoutChange(HeapObject* object, Map* new_map) {
    4302             :   if (pending_layout_change_object_ == nullptr) {
    4303             :     DCHECK(!object->IsJSObject() ||
    4304             :            !object->map()->TransitionRequiresSynchronizationWithGC(new_map));
    4305             :   } else {
    4306             :     DCHECK_EQ(pending_layout_change_object_, object);
    4307             :     pending_layout_change_object_ = nullptr;
    4308             :   }
    4309             : }
    4310             : #endif
    4311             : 
    4312       16350 : GCIdleTimeHeapState Heap::ComputeHeapState() {
    4313             :   GCIdleTimeHeapState heap_state;
    4314        5450 :   heap_state.contexts_disposed = contexts_disposed_;
    4315             :   heap_state.contexts_disposal_rate =
    4316        5450 :       tracer()->ContextDisposalRateInMilliseconds();
    4317        5450 :   heap_state.size_of_objects = static_cast<size_t>(SizeOfObjects());
    4318        5450 :   heap_state.incremental_marking_stopped = incremental_marking()->IsStopped();
    4319        5450 :   return heap_state;
    4320             : }
    4321             : 
    4322             : 
    4323        5450 : bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
    4324             :                                  GCIdleTimeHeapState heap_state,
    4325          40 :                                  double deadline_in_ms) {
    4326             :   bool result = false;
    4327        5450 :   switch (action.type) {
    4328             :     case DONE:
    4329             :       result = true;
    4330        4931 :       break;
    4331             :     case DO_INCREMENTAL_STEP: {
    4332             :       const double remaining_idle_time_in_ms =
    4333             :           incremental_marking()->AdvanceIncrementalMarking(
    4334             :               deadline_in_ms, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
    4335          20 :               IncrementalMarking::FORCE_COMPLETION, StepOrigin::kTask);
    4336          20 :       if (remaining_idle_time_in_ms > 0.0) {
    4337             :         TryFinalizeIdleIncrementalMarking(
    4338             :             remaining_idle_time_in_ms,
    4339          20 :             GarbageCollectionReason::kFinalizeMarkingViaTask);
    4340             :       }
    4341             :       result = incremental_marking()->IsStopped();
    4342          20 :       break;
    4343             :     }
    4344             :     case DO_FULL_GC: {
    4345             :       DCHECK(contexts_disposed_ > 0);
    4346         347 :       HistogramTimerScope scope(isolate_->counters()->gc_context());
    4347        1041 :       TRACE_EVENT0("v8", "V8.GCContext");
    4348             :       CollectAllGarbage(kNoGCFlags, GarbageCollectionReason::kContextDisposal);
    4349             :       break;
    4350             :     }
    4351             :     case DO_NOTHING:
    4352             :       break;
    4353             :   }
    4354             : 
    4355        5450 :   return result;
    4356             : }
    4357             : 
    4358             : 
    4359        5450 : void Heap::IdleNotificationEpilogue(GCIdleTimeAction action,
    4360             :                                     GCIdleTimeHeapState heap_state,
    4361             :                                     double start_ms, double deadline_in_ms) {
    4362        5450 :   double idle_time_in_ms = deadline_in_ms - start_ms;
    4363        5450 :   double current_time = MonotonicallyIncreasingTimeInMs();
    4364        5450 :   last_idle_notification_time_ = current_time;
    4365        5450 :   double deadline_difference = deadline_in_ms - current_time;
    4366             : 
    4367        5450 :   contexts_disposed_ = 0;
    4368             : 
    4369             :   isolate()->counters()->gc_idle_time_allotted_in_ms()->AddSample(
    4370       10900 :       static_cast<int>(idle_time_in_ms));
    4371             : 
    4372        5450 :   if (deadline_in_ms - start_ms >
    4373             :       GCIdleTimeHandler::kMaxFrameRenderingIdleTime) {
    4374        4955 :     int committed_memory = static_cast<int>(CommittedMemory() / KB);
    4375        4955 :     int used_memory = static_cast<int>(heap_state.size_of_objects / KB);
    4376             :     isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
    4377        4955 :         start_ms, committed_memory);
    4378             :     isolate()->counters()->aggregated_memory_heap_used()->AddSample(
    4379        4955 :         start_ms, used_memory);
    4380             :   }
    4381             : 
    4382        5450 :   if (deadline_difference >= 0) {
    4383        4955 :     if (action.type != DONE && action.type != DO_NOTHING) {
    4384             :       isolate()->counters()->gc_idle_time_limit_undershot()->AddSample(
    4385          20 :           static_cast<int>(deadline_difference));
    4386             :     }
    4387             :   } else {
    4388             :     isolate()->counters()->gc_idle_time_limit_overshot()->AddSample(
    4389         495 :         static_cast<int>(-deadline_difference));
    4390             :   }
    4391             : 
    4392        5450 :   if ((FLAG_trace_idle_notification && action.type > DO_NOTHING) ||
    4393             :       FLAG_trace_idle_notification_verbose) {
    4394             :     isolate_->PrintWithTimestamp(
    4395             :         "Idle notification: requested idle time %.2f ms, used idle time %.2f "
    4396             :         "ms, deadline usage %.2f ms [",
    4397             :         idle_time_in_ms, idle_time_in_ms - deadline_difference,
    4398           0 :         deadline_difference);
    4399           0 :     action.Print();
    4400           0 :     PrintF("]");
    4401           0 :     if (FLAG_trace_idle_notification_verbose) {
    4402           0 :       PrintF("[");
    4403           0 :       heap_state.Print();
    4404           0 :       PrintF("]");
    4405             :     }
    4406           0 :     PrintF("\n");
    4407             :   }
    4408        5450 : }
    4409             : 
    4410             : 
    4411     6985850 : double Heap::MonotonicallyIncreasingTimeInMs() {
    4412     6985850 :   return V8::GetCurrentPlatform()->MonotonicallyIncreasingTime() *
    4413     6985853 :          static_cast<double>(base::Time::kMillisecondsPerSecond);
    4414             : }
    4415             : 
    4416             : 
    4417           0 : bool Heap::IdleNotification(int idle_time_in_ms) {
    4418             :   return IdleNotification(
    4419           0 :       V8::GetCurrentPlatform()->MonotonicallyIncreasingTime() +
    4420           0 :       (static_cast<double>(idle_time_in_ms) /
    4421           0 :        static_cast<double>(base::Time::kMillisecondsPerSecond)));
    4422             : }
    4423             : 
    4424             : 
    4425       10900 : bool Heap::IdleNotification(double deadline_in_seconds) {
    4426        5450 :   CHECK(HasBeenSetUp());
    4427             :   double deadline_in_ms =
    4428             :       deadline_in_seconds *
    4429        5450 :       static_cast<double>(base::Time::kMillisecondsPerSecond);
    4430             :   HistogramTimerScope idle_notification_scope(
    4431        5450 :       isolate_->counters()->gc_idle_notification());
    4432       16350 :   TRACE_EVENT0("v8", "V8.GCIdleNotification");
    4433        5450 :   double start_ms = MonotonicallyIncreasingTimeInMs();
    4434        5450 :   double idle_time_in_ms = deadline_in_ms - start_ms;
    4435             : 
    4436             :   tracer()->SampleAllocation(start_ms, NewSpaceAllocationCounter(),
    4437        5450 :                              OldGenerationAllocationCounter());
    4438             : 
    4439        5450 :   GCIdleTimeHeapState heap_state = ComputeHeapState();
    4440             : 
    4441             :   GCIdleTimeAction action =
    4442        5450 :       gc_idle_time_handler_->Compute(idle_time_in_ms, heap_state);
    4443             : 
    4444        5450 :   bool result = PerformIdleTimeAction(action, heap_state, deadline_in_ms);
    4445             : 
    4446        5450 :   IdleNotificationEpilogue(action, heap_state, start_ms, deadline_in_ms);
    4447        5450 :   return result;
    4448             : }
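// Embedder-side sketch (illustrative; assumes the v8::Isolate public API of
// this era, where Isolate::IdleNotificationDeadline forwards to
// Heap::IdleNotification with a deadline in seconds):
//
//   double now_s = platform->MonotonicallyIncreasingTime();
//   // Offer V8 up to 10 ms of idle time; the return value indicates whether
//   // there is currently no further idle work to do.
//   bool done = isolate->IdleNotificationDeadline(now_s + 0.010);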
    4449             : 
    4450             : 
    4451           0 : bool Heap::RecentIdleNotificationHappened() {
    4452           0 :   return (last_idle_notification_time_ +
    4453             :           GCIdleTimeHandler::kMaxScheduledIdleTime) >
    4454           0 :          MonotonicallyIncreasingTimeInMs();
    4455             : }
    4456             : 
    4457             : class MemoryPressureInterruptTask : public CancelableTask {
    4458             :  public:
    4459             :   explicit MemoryPressureInterruptTask(Heap* heap)
    4460          13 :       : CancelableTask(heap->isolate()), heap_(heap) {}
    4461             : 
    4462          26 :   virtual ~MemoryPressureInterruptTask() {}
    4463             : 
    4464             :  private:
    4465             :   // v8::internal::CancelableTask overrides.
    4466          13 :   void RunInternal() override { heap_->CheckMemoryPressure(); }
    4467             : 
    4468             :   Heap* heap_;
    4469             :   DISALLOW_COPY_AND_ASSIGN(MemoryPressureInterruptTask);
    4470             : };
    4471             : 
    4472          32 : void Heap::CheckMemoryPressure() {
    4473          32 :   if (HighMemoryPressure()) {
    4474          20 :     if (isolate()->concurrent_recompilation_enabled()) {
    4475             :       // The optimizing compiler may be unnecessarily holding on to memory.
    4476             :       DisallowHeapAllocation no_recursive_gc;
    4477             :       isolate()->optimizing_compile_dispatcher()->Flush(
    4478          20 :           OptimizingCompileDispatcher::BlockingBehavior::kDontBlock);
    4479             :     }
    4480             :   }
    4481          32 :   if (memory_pressure_level_.Value() == MemoryPressureLevel::kCritical) {
    4482          20 :     CollectGarbageOnMemoryPressure();
    4483          12 :   } else if (memory_pressure_level_.Value() == MemoryPressureLevel::kModerate) {
    4484           0 :     if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
    4485             :       StartIncrementalMarking(kReduceMemoryFootprintMask,
    4486             :                               GarbageCollectionReason::kMemoryPressure);
    4487             :     }
    4488             :   }
    4489             :   MemoryReducer::Event event;
    4490          32 :   event.type = MemoryReducer::kPossibleGarbage;
    4491          32 :   event.time_ms = MonotonicallyIncreasingTimeInMs();
    4492          32 :   memory_reducer_->NotifyPossibleGarbage(event);
    4493          32 : }
    4494             : 
    4495          20 : void Heap::CollectGarbageOnMemoryPressure() {
    4496             :   const int kGarbageThresholdInBytes = 8 * MB;
    4497             :   const double kGarbageThresholdAsFractionOfTotalMemory = 0.1;
    4498             :   // This constant is the maximum response time in RAIL performance model.
    4499             :   const double kMaxMemoryPressurePauseMs = 100;
    4500             : 
    4501          20 :   double start = MonotonicallyIncreasingTimeInMs();
    4502             :   CollectAllGarbage(kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
    4503             :                     GarbageCollectionReason::kMemoryPressure,
    4504             :                     kGCCallbackFlagCollectAllAvailableGarbage);
    4505          20 :   double end = MonotonicallyIncreasingTimeInMs();
    4506             : 
    4507             :   // Estimate how much memory we can free.
    4508             :   int64_t potential_garbage =
    4509          20 :       (CommittedMemory() - SizeOfObjects()) + external_memory_;
    4510             :   // If we can potentially free a large amount of memory, then start a GC
    4511             :   // right away instead of waiting for the memory reducer.
    4512          26 :   if (potential_garbage >= kGarbageThresholdInBytes &&
    4513           6 :       potential_garbage >=
    4514           6 :           CommittedMemory() * kGarbageThresholdAsFractionOfTotalMemory) {
    4515             :     // If we spent less than half of the time budget, then perform a full GC.
    4516             :     // Otherwise, start incremental marking.
    4517           6 :     if (end - start < kMaxMemoryPressurePauseMs / 2) {
    4518             :       CollectAllGarbage(
    4519             :           kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
    4520             :           GarbageCollectionReason::kMemoryPressure,
    4521             :           kGCCallbackFlagCollectAllAvailableGarbage);
    4522             :     } else {
    4523           0 :       if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
    4524             :         StartIncrementalMarking(kReduceMemoryFootprintMask,
    4525             :                                 GarbageCollectionReason::kMemoryPressure);
    4526             :       }
    4527             :     }
    4528             :   }
    4529          20 : }
    4530             : 
    4531          32 : void Heap::MemoryPressureNotification(MemoryPressureLevel level,
    4532             :                                       bool is_isolate_locked) {
    4533             :   MemoryPressureLevel previous = memory_pressure_level_.Value();
    4534             :   memory_pressure_level_.SetValue(level);
    4535          64 :   if ((previous != MemoryPressureLevel::kCritical &&
    4536          44 :        level == MemoryPressureLevel::kCritical) ||
    4537          24 :       (previous == MemoryPressureLevel::kNone &&
    4538          12 :        level == MemoryPressureLevel::kModerate)) {
    4539          26 :     if (is_isolate_locked) {
    4540          13 :       CheckMemoryPressure();
    4541             :     } else {
    4542             :       ExecutionAccess access(isolate());
    4543          13 :       isolate()->stack_guard()->RequestGC();
    4544          13 :       V8::GetCurrentPlatform()->CallOnForegroundThread(
    4545             :           reinterpret_cast<v8::Isolate*>(isolate()),
    4546          26 :           new MemoryPressureInterruptTask(this));
    4547             :     }
    4548             :   }
    4549          32 : }
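// Embedder-side sketch (illustrative; assumes the v8::Isolate public API of
// this era, where Isolate::MemoryPressureNotification routes to
// Heap::MemoryPressureNotification):
//
//   // Forward an OS memory-pressure signal to V8. When the isolate is not
//   // locked by the current thread, the heap requests a GC interrupt and
//   // posts a MemoryPressureInterruptTask instead of collecting immediately.
//   isolate->MemoryPressureNotification(v8::MemoryPressureLevel::kCritical);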
    4550             : 
    4551        8624 : void Heap::SetOutOfMemoryCallback(v8::debug::OutOfMemoryCallback callback,
    4552             :                                   void* data) {
    4553        8624 :   out_of_memory_callback_ = callback;
    4554        8624 :   out_of_memory_callback_data_ = data;
    4555        8624 : }
    4556             : 
    4557           0 : void Heap::InvokeOutOfMemoryCallback() {
    4558          12 :   if (out_of_memory_callback_) {
    4559          12 :     out_of_memory_callback_(out_of_memory_callback_data_);
    4560             :   }
    4561           0 : }
    4562             : 
    4563           0 : void Heap::CollectCodeStatistics() {
    4564           0 :   CodeStatistics::ResetCodeAndMetadataStatistics(isolate());
    4565             :   // We do not look for code in new space or map space.  If code
    4566             :   // somehow ends up in those spaces, we would miss it here.
    4567           0 :   CodeStatistics::CollectCodeStatistics(code_space_, isolate());
    4568           0 :   CodeStatistics::CollectCodeStatistics(old_space_, isolate());
    4569           0 :   CodeStatistics::CollectCodeStatistics(lo_space_, isolate());
    4570           0 : }
    4571             : 
    4572             : #ifdef DEBUG
    4573             : 
    4574             : void Heap::Print() {
    4575             :   if (!HasBeenSetUp()) return;
    4576             :   isolate()->PrintStack(stdout);
    4577             :   AllSpaces spaces(this);
    4578             :   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
    4579             :     space->Print();
    4580             :   }
    4581             : }
    4582             : 
    4583             : 
    4584             : void Heap::ReportCodeStatistics(const char* title) {
    4585             :   PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
    4586             :   CollectCodeStatistics();
    4587             :   CodeStatistics::ReportCodeStatistics(isolate());
    4588             : }
    4589             : 
    4590             : 
    4591             : // This function expects that NewSpace's allocated objects histogram is
    4592             : // populated (via a call to CollectStatistics or else as a side effect of a
    4593             : // just-completed scavenge collection).
    4594             : void Heap::ReportHeapStatistics(const char* title) {
    4595             :   USE(title);
    4596             :   PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n", title,
    4597             :          gc_count_);
    4598             :   PrintF("old_generation_allocation_limit_ %" PRIuS "\n",
    4599             :          old_generation_allocation_limit_);
    4600             : 
    4601             :   PrintF("\n");
    4602             :   PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles(isolate_));
    4603             :   isolate_->global_handles()->PrintStats();
    4604             :   PrintF("\n");
    4605             : 
    4606             :   PrintF("Heap statistics : ");
    4607             :   memory_allocator()->ReportStatistics();
    4608             :   PrintF("To space : ");
    4609             :   new_space_->ReportStatistics();
    4610             :   PrintF("Old space : ");
    4611             :   old_space_->ReportStatistics();
    4612             :   PrintF("Code space : ");
    4613             :   code_space_->ReportStatistics();
    4614             :   PrintF("Map space : ");
    4615             :   map_space_->ReportStatistics();
    4616             :   PrintF("Large object space : ");
    4617             :   lo_space_->ReportStatistics();
    4618             :   PrintF(">>>>>> ========================================= >>>>>>\n");
    4619             : }
    4620             : 
    4621             : #endif  // DEBUG
    4622             : 
    4623      122513 : const char* Heap::GarbageCollectionReasonToString(
    4624             :     GarbageCollectionReason gc_reason) {
    4625      122513 :   switch (gc_reason) {
    4626             :     case GarbageCollectionReason::kAllocationFailure:
    4627             :       return "allocation failure";
    4628             :     case GarbageCollectionReason::kAllocationLimit:
    4629           0 :       return "allocation limit";
    4630             :     case GarbageCollectionReason::kContextDisposal:
    4631         347 :       return "context disposal";
    4632             :     case GarbageCollectionReason::kCountersExtension:
    4633           0 :       return "counters extension";
    4634             :     case GarbageCollectionReason::kDebugger:
    4635       18886 :       return "debugger";
    4636             :     case GarbageCollectionReason::kDeserializer:
    4637          18 :       return "deserialize";
    4638             :     case GarbageCollectionReason::kExternalMemoryPressure:
    4639         116 :       return "external memory pressure";
    4640             :     case GarbageCollectionReason::kFinalizeMarkingViaStackGuard:
    4641         161 :       return "finalize incremental marking via stack guard";
    4642             :     case GarbageCollectionReason::kFinalizeMarkingViaTask:
    4643         516 :       return "finalize incremental marking via task";
    4644             :     case GarbageCollectionReason::kFullHashtable:
    4645           0 :       return "full hash-table";
    4646             :     case GarbageCollectionReason::kHeapProfiler:
    4647         844 :       return "heap profiler";
    4648             :     case GarbageCollectionReason::kIdleTask:
    4649        2446 :       return "idle task";
    4650             :     case GarbageCollectionReason::kLastResort:
    4651          24 :       return "last resort";
    4652             :     case GarbageCollectionReason::kLowMemoryNotification:
    4653       13072 :       return "low memory notification";
    4654             :     case GarbageCollectionReason::kMakeHeapIterable:
    4655           0 :       return "make heap iterable";
    4656             :     case GarbageCollectionReason::kMemoryPressure:
    4657          26 :       return "memory pressure";
    4658             :     case GarbageCollectionReason::kMemoryReducer:
    4659           0 :       return "memory reducer";
    4660             :     case GarbageCollectionReason::kRuntime:
    4661          28 :       return "runtime";
    4662             :     case GarbageCollectionReason::kSamplingProfiler:
    4663           0 :       return "sampling profiler";
    4664             :     case GarbageCollectionReason::kSnapshotCreator:
    4665         194 :       return "snapshot creator";
    4666             :     case GarbageCollectionReason::kTesting:
    4667       20425 :       return "testing";
    4668             :     case GarbageCollectionReason::kUnknown:
    4669           0 :       return "unknown";
    4670             :   }
    4671           0 :   UNREACHABLE();
    4672             :   return "";
    4673             : }
    4674             : 
    4675     6257390 : bool Heap::Contains(HeapObject* value) {
    4676     6257390 :   if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
    4677             :     return false;
    4678             :   }
    4679     6257390 :   return HasBeenSetUp() &&
    4680     6257043 :          (new_space_->ToSpaceContains(value) || old_space_->Contains(value) ||
    4681        2978 :           code_space_->Contains(value) || map_space_->Contains(value) ||
    4682     3128695 :           lo_space_->Contains(value));
    4683             : }
    4684             : 
    4685           0 : bool Heap::ContainsSlow(Address addr) {
    4686           0 :   if (memory_allocator()->IsOutsideAllocatedSpace(addr)) {
    4687             :     return false;
    4688             :   }
    4689           0 :   return HasBeenSetUp() &&
    4690           0 :          (new_space_->ToSpaceContainsSlow(addr) ||
    4691           0 :           old_space_->ContainsSlow(addr) || code_space_->ContainsSlow(addr) ||
    4692           0 :           map_space_->ContainsSlow(addr) || lo_space_->ContainsSlow(addr));
    4693             : }
    4694             : 
    4695         140 : bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
    4696         140 :   if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
    4697             :     return false;
    4698             :   }
    4699          70 :   if (!HasBeenSetUp()) return false;
    4700             : 
    4701          70 :   switch (space) {
    4702             :     case NEW_SPACE:
    4703          42 :       return new_space_->ToSpaceContains(value);
    4704             :     case OLD_SPACE:
    4705          19 :       return old_space_->Contains(value);
    4706             :     case CODE_SPACE:
    4707           0 :       return code_space_->Contains(value);
    4708             :     case MAP_SPACE:
    4709           0 :       return map_space_->Contains(value);
    4710             :     case LO_SPACE:
    4711          30 :       return lo_space_->Contains(value);
    4712             :   }
    4713           0 :   UNREACHABLE();
    4714             :   return false;
    4715             : }
    4716             : 
    4717           0 : bool Heap::InSpaceSlow(Address addr, AllocationSpace space) {
    4718           0 :   if (memory_allocator()->IsOutsideAllocatedSpace(addr)) {
    4719             :     return false;
    4720             :   }
    4721           0 :   if (!HasBeenSetUp()) return false;
    4722             : 
    4723           0 :   switch (space) {
    4724             :     case NEW_SPACE:
    4725           0 :       return new_space_->ToSpaceContainsSlow(addr);
    4726             :     case OLD_SPACE:
    4727           0 :       return old_space_->ContainsSlow(addr);
    4728             :     case CODE_SPACE:
    4729           0 :       return code_space_->ContainsSlow(addr);
    4730             :     case MAP_SPACE:
    4731           0 :       return map_space_->ContainsSlow(addr);
    4732             :     case LO_SPACE:
    4733           0 :       return lo_space_->ContainsSlow(addr);
    4734             :   }
    4735           0 :   UNREACHABLE();
    4736             :   return false;
    4737             : }
    4738             : 
    4739             : 
    4740           0 : bool Heap::IsValidAllocationSpace(AllocationSpace space) {
    4741           0 :   switch (space) {
    4742             :     case NEW_SPACE:
    4743             :     case OLD_SPACE:
    4744             :     case CODE_SPACE:
    4745             :     case MAP_SPACE:
    4746             :     case LO_SPACE:
    4747             :       return true;
    4748             :     default:
    4749           0 :       return false;
    4750             :   }
    4751             : }
    4752             : 
    4753             : 
    4754     4575562 : bool Heap::RootIsImmortalImmovable(int root_index) {
    4755     4575562 :   switch (root_index) {
    4756             : #define IMMORTAL_IMMOVABLE_ROOT(name) case Heap::k##name##RootIndex:
    4757             :     IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
    4758             : #undef IMMORTAL_IMMOVABLE_ROOT
    4759             : #define INTERNALIZED_STRING(name, value) case Heap::k##name##RootIndex:
    4760             :     INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
    4761             : #undef INTERNALIZED_STRING
    4762             : #define STRING_TYPE(NAME, size, name, Name) case Heap::k##Name##MapRootIndex:
    4763             :     STRING_TYPE_LIST(STRING_TYPE)
    4764             : #undef STRING_TYPE
    4765             :     return true;
    4766             :     default:
    4767      425914 :       return false;
    4768             :   }
    4769             : }
    4770             : 
    4771             : #ifdef VERIFY_HEAP
    4772             : void Heap::Verify() {
    4773             :   CHECK(HasBeenSetUp());
    4774             :   HandleScope scope(isolate());
    4775             : 
    4776             :   // We have to wait here for the sweeper threads to have an iterable heap.
    4777             :   mark_compact_collector()->EnsureSweepingCompleted();
    4778             : 
    4779             :   VerifyPointersVisitor visitor;
    4780             :   IterateRoots(&visitor, VISIT_ONLY_STRONG);
    4781             : 
    4782             :   VerifySmisVisitor smis_visitor;
    4783             :   IterateSmiRoots(&smis_visitor);
    4784             : 
    4785             :   new_space_->Verify();
    4786             : 
    4787             :   old_space_->Verify(&visitor);
    4788             :   map_space_->Verify(&visitor);
    4789             : 
    4790             :   VerifyPointersVisitor no_dirty_regions_visitor;
    4791             :   code_space_->Verify(&no_dirty_regions_visitor);
    4792             : 
    4793             :   lo_space_->Verify();
    4794             : 
    4795             :   mark_compact_collector()->VerifyWeakEmbeddedObjectsInCode();
    4796             :   if (FLAG_omit_map_checks_for_leaf_maps) {
    4797             :     mark_compact_collector()->VerifyOmittedMapChecks();
    4798             :   }
    4799             : }
    4800             : #endif
    4801             : 
    4802             : 
    4803           0 : void Heap::ZapFromSpace() {
    4804           0 :   if (!new_space_->IsFromSpaceCommitted()) return;
    4805           0 :   for (Page* page :
    4806           0 :        PageRange(new_space_->FromSpaceStart(), new_space_->FromSpaceEnd())) {
    4807           0 :     for (Address cursor = page->area_start(), limit = page->area_end();
    4808             :          cursor < limit; cursor += kPointerSize) {
    4809           0 :       Memory::Address_at(cursor) = kFromSpaceZapValue;
    4810             :     }
    4811             :   }
    4812             : }
    4813             : 
    4814           0 : class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
    4815             :  public:
    4816             :   IterateAndScavengePromotedObjectsVisitor(Heap* heap, HeapObject* target,
    4817             :                                            bool record_slots)
    4818    36206312 :       : heap_(heap), target_(target), record_slots_(record_slots) {}
    4819             : 
    4820    37267491 :   inline void VisitPointers(HeapObject* host, Object** start,
    4821             :                             Object** end) override {
    4822             :     DCHECK_EQ(host, target_);
    4823             :     Address slot_address = reinterpret_cast<Address>(start);
    4824             :     Page* page = Page::FromAddress(slot_address);
    4825             : 
    4826   354386464 :     while (slot_address < reinterpret_cast<Address>(end)) {
    4827             :       Object** slot = reinterpret_cast<Object**>(slot_address);
    4828   279851482 :       Object* target = *slot;
    4829             : 
    4830   279851482 :       if (target->IsHeapObject()) {
    4831   228669607 :         if (heap_->InFromSpace(target)) {
    4832             :           Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot),
    4833    36010083 :                                     HeapObject::cast(target));
    4834    36010083 :           target = *slot;
    4835    36010083 :           if (heap_->InNewSpace(target)) {
    4836             :             SLOW_DCHECK(heap_->InToSpace(target));
    4837             :             SLOW_DCHECK(target->IsHeapObject());
    4838      796908 :             RememberedSet<OLD_TO_NEW>::Insert(page, slot_address);
    4839             :           }
    4840             :           SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
    4841             :               HeapObject::cast(target)));
    4842   192897731 :         } else if (record_slots_ &&
    4843             :                    MarkCompactCollector::IsOnEvacuationCandidate(
    4844             :                        HeapObject::cast(target))) {
    4845          53 :           heap_->mark_compact_collector()->RecordSlot(target_, slot, target);
    4846             :         }
    4847             :       }
    4848             : 
    4849   279851482 :       slot_address += kPointerSize;
    4850             :     }
    4851    37267491 :   }
    4852             : 
    4853           0 :   inline void VisitCodeEntry(JSFunction* host,
    4854             :                              Address code_entry_slot) override {
    4855             :     // Black allocation requires us to process objects referenced by
    4856             :     // promoted objects.
    4857           0 :     if (heap_->incremental_marking()->black_allocation()) {
    4858           0 :       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
    4859           0 :       IncrementalMarking::MarkGrey(heap_, code);
    4860             :     }
    4861           0 :   }
    4862             : 
    4863             :  private:
    4864             :   Heap* heap_;
    4865             :   HeapObject* target_;
    4866             :   bool record_slots_;
    4867             : };
    4868             : 
    4869    36206312 : void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
    4870    72366319 :                                             bool was_marked_black) {
    4871             :   // We are not collecting slots on new-space objects during mutation,
    4872             :   // so we have to scan for pointers to evacuation candidates when we
    4873             :   // promote objects. But we should not record any slots in non-black
    4874             :   // objects. A grey object's slots would be rescanned anyway.
    4875             :   // A white object might not survive until the end of the collection, so
    4876             :   // recording its slots would violate the invariant.
    4877             :   bool record_slots = false;
    4878    36206312 :   if (incremental_marking()->IsCompacting()) {
    4879             :     record_slots =
    4880             :         ObjectMarking::IsBlack(target, MarkingState::Internal(target));
    4881             :   }
    4882             : 
    4883             :   // TODO(ulan): remove the target, the visitor now gets the host object
    4884             :   // in each visit method.
    4885             :   IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots);
    4886    36206312 :   if (target->IsJSFunction()) {
    4887             :     // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for
    4888             :     // these links are recorded during processing of weak lists.
    4889     1038344 :     JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor);
    4890             :   } else {
    4891    35167968 :     target->IterateBody(target->map()->instance_type(), size, &visitor);
    4892             :   }
    4893             : 
    4894             :   // When black allocation is on, we have to visit new-space objects that were
    4895             :   // not already marked black but were promoted to black pages, to keep their
    4896             :   // references alive.
    4897             :   // TODO(hpayer): Implement a special promotion visitor that incorporates
    4898             :   // regular visiting and IteratePromotedObjectPointers.
    4899    36206312 :   if (!was_marked_black) {
    4900    36157448 :     if (incremental_marking()->black_allocation()) {
    4901        2559 :       IncrementalMarking::MarkGrey(this, target->map());
    4902        2559 :       incremental_marking()->IterateBlackObject(target);
    4903             :     }
    4904             :   }
    4905    36206312 : }
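
The per-slot decision above can be condensed into a minimal standalone sketch (hypothetical simplified types, not the V8 classes used here): each slot of a freshly promoted object is inspected, referents that stay in new space get an OLD_TO_NEW remembered-set entry so the next scavenge can find them without scanning old space, and referents on evacuation candidates are recorded only when the host object is already black.

    #include <unordered_set>
    #include <vector>

    // Hypothetical, simplified model of the slot-recording decision above.
    enum class Space { kNewSpace, kOldSpace, kEvacuationCandidate };

    struct FakeObject {
      Space space;
    };

    struct RememberedSets {
      std::unordered_set<FakeObject**> old_to_new;  // slots pointing into new space
      std::unordered_set<FakeObject**> old_to_old;  // slots into evacuation candidates
    };

    // record_slots corresponds to "the host is marked black while the incremental
    // marker is compacting" in the visitor above.
    void ScanPromotedObjectSlots(std::vector<FakeObject*>* slots, bool record_slots,
                                 RememberedSets* sets) {
      for (FakeObject*& slot : *slots) {
        FakeObject* target = slot;
        if (target == nullptr) continue;
        if (target->space == Space::kNewSpace) {
          // The promoted (old-space) host still points into new space, so remember
          // the slot for the next scavenge.
          sets->old_to_new.insert(&slot);
        } else if (record_slots && target->space == Space::kEvacuationCandidate) {
          // Only black hosts may record slots into evacuation candidates.
          sets->old_to_old.insert(&slot);
        }
      }
    }
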
    4906             : 
    4907      125143 : void Heap::IterateRoots(RootVisitor* v, VisitMode mode) {
    4908      125143 :   IterateStrongRoots(v, mode);
    4909      125143 :   IterateWeakRoots(v, mode);
    4910      125143 : }
    4911             : 
    4912      186033 : void Heap::IterateWeakRoots(RootVisitor* v, VisitMode mode) {
    4913             :   v->VisitRootPointer(Root::kStringTable, reinterpret_cast<Object**>(
    4914      186033 :                                               &roots_[kStringTableRootIndex]));
    4915      186033 :   v->Synchronize(VisitorSynchronization::kStringTable);
    4916      186033 :   if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
    4917             :     // Scavenge collections have special processing for this.
    4918       63498 :     external_string_table_.IterateAll(v);
    4919             :   }
    4920      186033 :   v->Synchronize(VisitorSynchronization::kExternalStringsTable);
    4921      186033 : }
    4922             : 
    4923       60890 : void Heap::IterateSmiRoots(RootVisitor* v) {
    4924             :   // Acquire execution access since we are going to read stack limit values.
    4925             :   ExecutionAccess access(isolate());
    4926             :   v->VisitRootPointers(Root::kSmiRootList, &roots_[kSmiRootsStart],
    4927       60890 :                        &roots_[kRootListLength]);
    4928       60890 :   v->Synchronize(VisitorSynchronization::kSmiRootList);
    4929       60890 : }
    4930             : 
    4931           0 : void Heap::IterateEncounteredWeakCollections(RootVisitor* visitor) {
    4932             :   visitor->VisitRootPointer(Root::kWeakCollections,
    4933       69189 :                             &encountered_weak_collections_);
    4934           0 : }
    4935             : 
    4936             : // We cannot avoid stale handles to left-trimmed objects; we can only make
    4937             : // sure that all handles still needed are updated. Filter out a stale pointer
    4938             : // and clear the slot to allow post-processing of handles (needed because
    4939             : // the sweeper might actually free the underlying page).
    4940           0 : class FixStaleLeftTrimmedHandlesVisitor : public RootVisitor {
    4941             :  public:
    4942      241585 :   explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) {
    4943             :     USE(heap_);
    4944             :   }
    4945             : 
    4946         640 :   void VisitRootPointer(Root root, Object** p) override { FixHandle(p); }
    4947             : 
    4948      505297 :   void VisitRootPointers(Root root, Object** start, Object** end) override {
    4949      505297 :     for (Object** p = start; p < end; p++) FixHandle(p);
    4950      505297 :   }
    4951             : 
    4952             :  private:
    4953    82711276 :   inline void FixHandle(Object** p) {
    4954    82711276 :     HeapObject* current = reinterpret_cast<HeapObject*>(*p);
    4955   165422552 :     if (!current->IsHeapObject()) return;
    4956             :     const MapWord map_word = current->map_word();
    4957   148320963 :     if (!map_word.IsForwardingAddress() && current->IsFiller()) {
    4958             : #ifdef DEBUG
    4959             :       // We need to find a FixedArrayBase map after walking the fillers.
    4960             :       while (current->IsFiller()) {
    4961             :         Address next = reinterpret_cast<Address>(current);
    4962             :         if (current->map() == heap_->one_pointer_filler_map()) {
    4963             :           next += kPointerSize;
    4964             :         } else if (current->map() == heap_->two_pointer_filler_map()) {
    4965             :           next += 2 * kPointerSize;
    4966             :         } else {
    4967             :           next += current->Size();
    4968             :         }
    4969             :         current = reinterpret_cast<HeapObject*>(next);
    4970             :       }
    4971             :       DCHECK(current->IsFixedArrayBase());
    4972             : #endif  // DEBUG
    4973          72 :       *p = nullptr;
    4974             :     }
    4975             :   }
    4976             : 
    4977             :   Heap* heap_;
    4978             : };
    4979             : 
    4980      302475 : void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) {
    4981             :   v->VisitRootPointers(Root::kStrongRootList, &roots_[0],
    4982      302475 :                        &roots_[kStrongRootListLength]);
    4983      302475 :   v->Synchronize(VisitorSynchronization::kStrongRootList);
    4984             :   // The serializer/deserializer iterates the root list twice, first to pick
    4985             :   // off immortal immovable roots to make sure they end up on the first page,
    4986             :   // and then again for the rest.
    4987      363365 :   if (mode == VISIT_ONLY_STRONG_ROOT_LIST) return;
    4988             : 
    4989     2104925 :   isolate_->bootstrapper()->Iterate(v);
    4990      241585 :   v->Synchronize(VisitorSynchronization::kBootstrapper);
    4991      241585 :   isolate_->Iterate(v);
    4992      241585 :   v->Synchronize(VisitorSynchronization::kTop);
    4993      241585 :   Relocatable::Iterate(isolate_, v);
    4994      241585 :   v->Synchronize(VisitorSynchronization::kRelocatable);
    4995      483170 :   isolate_->debug()->Iterate(v);
    4996      241585 :   v->Synchronize(VisitorSynchronization::kDebug);
    4997             : 
    4998      483170 :   isolate_->compilation_cache()->Iterate(v);
    4999      241585 :   v->Synchronize(VisitorSynchronization::kCompilationCache);
    5000             : 
    5001             :   // Iterate over local handles in handle scopes.
    5002             :   FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this);
    5003      483170 :   isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
    5004      483170 :   isolate_->handle_scope_implementer()->Iterate(v);
    5005      241585 :   isolate_->IterateDeferredHandles(v);
    5006      241585 :   v->Synchronize(VisitorSynchronization::kHandleScope);
    5007             : 
    5008             :   // Iterate over the builtin code objects and code stubs in the
    5009             :   // heap. Note that it is not necessary to iterate over code objects
    5010             :   // on scavenge collections.
    5011      241585 :   if (mode != VISIT_ALL_IN_SCAVENGE) {
    5012      172396 :     isolate_->builtins()->IterateBuiltins(v);
    5013      172396 :     v->Synchronize(VisitorSynchronization::kBuiltins);
    5014      344792 :     isolate_->interpreter()->IterateDispatchTable(v);
    5015      172396 :     v->Synchronize(VisitorSynchronization::kDispatchTable);
    5016             :   }
    5017             : 
    5018             :   // Iterate over global handles.
    5019      241585 :   switch (mode) {
    5020             :     case VISIT_ONLY_STRONG_ROOT_LIST:
    5021           0 :       UNREACHABLE();
    5022             :       break;
    5023             :     case VISIT_ONLY_STRONG_FOR_SERIALIZATION:
    5024             :       break;
    5025             :     case VISIT_ONLY_STRONG:
    5026      233312 :       isolate_->global_handles()->IterateStrongRoots(v);
    5027      116656 :       break;
    5028             :     case VISIT_ALL_IN_SCAVENGE:
    5029      138378 :       isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v);
    5030       69189 :       break;
    5031             :     case VISIT_ALL_IN_SWEEP_NEWSPACE:
    5032             :     case VISIT_ALL:
    5033      111178 :       isolate_->global_handles()->IterateAllRoots(v);
    5034       55589 :       break;
    5035             :   }
    5036      241585 :   v->Synchronize(VisitorSynchronization::kGlobalHandles);
    5037             : 
    5038             :   // Iterate over eternal handles.
    5039      241585 :   if (mode == VISIT_ALL_IN_SCAVENGE) {
    5040      138378 :     isolate_->eternal_handles()->IterateNewSpaceRoots(v);
    5041             :   } else {
    5042      344792 :     isolate_->eternal_handles()->IterateAllRoots(v);
    5043             :   }
    5044      241585 :   v->Synchronize(VisitorSynchronization::kEternalHandles);
    5045             : 
    5046             :   // Iterate over pointers being held by inactive threads.
    5047      483170 :   isolate_->thread_manager()->Iterate(v);
    5048      241585 :   v->Synchronize(VisitorSynchronization::kThreadManager);
    5049             : 
    5050             :   // Iterate over other strong roots (currently only identity maps).
    5051      263595 :   for (StrongRootsList* list = strong_roots_list_; list; list = list->next) {
    5052       22010 :     v->VisitRootPointers(Root::kStrongRoots, list->start, list->end);
    5053             :   }
    5054      241585 :   v->Synchronize(VisitorSynchronization::kStrongRoots);
    5055             : 
    5056             :   // Iterate over the partial snapshot cache unless serializing.
    5057      241585 :   if (mode != VISIT_ONLY_STRONG_FOR_SERIALIZATION) {
    5058      241434 :     SerializerDeserializer::Iterate(isolate_, v);
    5059             :   }
    5060             :   // We don't do a v->Synchronize call here, because in debug mode that will
    5061             : // output a flag to the snapshot. However, at this point the serializer and
    5062             :   // deserializer are deliberately a little unsynchronized (see above) so the
    5063             :   // checking of the sync flag in the snapshot would fail.
    5064             : }
    5065             : 
    5066             : 
    5067             : // TODO(1236194): Since the heap size is configurable on the command line
    5068             : // and through the API, we should gracefully handle the case that the heap
    5069             : // size is not big enough to fit all the initial objects.
    5070       60782 : bool Heap::ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
    5071             :                          size_t max_executable_size, size_t code_range_size) {
    5072       60782 :   if (HasBeenSetUp()) return false;
    5073             : 
    5074             :   // Overwrite default configuration.
    5075       60782 :   if (max_semi_space_size != 0) {
    5076       28636 :     max_semi_space_size_ = max_semi_space_size * MB;
    5077             :   }
    5078       60782 :   if (max_old_space_size != 0) {
    5079       28648 :     max_old_generation_size_ = max_old_space_size * MB;
    5080             :   }
    5081       60782 :   if (max_executable_size != 0) {
    5082       28618 :     max_executable_size_ = max_executable_size * MB;
    5083             :   }
    5084             : 
    5085             :   // If max space size flags are specified, overwrite the configuration.
    5086       60782 :   if (FLAG_max_semi_space_size > 0) {
    5087         197 :     max_semi_space_size_ = static_cast<size_t>(FLAG_max_semi_space_size) * MB;
    5088             :   }
    5089       60782 :   if (FLAG_max_old_space_size > 0) {
    5090             :     max_old_generation_size_ =
    5091          19 :         static_cast<size_t>(FLAG_max_old_space_size) * MB;
    5092             :   }
    5093       60782 :   if (FLAG_max_executable_size > 0) {
    5094           0 :     max_executable_size_ = static_cast<size_t>(FLAG_max_executable_size) * MB;
    5095             :   }
    5096             : 
    5097             :   if (Page::kPageSize > MB) {
    5098             :     max_semi_space_size_ = ROUND_UP(max_semi_space_size_, Page::kPageSize);
    5099             :     max_old_generation_size_ =
    5100             :         ROUND_UP(max_old_generation_size_, Page::kPageSize);
    5101             :     max_executable_size_ = ROUND_UP(max_executable_size_, Page::kPageSize);
    5102             :   }
    5103             : 
    5104       60782 :   if (FLAG_stress_compaction) {
    5105             :     // This will cause more frequent GCs when stressing.
    5106          89 :     max_semi_space_size_ = MB;
    5107             :   }
    5108             : 
    5109             :   // The new space size must be a power of two to support single-bit testing
    5110             :   // for containment.
    5111             :   max_semi_space_size_ = base::bits::RoundUpToPowerOfTwo32(
    5112       60782 :       static_cast<uint32_t>(max_semi_space_size_));
    5113             : 
    5114       60782 :   if (FLAG_min_semi_space_size > 0) {
    5115             :     size_t initial_semispace_size =
    5116          44 :         static_cast<size_t>(FLAG_min_semi_space_size) * MB;
    5117          44 :     if (initial_semispace_size > max_semi_space_size_) {
    5118           7 :       initial_semispace_size_ = max_semi_space_size_;
    5119           7 :       if (FLAG_trace_gc) {
    5120             :         PrintIsolate(isolate_,
    5121             :                      "Min semi-space size cannot be more than the maximum "
    5122             :                      "semi-space size of %" PRIuS " MB\n",
    5123           0 :                      max_semi_space_size_ / MB);
    5124             :       }
    5125             :     } else {
    5126             :       initial_semispace_size_ =
    5127          37 :           ROUND_UP(initial_semispace_size, Page::kPageSize);
    5128             :     }
    5129             :   }
    5130             : 
    5131      121564 :   initial_semispace_size_ = Min(initial_semispace_size_, max_semi_space_size_);
    5132             : 
    5133       60782 :   if (FLAG_semi_space_growth_factor < 2) {
    5134           0 :     FLAG_semi_space_growth_factor = 2;
    5135             :   }
    5136             : 
    5137             :   // The old generation is paged and needs at least one page for each space.
    5138             :   int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
    5139             :   initial_max_old_generation_size_ = max_old_generation_size_ =
    5140             :       Max(static_cast<size_t>(paged_space_count * Page::kPageSize),
    5141      121564 :           max_old_generation_size_);
    5142             : 
    5143             :   // The max executable size must be less than or equal to the max old
    5144             :   // generation size.
    5145       60782 :   if (max_executable_size_ > max_old_generation_size_) {
    5146          49 :     max_executable_size_ = max_old_generation_size_;
    5147             :   }
    5148             : 
    5149       60782 :   if (FLAG_initial_old_space_size > 0) {
    5150           0 :     initial_old_generation_size_ = FLAG_initial_old_space_size * MB;
    5151             :   } else {
    5152             :     initial_old_generation_size_ =
    5153       60782 :         max_old_generation_size_ / kInitalOldGenerationLimitFactor;
    5154             :   }
    5155       60782 :   old_generation_allocation_limit_ = initial_old_generation_size_;
    5156             : 
    5157             :   // We rely on being able to allocate new arrays in paged spaces.
    5158             :   DCHECK(kMaxRegularHeapObjectSize >=
    5159             :          (JSArray::kSize +
    5160             :           FixedArray::SizeFor(JSArray::kInitialMaxFastElementArray) +
    5161             :           AllocationMemento::kSize));
    5162             : 
    5163       60782 :   code_range_size_ = code_range_size * MB;
    5164             : 
    5165       60782 :   configured_ = true;
    5166       60782 :   return true;
    5167             : }
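
As a rough standalone illustration of the rounding and clamping performed above (assumed page size and requested sizes, not the real Heap fields): the semispace size is rounded up to a power of two, the old generation size is rounded up to the page size, and the executable size is clamped to the old generation size.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    constexpr size_t kMB = 1024 * 1024;
    constexpr size_t kPageSizeExample = 512 * 1024;  // assumed page size for the example

    constexpr size_t RoundUp(size_t value, size_t granularity) {
      return ((value + granularity - 1) / granularity) * granularity;
    }

    uint32_t RoundUpToPowerOfTwo32(uint32_t value) {
      value--;
      value |= value >> 1;
      value |= value >> 2;
      value |= value >> 4;
      value |= value >> 8;
      value |= value >> 16;
      return value + 1;
    }

    int main() {
      // Hypothetical configuration: 3 MB semispaces, 100 MB old generation,
      // 256 MB executable limit.
      size_t max_semi_space_size = RoundUpToPowerOfTwo32(3 * kMB);            // -> 4 MB
      size_t max_old_generation_size = RoundUp(100 * kMB, kPageSizeExample);  // -> 100 MB
      size_t max_executable_size = 256 * kMB;
      if (max_executable_size > max_old_generation_size) {
        max_executable_size = max_old_generation_size;  // clamp to the old generation
      }
      std::printf("semi: %zu MB, old: %zu MB, exec: %zu MB\n",
                  max_semi_space_size / kMB, max_old_generation_size / kMB,
                  max_executable_size / kMB);
    }
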
    5168             : 
    5169             : 
    5170      122507 : void Heap::AddToRingBuffer(const char* string) {
    5171             :   size_t first_part =
    5172      122507 :       Min(strlen(string), kTraceRingBufferSize - ring_buffer_end_);
    5173      122507 :   memcpy(trace_ring_buffer_ + ring_buffer_end_, string, first_part);
    5174      122507 :   ring_buffer_end_ += first_part;
    5175      122507 :   if (first_part < strlen(string)) {
    5176       19809 :     ring_buffer_full_ = true;
    5177       19809 :     size_t second_part = strlen(string) - first_part;
    5178       19809 :     memcpy(trace_ring_buffer_, string + first_part, second_part);
    5179       19809 :     ring_buffer_end_ = second_part;
    5180             :   }
    5181      122507 : }
    5182             : 
    5183             : 
    5184           0 : void Heap::GetFromRingBuffer(char* buffer) {
    5185             :   size_t copied = 0;
    5186           0 :   if (ring_buffer_full_) {
    5187           0 :     copied = kTraceRingBufferSize - ring_buffer_end_;
    5188           0 :     memcpy(buffer, trace_ring_buffer_ + ring_buffer_end_, copied);
    5189             :   }
    5190           0 :   memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_);
    5191           0 : }
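
AddToRingBuffer and GetFromRingBuffer above implement a simple wrap-around trace buffer: writes overwrite the oldest bytes once the buffer is full, and reads linearize the contents starting from the oldest surviving byte. A minimal standalone sketch, using an assumed 8-byte capacity rather than kTraceRingBufferSize:

    #include <algorithm>
    #include <cstddef>
    #include <cstring>
    #include <iostream>
    #include <string>

    constexpr size_t kRingSize = 8;  // assumed capacity for the example

    struct TraceRing {
      char data[kRingSize] = {};
      size_t end = 0;     // next write position
      bool full = false;  // set once the buffer has wrapped at least once

      void Add(const char* s) {
        size_t len = std::strlen(s);
        size_t first_part = std::min(len, kRingSize - end);
        std::memcpy(data + end, s, first_part);
        end += first_part;
        if (first_part < len) {  // wrap around to the start of the buffer
          full = true;
          size_t second_part = len - first_part;
          std::memcpy(data, s + first_part, second_part);
          end = second_part;
        }
      }

      // Copies the buffer out in chronological order, oldest surviving byte first.
      std::string Get() const {
        std::string out;
        if (full) out.append(data + end, kRingSize - end);
        out.append(data, end);
        return out;
      }
    };

    int main() {
      TraceRing ring;
      ring.Add("Scavenge ");
      ring.Add("MarkCompact");          // 20 bytes written in total; oldest bytes are lost
      std::cout << ring.Get() << "\n";  // prints "kCompact", the most recent 8 bytes
    }
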
    5192             : 
    5193             : 
    5194       32134 : bool Heap::ConfigureHeapDefault() { return ConfigureHeap(0, 0, 0, 0); }
    5195             : 
    5196             : 
    5197           0 : void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
    5198           0 :   *stats->start_marker = HeapStats::kStartMarker;
    5199           0 :   *stats->end_marker = HeapStats::kEndMarker;
    5200           0 :   *stats->new_space_size = new_space_->Size();
    5201           0 :   *stats->new_space_capacity = new_space_->Capacity();
    5202           0 :   *stats->old_space_size = old_space_->SizeOfObjects();
    5203           0 :   *stats->old_space_capacity = old_space_->Capacity();
    5204           0 :   *stats->code_space_size = code_space_->SizeOfObjects();
    5205           0 :   *stats->code_space_capacity = code_space_->Capacity();
    5206           0 :   *stats->map_space_size = map_space_->SizeOfObjects();
    5207           0 :   *stats->map_space_capacity = map_space_->Capacity();
    5208           0 :   *stats->lo_space_size = lo_space_->Size();
    5209           0 :   isolate_->global_handles()->RecordStats(stats);
    5210           0 :   *stats->memory_allocator_size = memory_allocator()->Size();
    5211             :   *stats->memory_allocator_capacity =
    5212           0 :       memory_allocator()->Size() + memory_allocator()->Available();
    5213           0 :   *stats->os_error = base::OS::GetLastError();
    5214           0 :   *stats->malloced_memory = isolate_->allocator()->GetCurrentMemoryUsage();
    5215           0 :   *stats->malloced_peak_memory = isolate_->allocator()->GetMaxMemoryUsage();
    5216           0 :   if (take_snapshot) {
    5217           0 :     HeapIterator iterator(this);
    5218           0 :     for (HeapObject* obj = iterator.next(); obj != NULL;
    5219             :          obj = iterator.next()) {
    5220             :       InstanceType type = obj->map()->instance_type();
    5221             :       DCHECK(0 <= type && type <= LAST_TYPE);
    5222           0 :       stats->objects_per_type[type]++;
    5223           0 :       stats->size_per_type[type] += obj->Size();
    5224           0 :     }
    5225             :   }
    5226           0 :   if (stats->last_few_messages != NULL)
    5227           0 :     GetFromRingBuffer(stats->last_few_messages);
    5228           0 :   if (stats->js_stacktrace != NULL) {
    5229             :     FixedStringAllocator fixed(stats->js_stacktrace, kStacktraceBufferSize - 1);
    5230             :     StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
    5231           0 :     if (gc_state() == Heap::NOT_IN_GC) {
    5232           0 :       isolate()->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
    5233             :     } else {
    5234           0 :       accumulator.Add("Cannot get stack trace in GC.");
    5235             :     }
    5236             :   }
    5237           0 : }
    5238             : 
    5239     3211604 : size_t Heap::PromotedSpaceSizeOfObjects() {
    5240     6423208 :   return old_space_->SizeOfObjects() + code_space_->SizeOfObjects() +
    5241     6423208 :          map_space_->SizeOfObjects() + lo_space_->SizeOfObjects();
    5242             : }
    5243             : 
    5244         208 : uint64_t Heap::PromotedExternalMemorySize() {
    5245      630241 :   if (external_memory_ <= external_memory_at_last_mark_compact_) return 0;
    5246        8535 :   return static_cast<uint64_t>(external_memory_ -
    5247        8535 :                                external_memory_at_last_mark_compact_);
    5248             : }
    5249             : 
    5250             : 
    5251             : const double Heap::kMinHeapGrowingFactor = 1.1;
    5252             : const double Heap::kMaxHeapGrowingFactor = 4.0;
    5253             : const double Heap::kMaxHeapGrowingFactorMemoryConstrained = 2.0;
    5254             : const double Heap::kMaxHeapGrowingFactorIdle = 1.5;
    5255             : const double Heap::kConservativeHeapGrowingFactor = 1.3;
    5256             : const double Heap::kTargetMutatorUtilization = 0.97;
    5257             : 
    5258             : // Given the GC speed and the allocation throughput (mutator speed), both in
    5259             : // bytes per ms, this function returns the heap growing factor that will
    5260             : // achieve kTargetMutatorUtilization if the GC speed and the mutator speed
    5261             : // remain the same until the next GC.
    5262             : //
    5263             : // For a fixed time-frame T = TM + TG, the mutator utilization is the ratio
    5264             : // TM / (TM + TG), where TM is the time spent in the mutator and TG is the
    5265             : // time spent in the garbage collector.
    5266             : //
    5267             : // Let MU be kTargetMutatorUtilization, the desired mutator utilization for the
    5268             : // time-frame from the end of the current GC to the end of the next GC. Based
    5269             : // on the MU we can compute the heap growing factor F as
    5270             : //
    5271             : // F = R * (1 - MU) / (R * (1 - MU) - MU), where R = gc_speed / mutator_speed.
    5272             : //
    5273             : // This formula can be derived as follows.
    5274             : //
    5275             : // F = Limit / Live by definition, where Limit is the allocation limit,
    5276             : // and Live is the size of live objects.
    5277             : // Let’s assume that we already know the Limit. Then:
    5278             : //   TG = Limit / gc_speed
    5279             : //   TM = (TM + TG) * MU, by definition of MU.
    5280             : //   TM = TG * MU / (1 - MU)
    5281             : //   TM = Limit *  MU / (gc_speed * (1 - MU))
    5282             : // On the other hand, if the allocation throughput remains constant:
    5283             : //   Limit = Live + TM * allocation_throughput = Live + TM * mutator_speed
    5284             : // Solving it for TM, we get
    5285             : //   TM = (Limit - Live) / mutator_speed
    5286             : // Combining the two equations for TM:
    5287             : //   (Limit - Live) / mutator_speed = Limit * MU / (gc_speed * (1 - MU))
    5288             : //   (Limit - Live) = Limit * MU * mutator_speed / (gc_speed * (1 - MU))
    5289             : // substitute R = gc_speed / mutator_speed
    5290             : //   (Limit - Live) = Limit * MU  / (R * (1 - MU))
    5291             : // substitute F = Limit / Live
    5292             : //   F - 1 = F * MU  / (R * (1 - MU))
    5293             : //   F - F * MU / (R * (1 - MU)) = 1
    5294             : //   F * (1 - MU / (R * (1 - MU))) = 1
    5295             : //   F * (R * (1 - MU) - MU) / (R * (1 - MU)) = 1
    5296             : //   F = R * (1 - MU) / (R * (1 - MU) - MU)
    5297       53465 : double Heap::HeapGrowingFactor(double gc_speed, double mutator_speed) {
    5298       53465 :   if (gc_speed == 0 || mutator_speed == 0) return kMaxHeapGrowingFactor;
    5299             : 
    5300       43445 :   const double speed_ratio = gc_speed / mutator_speed;
    5301             :   const double mu = kTargetMutatorUtilization;
    5302             : 
    5303       43445 :   const double a = speed_ratio * (1 - mu);
    5304       43445 :   const double b = speed_ratio * (1 - mu) - mu;
    5305             : 
    5306             :   // The factor is a / b, but we need to check for small b first.
    5307             :   double factor =
    5308       43445 :       (a < b * kMaxHeapGrowingFactor) ? a / b : kMaxHeapGrowingFactor;
    5309             :   factor = Min(factor, kMaxHeapGrowingFactor);
    5310             :   factor = Max(factor, kMinHeapGrowingFactor);
    5311       43445 :   return factor;
    5312             : }
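
A worked example of the formula, with assumed speeds: if the GC is 100x faster than the mutator (R = 100) and MU = 0.97, then F = 100 * 0.03 / (100 * 0.03 - 0.97) = 3 / 2.03, roughly 1.48, so the limit is set to about 1.5x the live size. As R drops toward MU / (1 - MU), roughly 32.3, the factor blows up and is clamped at kMaxHeapGrowingFactor; for very large R it approaches 1 and is clamped at kMinHeapGrowingFactor.

    #include <algorithm>
    #include <cstdio>

    int main() {
      const double mu = 0.97;                        // kTargetMutatorUtilization
      const double speed_ratio = 100.0;              // R = gc_speed / mutator_speed (assumed)
      const double a = speed_ratio * (1 - mu);       // 3.0
      const double b = speed_ratio * (1 - mu) - mu;  // 2.03
      double factor = a / b;                         // ~1.478
      factor = std::min(factor, 4.0);                // kMaxHeapGrowingFactor
      factor = std::max(factor, 1.1);                // kMinHeapGrowingFactor
      std::printf("heap growing factor ~= %.3f\n", factor);
    }
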
    5313             : 
    5314       53456 : size_t Heap::CalculateOldGenerationAllocationLimit(double factor,
    5315             :                                                    size_t old_gen_size) {
    5316       53456 :   CHECK(factor > 1.0);
    5317       53456 :   CHECK(old_gen_size > 0);
    5318       53456 :   uint64_t limit = static_cast<uint64_t>(old_gen_size * factor);
    5319             :   limit = Max(limit, static_cast<uint64_t>(old_gen_size) +
    5320       53456 :                          MinimumAllocationLimitGrowingStep());
    5321      106912 :   limit += new_space_->Capacity();
    5322             :   uint64_t halfway_to_the_max =
    5323       53456 :       (static_cast<uint64_t>(old_gen_size) + max_old_generation_size_) / 2;
    5324       53456 :   return static_cast<size_t>(Min(limit, halfway_to_the_max));
    5325             : }
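
For instance, with assumed values of old_gen_size = 128 MB, factor = 1.5, a minimum growing step of 8 MB, a 16 MB new-space capacity, and max_old_generation_size_ = 700 MB, the computation above yields max(128 * 1.5, 128 + 8) + 16 = 208 MB. That is below the halfway-to-the-max cap of (128 + 700) / 2 = 414 MB, so 208 MB becomes the new old generation allocation limit.
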
    5326             : 
    5327           0 : size_t Heap::MinimumAllocationLimitGrowingStep() {
    5328             :   const size_t kRegularAllocationLimitGrowingStep = 8;
    5329             :   const size_t kLowMemoryAllocationLimitGrowingStep = 2;
    5330             :   size_t limit = (Page::kPageSize > MB ? Page::kPageSize : MB);
    5331      109932 :   return limit * (ShouldOptimizeForMemoryUsage()
    5332             :                       ? kLowMemoryAllocationLimitGrowingStep
    5333      109932 :                       : kRegularAllocationLimitGrowingStep);
    5334             : }
    5335             : 
    5336       53346 : void Heap::SetOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
    5337      105839 :                                            double mutator_speed) {
    5338       53346 :   double factor = HeapGrowingFactor(gc_speed, mutator_speed);
    5339             : 
    5340       53346 :   if (FLAG_trace_gc_verbose) {
    5341             :     isolate_->PrintWithTimestamp(
    5342             :         "Heap growing factor %.1f based on mu=%.3f, speed_ratio=%.f "
    5343             :         "(gc=%.f, mutator=%.f)\n",
    5344             :         factor, kTargetMutatorUtilization, gc_speed / mutator_speed, gc_speed,
    5345           0 :         mutator_speed);
    5346             :   }
    5347             : 
    5348       53346 :   if (IsMemoryConstrainedDevice()) {
    5349             :     factor = Min(factor, kMaxHeapGrowingFactorMemoryConstrained);
    5350             :   }
    5351             : 
    5352      160038 :   if (memory_reducer_->ShouldGrowHeapSlowly() ||
    5353       53346 :       ShouldOptimizeForMemoryUsage()) {
    5354             :     factor = Min(factor, kConservativeHeapGrowingFactor);
    5355             :   }
    5356             : 
    5357      105839 :   if (FLAG_stress_compaction || ShouldReduceMemory()) {
    5358             :     factor = kMinHeapGrowingFactor;
    5359             :   }
    5360             : 
    5361       53346 :   if (FLAG_heap_growing_percent > 0) {
    5362           0 :     factor = 1.0 + FLAG_heap_growing_percent / 100.0;
    5363             :   }
    5364             : 
    5365             :   old_generation_allocation_limit_ =
    5366       53346 :       CalculateOldGenerationAllocationLimit(factor, old_gen_size);
    5367             : 
    5368       53346 :   if (FLAG_trace_gc_verbose) {
    5369             :     isolate_->PrintWithTimestamp(
    5370             :         "Grow: old size: %" PRIuS " KB, new limit: %" PRIuS " KB (%.1f)\n",
    5371           0 :         old_gen_size / KB, old_generation_allocation_limit_ / KB, factor);
    5372             :   }
    5373       53346 : }
    5374             : 
    5375         110 : void Heap::DampenOldGenerationAllocationLimit(size_t old_gen_size,
    5376             :                                               double gc_speed,
    5377             :                                               double mutator_speed) {
    5378         110 :   double factor = HeapGrowingFactor(gc_speed, mutator_speed);
    5379         110 :   size_t limit = CalculateOldGenerationAllocationLimit(factor, old_gen_size);
    5380         110 :   if (limit < old_generation_allocation_limit_) {
    5381           0 :     if (FLAG_trace_gc_verbose) {
    5382             :       isolate_->PrintWithTimestamp(
    5383             :           "Dampen: old size: %" PRIuS " KB, old limit: %" PRIuS
    5384             :           " KB, "
    5385             :           "new limit: %" PRIuS " KB (%.1f)\n",
    5386             :           old_gen_size / KB, old_generation_allocation_limit_ / KB, limit / KB,
    5387           0 :           factor);
    5388             :     }
    5389           0 :     old_generation_allocation_limit_ = limit;
    5390             :   }
    5391         110 : }
    5392             : 
    5393        8808 : bool Heap::ShouldOptimizeForLoadTime() {
    5394           0 :   return isolate()->rail_mode() == PERFORMANCE_LOAD &&
    5395        8808 :          !AllocationLimitOvershotByLargeMargin() &&
    5396           0 :          MonotonicallyIncreasingTimeInMs() <
    5397        8808 :              isolate()->LoadStartTimeMs() + kMaxLoadTimeMs;
    5398             : }
    5399             : 
    5400             : // This predicate is called when an old generation space cannot allocate from
    5401             : // the free list and is about to add a new page. Returning false will cause a
    5402             : // major GC. It happens when the old generation allocation limit is reached and
    5403             : // - either we need to optimize for memory usage,
    5404             : // - or the incremental marking is not in progress and we cannot start it.
    5405      536278 : bool Heap::ShouldExpandOldGenerationOnSlowAllocation() {
    5406      531597 :   if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true;
    5407             :   // We reached the old generation allocation limit.
    5408             : 
    5409        4681 :   if (ShouldOptimizeForMemoryUsage()) return false;
    5410             : 
    5411        4681 :   if (ShouldOptimizeForLoadTime()) return true;
    5412             : 
    5413        4681 :   if (incremental_marking()->NeedsFinalization()) {
    5414        1908 :     return !AllocationLimitOvershotByLargeMargin();
    5415             :   }
    5416             : 
    5417        4292 :   if (incremental_marking()->IsStopped() &&
    5418        1519 :       IncrementalMarkingLimitReached() == IncrementalMarkingLimit::kNoLimit) {
    5419             :     // We cannot start incremental marking.
    5420             :     return false;
    5421             :   }
    5422        1259 :   return true;
    5423             : }
    5424             : 
    5425             : // This function returns either kNoLimit, kSoftLimit, or kHardLimit.
    5426             : // kNoLimit means that either incremental marking is disabled or it is too
    5427             : // early to start incremental marking.
    5428             : // kSoftLimit means that incremental marking should be started soon.
    5429             : // kHardLimit means that incremental marking should be started immediately.
    5430     2903871 : Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
    5431     4899047 :   if (!incremental_marking()->CanBeActivated() ||
    5432     1995176 :       PromotedSpaceSizeOfObjects() <=
    5433             :           IncrementalMarking::kActivationThreshold) {
    5434             :     // Incremental marking is disabled or it is too early to start.
    5435             :     return IncrementalMarkingLimit::kNoLimit;
    5436             :   }
    5437      206572 :   if ((FLAG_stress_compaction && (gc_count_ & 1) != 0) ||
    5438      103258 :       HighMemoryPressure()) {
    5439             :     // If there is high memory pressure or stress testing is enabled, then
    5440             :     // start marking immediately.
    5441             :     return IncrementalMarkingLimit::kHardLimit;
    5442             :   }
    5443      103258 :   size_t old_generation_space_available = OldGenerationSpaceAvailable();
    5444      206516 :   if (old_generation_space_available > new_space_->Capacity()) {
    5445             :     return IncrementalMarkingLimit::kNoLimit;
    5446             :   }
    5447        4127 :   if (ShouldOptimizeForMemoryUsage()) {
    5448             :     return IncrementalMarkingLimit::kHardLimit;
    5449             :   }
    5450        4127 :   if (ShouldOptimizeForLoadTime()) {
    5451             :     return IncrementalMarkingLimit::kNoLimit;
    5452             :   }
    5453        4127 :   if (old_generation_space_available == 0) {
    5454             :     return IncrementalMarkingLimit::kHardLimit;
    5455             :   }
    5456        3633 :   return IncrementalMarkingLimit::kSoftLimit;
    5457             : }
    5458             : 
    5459          96 : void Heap::EnableInlineAllocation() {
    5460          96 :   if (!inline_allocation_disabled_) return;
    5461          48 :   inline_allocation_disabled_ = false;
    5462             : 
    5463             :   // Update inline allocation limit for new space.
    5464          48 :   new_space()->UpdateInlineAllocationLimit(0);
    5465             : }
    5466             : 
    5467             : 
    5468         148 : void Heap::DisableInlineAllocation() {
    5469          74 :   if (inline_allocation_disabled_) return;
    5470          74 :   inline_allocation_disabled_ = true;
    5471             : 
    5472             :   // Update inline allocation limit for new space.
    5473          74 :   new_space()->UpdateInlineAllocationLimit(0);
    5474             : 
    5475             :   // Update inline allocation limit for old spaces.
    5476             :   PagedSpaces spaces(this);
    5477         296 :   for (PagedSpace* space = spaces.next(); space != NULL;
    5478             :        space = spaces.next()) {
    5479         222 :     space->EmptyAllocationInfo();
    5480             :   }
    5481             : }
    5482             : 
    5483             : 
    5484             : V8_DECLARE_ONCE(initialize_gc_once);
    5485             : 
    5486       58018 : static void InitializeGCOnce() {
    5487       58018 :   Scavenger::Initialize();
    5488       58018 :   StaticScavengeVisitor::Initialize();
    5489       58018 :   MarkCompactCollector::Initialize();
    5490       58018 : }
    5491             : 
    5492             : 
    5493      425474 : bool Heap::SetUp() {
    5494             : #ifdef DEBUG
    5495             :   allocation_timeout_ = FLAG_gc_interval;
    5496             : #endif
    5497             : 
    5498             :   // Initialize heap spaces and initial maps and objects. Whenever something
    5499             :   // goes wrong, just return false. The caller should check the results and
    5500             :   // call Heap::TearDown() to release allocated memory.
    5501             :   //
    5502             :   // If the heap is not yet configured (e.g. through the API), configure it.
    5503             :   // Configuration is based on the flags new-space-size (really the semispace
    5504             :   // size) and old-space-size if set or the initial values of semispace_size_
    5505             :   // and old_generation_size_ otherwise.
    5506       60782 :   if (!configured_) {
    5507       32134 :     if (!ConfigureHeapDefault()) return false;
    5508             :   }
    5509             : 
    5510       60782 :   base::CallOnce(&initialize_gc_once, &InitializeGCOnce);
    5511             : 
    5512             :   // Set up memory allocator.
    5513      182346 :   memory_allocator_ = new MemoryAllocator(isolate_);
    5514       60782 :   if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(),
    5515      121564 :                                 code_range_size_))
    5516             :     return false;
    5517             : 
    5518       60782 :   store_buffer_ = new StoreBuffer(this);
    5519             : 
    5520       60782 :   incremental_marking_ = new IncrementalMarking(this);
    5521             : 
    5522       60782 :   concurrent_marking_ = new ConcurrentMarking(this);
    5523             : 
    5524      364692 :   for (int i = 0; i <= LAST_SPACE; i++) {
    5525      303910 :     space_[i] = nullptr;
    5526             :   }
    5527             : 
    5528       60782 :   space_[NEW_SPACE] = new_space_ = new NewSpace(this);
    5529       60782 :   if (!new_space_->SetUp(initial_semispace_size_, max_semi_space_size_)) {
    5530             :     return false;
    5531             :   }
    5532       60782 :   new_space_top_after_last_gc_ = new_space()->top();
    5533             : 
    5534             :   space_[OLD_SPACE] = old_space_ =
    5535       60782 :       new OldSpace(this, OLD_SPACE, NOT_EXECUTABLE);
    5536       60782 :   if (!old_space_->SetUp()) return false;
    5537             : 
    5538       60782 :   space_[CODE_SPACE] = code_space_ = new OldSpace(this, CODE_SPACE, EXECUTABLE);
    5539       60782 :   if (!code_space_->SetUp()) return false;
    5540             : 
    5541       60782 :   space_[MAP_SPACE] = map_space_ = new MapSpace(this, MAP_SPACE);
    5542       60782 :   if (!map_space_->SetUp()) return false;
    5543             : 
    5544             :   // The large object space may contain code or data. We set the memory
    5545             :   // to be non-executable here for safety, but this means we need to enable it
    5546             :   // explicitly when allocating large code objects.
    5547       60782 :   space_[LO_SPACE] = lo_space_ = new LargeObjectSpace(this, LO_SPACE);
    5548       60782 :   if (!lo_space_->SetUp()) return false;
    5549             : 
    5550             :   // Set up the seed that is used to randomize the string hash function.
    5551             :   DCHECK(hash_seed() == 0);
    5552       60782 :   if (FLAG_randomize_hashes) {
    5553       60782 :     if (FLAG_hash_seed == 0) {
    5554       60782 :       int rnd = isolate()->random_number_generator()->NextInt();
    5555       60782 :       set_hash_seed(Smi::FromInt(rnd & Name::kHashBitMask));
    5556             :     } else {
    5557             :       set_hash_seed(Smi::FromInt(FLAG_hash_seed));
    5558             :     }
    5559             :   }
    5560             : 
    5561     2370498 :   for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
    5562             :        i++) {
    5563     2370498 :     deferred_counters_[i] = 0;
    5564             :   }
    5565             : 
    5566       60782 :   tracer_ = new GCTracer(this);
    5567      121564 :   scavenge_collector_ = new Scavenger(this);
    5568       60782 :   mark_compact_collector_ = new MarkCompactCollector(this);
    5569       60782 :   if (FLAG_minor_mc)
    5570           0 :     minor_mark_compact_collector_ = new MinorMarkCompactCollector(this);
    5571      121564 :   gc_idle_time_handler_ = new GCIdleTimeHandler();
    5572      121564 :   memory_reducer_ = new MemoryReducer(this);
    5573       60782 :   if (V8_UNLIKELY(FLAG_gc_stats)) {
    5574           0 :     live_object_stats_ = new ObjectStats(this);
    5575           0 :     dead_object_stats_ = new ObjectStats(this);
    5576             :   }
    5577      121564 :   scavenge_job_ = new ScavengeJob();
    5578      121564 :   local_embedder_heap_tracer_ = new LocalEmbedderHeapTracer();
    5579             : 
    5580      121564 :   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
    5581      121564 :   LOG(isolate_, IntPtrTEvent("heap-available", Available()));
    5582             : 
    5583       60782 :   store_buffer()->SetUp();
    5584             : 
    5585       60782 :   mark_compact_collector()->SetUp();
    5586       60782 :   if (minor_mark_compact_collector() != nullptr) {
    5587           0 :     minor_mark_compact_collector()->SetUp();
    5588             :   }
    5589             : 
    5590             :   idle_scavenge_observer_ = new IdleScavengeObserver(
    5591      121564 :       *this, ScavengeJob::kBytesAllocatedBeforeNextIdleTask);
    5592       60782 :   new_space()->AddAllocationObserver(idle_scavenge_observer_);
    5593             : 
    5594       60782 :   return true;
    5595             : }
    5596             : 
    5597             : 
    5598          86 : bool Heap::CreateHeapObjects() {
    5599             :   // Create initial maps.
    5600          43 :   if (!CreateInitialMaps()) return false;
    5601          43 :   if (!CreateApiObjects()) return false;
    5602             : 
    5603             :   // Create initial objects
    5604          43 :   CreateInitialObjects();
    5605          43 :   CHECK_EQ(0u, gc_count_);
    5606             : 
    5607             :   set_native_contexts_list(undefined_value());
    5608             :   set_allocation_sites_list(undefined_value());
    5609             : 
    5610          43 :   return true;
    5611             : }
    5612             : 
    5613             : 
    5614     7642466 : void Heap::SetStackLimits() {
    5615             :   DCHECK(isolate_ != NULL);
    5616             :   DCHECK(isolate_ == isolate());
    5617             :   // On 64-bit machines, pointers are generally out of range of Smis. We write
    5618             :   // something that looks like an out-of-range Smi to the GC.
    5619             : 
    5620             :   // Set up the special root array entries containing the stack limits.
    5621             :   // These are actually addresses, but the tag makes the GC ignore them.
    5622             :   roots_[kStackLimitRootIndex] = reinterpret_cast<Object*>(
    5623    15284932 :       (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
    5624             :   roots_[kRealStackLimitRootIndex] = reinterpret_cast<Object*>(
    5625     7642466 :       (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
    5626     7642466 : }
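
The disguise relies on Smi tagging: clearing the low bit makes the stored word look like a small integer, so the GC treats the stack-limit entries as immediates and never tries to dereference or relocate them. A standalone sketch, assuming the usual V8 encoding of kSmiTag == 0 and kSmiTagMask == 1:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uintptr_t kSmiTag = 0;      // assumed Smi tag value
      const uintptr_t kSmiTagMask = 1;  // assumed Smi tag mask

      // Hypothetical stack limit address (example assumes a 64-bit build).
      uintptr_t jslimit = 0x7ffd4ba0c9a5;
      // Clearing the tag bit makes the value look like a Smi, so a root visitor
      // will skip it instead of following it as a heap pointer.
      uintptr_t disguised = (jslimit & ~kSmiTagMask) | kSmiTag;
      std::printf("%#llx -> %#llx (low bit cleared)\n",
                  static_cast<unsigned long long>(jslimit),
                  static_cast<unsigned long long>(disguised));
    }
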
    5627             : 
    5628         151 : void Heap::ClearStackLimits() {
    5629         151 :   roots_[kStackLimitRootIndex] = Smi::kZero;
    5630         151 :   roots_[kRealStackLimitRootIndex] = Smi::kZero;
    5631         151 : }
    5632             : 
    5633           0 : void Heap::PrintAlloctionsHash() {
    5634           0 :   uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
    5635           0 :   PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count(), hash);
    5636           0 : }
    5637             : 
    5638             : 
    5639       60782 : void Heap::NotifyDeserializationComplete() {
    5640             :   DCHECK_EQ(0, gc_count());
    5641             :   PagedSpaces spaces(this);
    5642      243128 :   for (PagedSpace* s = spaces.next(); s != NULL; s = spaces.next()) {
    5643      364692 :     if (isolate()->snapshot_available()) s->ShrinkImmortalImmovablePages();
    5644             : #ifdef DEBUG
    5645             :     // All pages right after bootstrapping must be marked as never-evacuate.
    5646             :     for (Page* p : *s) {
    5647             :       CHECK(p->NeverEvacuate());
    5648             :     }
    5649             : #endif  // DEBUG
    5650             :   }
    5651             : 
    5652       60782 :   deserialization_complete_ = true;
    5653       60782 : }
    5654             : 
    5655           0 : void Heap::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
    5656             :   DCHECK_EQ(gc_state_, HeapState::NOT_IN_GC);
    5657             :   local_embedder_heap_tracer()->SetRemoteTracer(tracer);
    5658           0 : }
    5659             : 
    5660           0 : void Heap::TracePossibleWrapper(JSObject* js_object) {
    5661             :   DCHECK(js_object->WasConstructedFromApiFunction());
    5662           0 :   if (js_object->GetEmbedderFieldCount() >= 2 &&
    5663           0 :       js_object->GetEmbedderField(0) &&
    5664           0 :       js_object->GetEmbedderField(0) != undefined_value() &&
    5665             :       js_object->GetEmbedderField(1) != undefined_value()) {
    5666             :     DCHECK(reinterpret_cast<intptr_t>(js_object->GetEmbedderField(0)) % 2 == 0);
    5667             :     local_embedder_heap_tracer()->AddWrapperToTrace(std::pair<void*, void*>(
    5668             :         reinterpret_cast<void*>(js_object->GetEmbedderField(0)),
    5669             :         reinterpret_cast<void*>(js_object->GetEmbedderField(1))));
    5670             :   }
    5671           0 : }
    5672             : 
    5673           0 : void Heap::RegisterExternallyReferencedObject(Object** object) {
    5674           0 :   HeapObject* heap_object = HeapObject::cast(*object);
    5675           0 :   if (heap_object == nullptr) {
    5676             :     // We might encounter non-empty handles that point to nullptr.
    5677           0 :     return;
    5678             :   }
    5679             :   DCHECK(Contains(heap_object));
    5680           0 :   if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
    5681           0 :     IncrementalMarking::MarkGrey(this, heap_object);
    5682             :   } else {
    5683             :     DCHECK(mark_compact_collector()->in_use());
    5684             :     mark_compact_collector()->MarkObject(heap_object);
    5685             :   }
    5686             : }
    5687             : 
    5688      237140 : void Heap::TearDown() {
    5689             : #ifdef VERIFY_HEAP
    5690             :   if (FLAG_verify_heap) {
    5691             :     Verify();
    5692             :   }
    5693             : #endif
    5694             : 
    5695       59285 :   UpdateMaximumCommitted();
    5696             : 
    5697             :   if (FLAG_verify_predictable) {
    5698             :     PrintAlloctionsHash();
    5699             :   }
    5700             : 
    5701       59285 :   new_space()->RemoveAllocationObserver(idle_scavenge_observer_);
    5702       59285 :   delete idle_scavenge_observer_;
    5703       59285 :   idle_scavenge_observer_ = nullptr;
    5704             : 
    5705       59285 :   delete scavenge_collector_;
    5706       59285 :   scavenge_collector_ = nullptr;
    5707             : 
    5708       59285 :   if (mark_compact_collector_ != nullptr) {
    5709       59285 :     mark_compact_collector_->TearDown();
    5710       59285 :     delete mark_compact_collector_;
    5711       59285 :     mark_compact_collector_ = nullptr;
    5712             :   }
    5713             : 
    5714       59285 :   if (minor_mark_compact_collector_ != nullptr) {
    5715           0 :     minor_mark_compact_collector_->TearDown();
    5716           0 :     delete minor_mark_compact_collector_;
    5717           0 :     minor_mark_compact_collector_ = nullptr;
    5718             :   }
    5719             : 
    5720      118570 :   delete incremental_marking_;
    5721       59285 :   incremental_marking_ = nullptr;
    5722             : 
    5723       59285 :   delete concurrent_marking_;
    5724       59285 :   concurrent_marking_ = nullptr;
    5725             : 
    5726       59285 :   delete gc_idle_time_handler_;
    5727       59285 :   gc_idle_time_handler_ = nullptr;
    5728             : 
    5729       59285 :   if (memory_reducer_ != nullptr) {
    5730       59285 :     memory_reducer_->TearDown();
    5731       59285 :     delete memory_reducer_;
    5732       59285 :     memory_reducer_ = nullptr;
    5733             :   }
    5734             : 
    5735       59285 :   if (live_object_stats_ != nullptr) {
    5736           0 :     delete live_object_stats_;
    5737           0 :     live_object_stats_ = nullptr;
    5738             :   }
    5739             : 
    5740       59285 :   if (dead_object_stats_ != nullptr) {
    5741           0 :     delete dead_object_stats_;
    5742           0 :     dead_object_stats_ = nullptr;
    5743             :   }
    5744             : 
    5745      118570 :   delete local_embedder_heap_tracer_;
    5746       59285 :   local_embedder_heap_tracer_ = nullptr;
    5747             : 
    5748       59285 :   delete scavenge_job_;
    5749       59285 :   scavenge_job_ = nullptr;
    5750             : 
    5751       59285 :   isolate_->global_handles()->TearDown();
    5752             : 
    5753       59285 :   external_string_table_.TearDown();
    5754             : 
    5755       59285 :   delete tracer_;
    5756       59285 :   tracer_ = nullptr;
    5757             : 
    5758       59285 :   new_space_->TearDown();
    5759       59285 :   delete new_space_;
    5760       59285 :   new_space_ = nullptr;
    5761             : 
    5762       59285 :   if (old_space_ != NULL) {
    5763       59285 :     delete old_space_;
    5764       59285 :     old_space_ = NULL;
    5765             :   }
    5766             : 
    5767       59285 :   if (code_space_ != NULL) {
    5768       59285 :     delete code_space_;
    5769       59285 :     code_space_ = NULL;
    5770             :   }
    5771             : 
    5772       59285 :   if (map_space_ != NULL) {
    5773       59285 :     delete map_space_;
    5774       59285 :     map_space_ = NULL;
    5775             :   }
    5776             : 
    5777       59285 :   if (lo_space_ != NULL) {
    5778       59285 :     lo_space_->TearDown();
    5779       59285 :     delete lo_space_;
    5780       59285 :     lo_space_ = NULL;
    5781             :   }
    5782             : 
    5783       59285 :   store_buffer()->TearDown();
    5784             : 
    5785       59285 :   memory_allocator()->TearDown();
    5786             : 
    5787             :   StrongRootsList* next = NULL;
    5788      118570 :   for (StrongRootsList* list = strong_roots_list_; list; list = next) {
    5789           0 :     next = list->next;
    5790           0 :     delete list;
    5791             :   }
    5792       59285 :   strong_roots_list_ = NULL;
    5793             : 
    5794      118570 :   delete store_buffer_;
    5795       59285 :   store_buffer_ = nullptr;
    5796             : 
    5797      118570 :   delete memory_allocator_;
    5798       59285 :   memory_allocator_ = nullptr;
    5799       59285 : }
    5800             : 
    5801             : 
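                     : // Added note: GC prologue and epilogue callbacks are stored as
                     : // (callback, gc_type, pass_isolate) triples. Registering the same callback
                     : // twice is a bug (checked by DCHECK below); removal matches callbacks by
                     : // pointer identity.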
    5802          36 : void Heap::AddGCPrologueCallback(v8::Isolate::GCCallback callback,
    5803             :                                  GCType gc_type, bool pass_isolate) {
    5804             :   DCHECK(callback != NULL);
    5805             :   GCCallbackPair pair(callback, gc_type, pass_isolate);
    5806             :   DCHECK(!gc_prologue_callbacks_.Contains(pair));
    5807          36 :   return gc_prologue_callbacks_.Add(pair);
    5808             : }
    5809             : 
    5810             : 
    5811          30 : void Heap::RemoveGCPrologueCallback(v8::Isolate::GCCallback callback) {
    5812             :   DCHECK(callback != NULL);
    5813          60 :   for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
    5814          90 :     if (gc_prologue_callbacks_[i].callback == callback) {
    5815          30 :       gc_prologue_callbacks_.Remove(i);
    5816          30 :       return;
    5817             :     }
    5818             :   }
    5819           0 :   UNREACHABLE();
    5820             : }
    5821             : 
    5822             : 
    5823          30 : void Heap::AddGCEpilogueCallback(v8::Isolate::GCCallback callback,
    5824             :                                  GCType gc_type, bool pass_isolate) {
    5825             :   DCHECK(callback != NULL);
    5826             :   GCCallbackPair pair(callback, gc_type, pass_isolate);
    5827             :   DCHECK(!gc_epilogue_callbacks_.Contains(pair));
    5828          30 :   return gc_epilogue_callbacks_.Add(pair);
    5829             : }
    5830             : 
    5831             : 
    5832          30 : void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback) {
    5833             :   DCHECK(callback != NULL);
    5834          60 :   for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
    5835          90 :     if (gc_epilogue_callbacks_[i].callback == callback) {
    5836          30 :       gc_epilogue_callbacks_.Remove(i);
    5837          30 :       return;
    5838             :     }
    5839             :   }
    5840           0 :   UNREACHABLE();
    5841             : }
    5842             : 
    5843             : // TODO(ishell): Find a better place for this.
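                     : // Records that |code| depends on the new-space object |obj|: the object is
                     : // wrapped in a WeakCell and appended, together with |code|, to the
                     : // weak_new_space_object_to_code_list root.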
    5844      926002 : void Heap::AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj,
    5845     1852004 :                                                  Handle<WeakCell> code) {
    5846             :   DCHECK(InNewSpace(*obj));
    5847             :   DCHECK(!InNewSpace(*code));
    5848             :   Handle<ArrayList> list(weak_new_space_object_to_code_list(), isolate());
    5849     1852004 :   list = ArrayList::Add(list, isolate()->factory()->NewWeakCell(obj), code);
    5850      926002 :   if (*list != weak_new_space_object_to_code_list()) {
    5851             :     set_weak_new_space_object_to_code_list(*list);
    5852             :   }
    5853      926002 : }
    5854             : 
    5855             : // TODO(ishell): Find a better place for this.
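                     : // Records that |dep| depends on the old-space object |obj| by inserting the
                     : // pair into the weak_object_to_code_table.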
    5856     1050332 : void Heap::AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
    5857     2100664 :                                          Handle<DependentCode> dep) {
    5858             :   DCHECK(!InNewSpace(*obj));
    5859             :   DCHECK(!InNewSpace(*dep));
    5860             :   Handle<WeakHashTable> table(weak_object_to_code_table(), isolate());
    5861     1050332 :   table = WeakHashTable::Put(table, obj, dep);
    5862     1050332 :   if (*table != weak_object_to_code_table())
    5863             :     set_weak_object_to_code_table(*table);
    5864             :   DCHECK_EQ(*dep, LookupWeakObjectToCodeDependency(obj));
    5865     1050332 : }
    5866             : 
    5867             : 
    5868     1197580 : DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<HeapObject> obj) {
    5869     1050331 :   Object* dep = weak_object_to_code_table()->Lookup(obj);
    5870     1050332 :   if (dep->IsDependentCode()) return DependentCode::cast(dep);
    5871      147249 :   return DependentCode::cast(empty_fixed_array());
    5872             : }
    5873             : 
    5874             : namespace {
    5875         291 : void CompactWeakFixedArray(Object* object) {
    5876         291 :   if (object->IsWeakFixedArray()) {
    5877             :     WeakFixedArray* array = WeakFixedArray::cast(object);
    5878         194 :     array->Compact<WeakFixedArray::NullCallback>();
    5879             :   }
    5880         291 : }
    5881             : }  // anonymous namespace
    5882             : 
    5883         388 : void Heap::CompactWeakFixedArrays() {
    5884             :   // Find known WeakFixedArrays and compact them.
    5885          97 :   HeapIterator iterator(this);
    5886     1273116 :   for (HeapObject* o = iterator.next(); o != NULL; o = iterator.next()) {
    5887     1273019 :     if (o->IsPrototypeInfo()) {
    5888             :       Object* prototype_users = PrototypeInfo::cast(o)->prototype_users();
    5889         732 :       if (prototype_users->IsWeakFixedArray()) {
    5890             :         WeakFixedArray* array = WeakFixedArray::cast(prototype_users);
    5891         121 :         array->Compact<JSObject::PrototypeRegistryCompactionCallback>();
    5892             :       }
    5893             :     }
    5894             :   }
    5895          97 :   CompactWeakFixedArray(noscript_shared_function_infos());
    5896          97 :   CompactWeakFixedArray(script_list());
    5897          97 :   CompactWeakFixedArray(weak_stack_trace_list());
    5898          97 : }
    5899             : 
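                     : // Keeps |map| alive for the next FLAG_retain_maps_for_n_gc garbage
                     : // collections by appending a (WeakCell, age) pair to the retained_maps
                     : // list; the list is compacted first if it is full.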
    5900      114693 : void Heap::AddRetainedMap(Handle<Map> map) {
    5901       38231 :   Handle<WeakCell> cell = Map::WeakCellForMap(map);
    5902             :   Handle<ArrayList> array(retained_maps(), isolate());
    5903       38231 :   if (array->IsFull()) {
    5904        3298 :     CompactRetainedMaps(*array);
    5905             :   }
    5906             :   array = ArrayList::Add(
    5907             :       array, cell, handle(Smi::FromInt(FLAG_retain_maps_for_n_gc), isolate()),
    5908       76462 :       ArrayList::kReloadLengthAfterAllocation);
    5909       38231 :   if (*array != retained_maps()) {
    5910             :     set_retained_maps(*array);
    5911             :   }
    5912       38231 : }
    5913             : 
    5914             : 
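                     : // retained_maps stores (WeakCell, age) pairs, which is why the loops below
                     : // advance in steps of two. Entries whose weak cells have been cleared are
                     : // dropped and the unused tail of the list is reset to undefined.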
    5915        6596 : void Heap::CompactRetainedMaps(ArrayList* retained_maps) {
    5916             :   DCHECK_EQ(retained_maps, this->retained_maps());
    5917        3298 :   int length = retained_maps->Length();
    5918             :   int new_length = 0;
    5919             :   int new_number_of_disposed_maps = 0;
    5920             :   // This loop compacts the array by removing cleared weak cells.
    5921       28504 :   for (int i = 0; i < length; i += 2) {
    5922             :     DCHECK(retained_maps->Get(i)->IsWeakCell());
    5923             :     WeakCell* cell = WeakCell::cast(retained_maps->Get(i));
    5924             :     Object* age = retained_maps->Get(i + 1);
    5925       21908 :     if (cell->cleared()) continue;
    5926       15109 :     if (i != new_length) {
    5927             :       retained_maps->Set(new_length, cell);
    5928             :       retained_maps->Set(new_length + 1, age);
    5929             :     }
    5930       15109 :     if (i < number_of_disposed_maps_) {
    5931          27 :       new_number_of_disposed_maps += 2;
    5932             :     }
    5933       15109 :     new_length += 2;
    5934             :   }
    5935        3298 :   number_of_disposed_maps_ = new_number_of_disposed_maps;
    5936             :   Object* undefined = undefined_value();
    5937       20194 :   for (int i = new_length; i < length; i++) {
    5938             :     retained_maps->Clear(i, undefined);
    5939             :   }
    5940        3298 :   if (new_length != length) retained_maps->SetLength(new_length);
    5941        3298 : }
    5942             : 
    5943           0 : void Heap::FatalProcessOutOfMemory(const char* location, bool is_heap_oom) {
    5944           0 :   v8::internal::V8::FatalProcessOutOfMemory(location, is_heap_oom);
    5945           0 : }
    5946             : 
    5947             : #ifdef DEBUG
    5948             : 
    5949             : class PrintHandleVisitor : public RootVisitor {
    5950             :  public:
    5951             :   void VisitRootPointers(Root root, Object** start, Object** end) override {
    5952             :     for (Object** p = start; p < end; p++)
    5953             :       PrintF("  handle %p to %p\n", reinterpret_cast<void*>(p),
    5954             :              reinterpret_cast<void*>(*p));
    5955             :   }
    5956             : };
    5957             : 
    5958             : 
    5959             : void Heap::PrintHandles() {
    5960             :   PrintF("Handles:\n");
    5961             :   PrintHandleVisitor v;
    5962             :   isolate_->handle_scope_implementer()->Iterate(&v);
    5963             : }
    5964             : 
    5965             : #endif
    5966             : 
    5967             : class CheckHandleCountVisitor : public RootVisitor {
    5968             :  public:
    5969           0 :   CheckHandleCountVisitor() : handle_count_(0) {}
    5970           0 :   ~CheckHandleCountVisitor() override {
    5971           0 :     CHECK(handle_count_ < HandleScope::kCheckHandleThreshold);
    5972           0 :   }
    5973           0 :   void VisitRootPointers(Root root, Object** start, Object** end) override {
    5974           0 :     handle_count_ += end - start;
    5975           0 :   }
    5976             : 
    5977             :  private:
    5978             :   ptrdiff_t handle_count_;
    5979             : };
    5980             : 
    5981             : 
    5982           0 : void Heap::CheckHandleCount() {
    5983             :   CheckHandleCountVisitor v;
    5984           0 :   isolate_->handle_scope_implementer()->Iterate(&v);
    5985           0 : }
    5986             : 
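                     : // Recorded slots only exist outside the new space (the page owner is
                     : // expected to be OLD_SPACE), so slots inside new-space objects are ignored
                     : // here and in the helpers below.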
    5987      621214 : void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) {
    5988      618337 :   if (!InNewSpace(object)) {
    5989             :     Address slot_addr = reinterpret_cast<Address>(slot);
    5990             :     Page* page = Page::FromAddress(slot_addr);
    5991             :     DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
    5992             :     store_buffer()->DeleteEntry(slot_addr);
    5993        2877 :     RememberedSet<OLD_TO_OLD>::Remove(page, slot_addr);
    5994             :   }
    5995      618337 : }
    5996             : 
    5997           0 : bool Heap::HasRecordedSlot(HeapObject* object, Object** slot) {
    5998           0 :   if (InNewSpace(object)) {
    5999             :     return false;
    6000             :   }
    6001             :   Address slot_addr = reinterpret_cast<Address>(slot);
    6002             :   Page* page = Page::FromAddress(slot_addr);
    6003             :   DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
    6004           0 :   store_buffer()->MoveAllEntriesToRememberedSet();
    6005           0 :   return RememberedSet<OLD_TO_NEW>::Contains(page, slot_addr) ||
    6006           0 :          RememberedSet<OLD_TO_OLD>::Contains(page, slot_addr);
    6007             : }
    6008             : 
    6009    16921023 : void Heap::ClearRecordedSlotRange(Address start, Address end) {
    6010             :   Page* page = Page::FromAddress(start);
    6011    16719081 :   if (!page->InNewSpace()) {
    6012             :     DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
    6013             :     store_buffer()->DeleteEntry(start, end);
    6014             :     RememberedSet<OLD_TO_OLD>::RemoveRange(page, start, end,
    6015      201942 :                                            SlotSet::FREE_EMPTY_BUCKETS);
    6016             :   }
    6017    16719081 : }
    6018             : 
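                     : // Records an OLD_TO_NEW typed slot for a new-space |value| referenced from
                     : // |host|. For targets in a constant pool, the constant pool entry address
                     : // is recorded instead of the pc of the RelocInfo.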
    6019     2034644 : void Heap::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
    6020             :                                    Object* value) {
    6021             :   DCHECK(InNewSpace(value));
    6022             :   Page* source_page = Page::FromAddress(reinterpret_cast<Address>(host));
    6023             :   RelocInfo::Mode rmode = rinfo->rmode();
    6024             :   Address addr = rinfo->pc();
    6025     1017322 :   SlotType slot_type = SlotTypeForRelocInfoMode(rmode);
    6026     1017322 :   if (rinfo->IsInConstantPool()) {
    6027             :     addr = rinfo->constant_pool_entry_address();
    6028             :     if (RelocInfo::IsCodeTarget(rmode)) {
    6029             :       slot_type = CODE_ENTRY_SLOT;
    6030             :     } else {
    6031             :       DCHECK(RelocInfo::IsEmbeddedObject(rmode));
    6032             :       slot_type = OBJECT_SLOT;
    6033             :     }
    6034             :   }
    6035             :   RememberedSet<OLD_TO_NEW>::InsertTyped(
    6036     1017322 :       source_page, reinterpret_cast<Address>(host), slot_type, addr);
    6037     1017322 : }
    6038             : 
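                     : // Walks the relocation information of |code| and records a write into code
                     : // for each embedded object pointer.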
    6039      167158 : void Heap::RecordWritesIntoCode(Code* code) {
    6040      403629 :   for (RelocIterator it(code, RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT));
    6041       69313 :        !it.done(); it.next()) {
    6042      138626 :     RecordWriteIntoCode(code, it.rinfo(), it.rinfo()->target_object());
    6043             :   }
    6044      167158 : }
    6045             : 
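                     : // The iterators below step through the heap's spaces in a fixed order and
                     : // return NULL once every space has been visited.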
    6046     5876382 : Space* AllSpaces::next() {
    6047     5876382 :   switch (counter_++) {
    6048             :     case NEW_SPACE:
    6049     4896985 :       return heap_->new_space();
    6050             :     case OLD_SPACE:
    6051     1958794 :       return heap_->old_space();
    6052             :     case CODE_SPACE:
    6053     1958794 :       return heap_->code_space();
    6054             :     case MAP_SPACE:
    6055     1958794 :       return heap_->map_space();
    6056             :     case LO_SPACE:
    6057     1958794 :       return heap_->lo_space();
    6058             :     default:
    6059             :       return NULL;
    6060             :   }
    6061             : }
    6062             : 
    6063      699764 : PagedSpace* PagedSpaces::next() {
    6064      699764 :   switch (counter_++) {
    6065             :     case OLD_SPACE:
    6066      524823 :       return heap_->old_space();
    6067             :     case CODE_SPACE:
    6068      349882 :       return heap_->code_space();
    6069             :     case MAP_SPACE:
    6070      349882 :       return heap_->map_space();
    6071             :     default:
    6072             :       return NULL;
    6073             :   }
    6074             : }
    6075             : 
    6076             : 
    6077      735045 : OldSpace* OldSpaces::next() {
    6078      735045 :   switch (counter_++) {
    6079             :     case OLD_SPACE:
    6080      490030 :       return heap_->old_space();
    6081             :     case CODE_SPACE:
    6082      490030 :       return heap_->code_space();
    6083             :     default:
    6084             :       return NULL;
    6085             :   }
    6086             : }
    6087             : 
    6088        2431 : SpaceIterator::SpaceIterator(Heap* heap)
    6089       27000 :     : heap_(heap), current_space_(FIRST_SPACE - 1) {}
    6090             : 
    6091       27000 : SpaceIterator::~SpaceIterator() {
    6092       27000 : }
    6093             : 
    6094             : 
    6095       14586 : bool SpaceIterator::has_next() {
    6096             :   // Iterate until there are no more spaces.
    6097      137431 :   return current_space_ != LAST_SPACE;
    6098             : }
    6099             : 
    6100       12155 : Space* SpaceIterator::next() {
    6101             :   DCHECK(has_next());
    6102      147155 :   return heap_->space(++current_space_);
    6103             : }
    6104             : 
    6105             : 
    6106        1878 : class HeapObjectsFilter {
    6107             :  public:
    6108        1878 :   virtual ~HeapObjectsFilter() {}
    6109             :   virtual bool SkipObject(HeapObject* object) = 0;
    6110             : };
    6111             : 
    6112             : 
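                     : // Used by HeapIterator to skip unreachable objects: reachable objects are
                     : // marked black by a transitive closure from the roots, and SkipObject()
                     : // then filters out fillers and any object that remained white.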
    6113             : class UnreachableObjectsFilter : public HeapObjectsFilter {
    6114             :  public:
    6115        1878 :   explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) {
    6116        1878 :     MarkReachableObjects();
    6117             :   }
    6118             : 
    6119        1878 :   ~UnreachableObjectsFilter() {
    6120        1878 :     heap_->mark_compact_collector()->ClearMarkbits();
    6121        1878 :   }
    6122             : 
    6123    32790758 :   bool SkipObject(HeapObject* object) {
    6124    32790758 :     if (object->IsFiller()) return true;
    6125             :     return ObjectMarking::IsWhite(object, MarkingState::Internal(object));
    6126             :   }
    6127             : 
    6128             :  private:
    6129        1878 :   class MarkingVisitor : public ObjectVisitor, public RootVisitor {
    6130             :    public:
    6131        1878 :     MarkingVisitor() : marking_stack_(10) {}
    6132             : 
    6133    77628109 :     void VisitPointers(HeapObject* host, Object** start,
    6134             :                        Object** end) override {
    6135    77628109 :       MarkPointers(start, end);
    6136    77628109 :     }
    6137             : 
    6138     4824636 :     void VisitRootPointers(Root root, Object** start, Object** end) override {
    6139     4824636 :       MarkPointers(start, end);
    6140     4824636 :     }
    6141             : 
    6142        1878 :     void TransitiveClosure() {
    6143    29489893 :       while (!marking_stack_.is_empty()) {
    6144    58974152 :         HeapObject* obj = marking_stack_.RemoveLast();
    6145    29486137 :         obj->Iterate(this);
    6146             :       }
    6147        1878 :     }
    6148             : 
    6149             :    private:
    6150    82452745 :     void MarkPointers(Object** start, Object** end) {
    6151   317172774 :       for (Object** p = start; p < end; p++) {
    6152   469440058 :         if (!(*p)->IsHeapObject()) continue;
    6153             :         HeapObject* obj = HeapObject::cast(*p);
    6154             :         // Use Marking instead of ObjectMarking to avoid adjusting the live
    6155             :         // bytes counter.
    6156             :         MarkBit mark_bit =
    6157             :             ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
    6158   194536668 :         if (Marking::IsWhite(mark_bit)) {
    6159             :           Marking::WhiteToBlack(mark_bit);
    6160    29486137 :           marking_stack_.Add(obj);
    6161             :         }
    6162             :       }
    6163    82452745 :     }
    6164             :     List<HeapObject*> marking_stack_;
    6165             :   };
    6166             : 
    6167        1878 :   void MarkReachableObjects() {
    6168             :     MarkingVisitor visitor;
    6169        1878 :     heap_->IterateRoots(&visitor, VISIT_ALL);
    6170        1878 :     visitor.TransitiveClosure();
    6171        1878 :   }
    6172             : 
    6173             :   Heap* heap_;
    6174             :   DisallowHeapAllocation no_allocation_;
    6175             : };
    6176             : 
    6177       24569 : HeapIterator::HeapIterator(Heap* heap,
    6178             :                            HeapIterator::HeapObjectsFiltering filtering)
    6179             :     : no_heap_allocation_(),
    6180             :       heap_(heap),
    6181             :       filtering_(filtering),
    6182             :       filter_(nullptr),
    6183             :       space_iterator_(nullptr),
    6184       24569 :       object_iterator_(nullptr) {
    6185             :   heap_->MakeHeapIterable();
    6186       24569 :   heap_->heap_iterator_start();
    6187             :   // Start the iteration.
    6188       49138 :   space_iterator_ = new SpaceIterator(heap_);
    6189       24569 :   switch (filtering_) {
    6190             :     case kFilterUnreachable:
    6191        3756 :       filter_ = new UnreachableObjectsFilter(heap_);
    6192        1878 :       break;
    6193             :     default:
    6194             :       break;
    6195             :   }
    6196       73707 :   object_iterator_ = space_iterator_->next()->GetObjectIterator();
    6197       24569 : }
    6198             : 
    6199             : 
    6200       24569 : HeapIterator::~HeapIterator() {
    6201       24569 :   heap_->heap_iterator_end();
    6202             : #ifdef DEBUG
    6203             :   // Assert that in filtering mode we have iterated through all
    6204             :   // objects. Otherwise, the heap will be left in an inconsistent state.
    6205             :   if (filtering_ != kNoFiltering) {
    6206             :     DCHECK(object_iterator_ == nullptr);
    6207             :   }
    6208             : #endif
    6209       24569 :   delete space_iterator_;
    6210       24569 :   delete filter_;
    6211       24569 : }
    6212             : 
    6213             : 
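                     : // Returns the next heap object, consulting the filter (if any) to skip
                     : // objects; NextObject() advances through the spaces one object iterator at
                     : // a time.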
    6214   366877973 : HeapObject* HeapIterator::next() {
    6215   366877973 :   if (filter_ == nullptr) return NextObject();
    6216             : 
    6217    29488015 :   HeapObject* obj = NextObject();
    6218    29488015 :   while ((obj != nullptr) && (filter_->SkipObject(obj))) obj = NextObject();
    6219             :   return obj;
    6220             : }
    6221             : 
    6222             : 
    6223   370182594 : HeapObject* HeapIterator::NextObject() {
    6224             :   // No iterator means we are done.
    6225   370182594 :   if (object_iterator_.get() == nullptr) return nullptr;
    6226             : 
    6227   370182594 :   if (HeapObject* obj = object_iterator_.get()->Next()) {
    6228             :     // If the current iterator has more objects, we are fine.
    6229             :     return obj;
    6230             :   } else {
    6231             :     // Go through the spaces looking for one that has objects.
    6232      245690 :     while (space_iterator_->has_next()) {
    6233      196552 :       object_iterator_ = space_iterator_->next()->GetObjectIterator();
    6234       98276 :       if (HeapObject* obj = object_iterator_.get()->Next()) {
    6235             :         return obj;
    6236             :       }
    6237             :     }
    6238             :   }
    6239             :   // Done with the last space.
    6240             :   object_iterator_.reset(nullptr);
    6241             :   return nullptr;
    6242             : }
    6243             : 
    6244             : 
    6245      122507 : void Heap::UpdateTotalGCTime(double duration) {
    6246      122507 :   if (FLAG_trace_gc_verbose) {
    6247           0 :     total_gc_time_ms_ += duration;
    6248             :   }
    6249      122507 : }
    6250             : 
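                     : // Removes entries that have been cleared (the hole) or that resolved to
                     : // non-external strings via ThinString, and moves strings that were promoted
                     : // out of new space to the old-space list.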
    6251       53346 : void Heap::ExternalStringTable::CleanUpNewSpaceStrings() {
    6252             :   int last = 0;
    6253       53346 :   Isolate* isolate = heap_->isolate();
    6254      110842 :   for (int i = 0; i < new_space_strings_.length(); ++i) {
    6255       61283 :     Object* o = new_space_strings_[i];
    6256        2075 :     if (o->IsTheHole(isolate)) {
    6257             :       continue;
    6258             :     }
    6259        1712 :     if (o->IsThinString()) {
    6260             :       o = ThinString::cast(o)->actual();
    6261           0 :       if (!o->IsExternalString()) continue;
    6262             :     }
    6263             :     DCHECK(o->IsExternalString());
    6264        1712 :     if (heap_->InNewSpace(o)) {
    6265        3424 :       new_space_strings_[last++] = o;
    6266             :     } else {
    6267           0 :       old_space_strings_.Add(o);
    6268             :     }
    6269             :   }
    6270             :   new_space_strings_.Rewind(last);
    6271       53346 :   new_space_strings_.Trim();
    6272       53346 : }
    6273             : 
    6274       53346 : void Heap::ExternalStringTable::CleanUpAll() {
    6275       53346 :   CleanUpNewSpaceStrings();
    6276             :   int last = 0;
    6277       53346 :   Isolate* isolate = heap_->isolate();
    6278     1970388 :   for (int i = 0; i < old_space_strings_.length(); ++i) {
    6279     3771952 :     Object* o = old_space_strings_[i];
    6280      931848 :     if (o->IsTheHole(isolate)) {
    6281             :       continue;
    6282             :     }
    6283      923062 :     if (o->IsThinString()) {
    6284             :       o = ThinString::cast(o)->actual();
    6285           0 :       if (!o->IsExternalString()) continue;
    6286             :     }
    6287             :     DCHECK(o->IsExternalString());
    6288             :     DCHECK(!heap_->InNewSpace(o));
    6289     1846124 :     old_space_strings_[last++] = o;
    6290             :   }
    6291             :   old_space_strings_.Rewind(last);
    6292       53346 :   old_space_strings_.Trim();
    6293             : #ifdef VERIFY_HEAP
    6294             :   if (FLAG_verify_heap) {
    6295             :     Verify();
    6296             :   }
    6297             : #endif
    6298       53346 : }
    6299             : 
    6300       59285 : void Heap::ExternalStringTable::TearDown() {
    6301      163706 :   for (int i = 0; i < new_space_strings_.length(); ++i) {
    6302      126989 :     Object* o = new_space_strings_[i];
    6303       22568 :     if (o->IsThinString()) {
    6304             :       o = ThinString::cast(o)->actual();
    6305           0 :       if (!o->IsExternalString()) continue;
    6306             :     }
    6307             :     heap_->FinalizeExternalString(ExternalString::cast(o));
    6308             :   }
    6309             :   new_space_strings_.Free();
    6310     2034760 :   for (int i = 0; i < old_space_strings_.length(); ++i) {
    6311     2933570 :     Object* o = old_space_strings_[i];
    6312      958094 :     if (o->IsThinString()) {
    6313             :       o = ThinString::cast(o)->actual();
    6314           0 :       if (!o->IsExternalString()) continue;
    6315             :     }
    6316             :     heap_->FinalizeExternalString(ExternalString::cast(o));
    6317             :   }
    6318             :   old_space_strings_.Free();
    6319       59285 : }
    6320             : 
    6321             : 
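                     : // Keeps a small ring buffer of recently unmapped page addresses, tagged so
                     : // that they can be recognized in crash dumps.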
    6322      878371 : void Heap::RememberUnmappedPage(Address page, bool compacted) {
    6323      878371 :   uintptr_t p = reinterpret_cast<uintptr_t>(page);
    6324             :   // Tag the page pointer to make it findable in the dump file.
    6325      878371 :   if (compacted) {
    6326        9613 :     p ^= 0xc1ead & (Page::kPageSize - 1);  // Cleared.
    6327             :   } else {
    6328      868758 :     p ^= 0x1d1ed & (Page::kPageSize - 1);  // I died.
    6329             :   }
    6330      939153 :   remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
    6331      939153 :       reinterpret_cast<Address>(p);
    6332      939153 :   remembered_unmapped_pages_index_++;
    6333      939153 :   remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
    6334      878371 : }
    6335             : 
    6336             : 
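                     : // Strong root ranges are kept in a singly-linked list of [start, end)
                     : // intervals; UnregisterStrongRoots removes every entry whose start pointer
                     : // matches.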
    6337     1217574 : void Heap::RegisterStrongRoots(Object** start, Object** end) {
    6338     1217574 :   StrongRootsList* list = new StrongRootsList();
    6339     1217574 :   list->next = strong_roots_list_;
    6340     1217574 :   list->start = start;
    6341     1217574 :   list->end = end;
    6342     1217574 :   strong_roots_list_ = list;
    6343     1217574 : }
    6344             : 
    6345             : 
    6346     1217573 : void Heap::UnregisterStrongRoots(Object** start) {
    6347             :   StrongRootsList* prev = NULL;
    6348     1217573 :   StrongRootsList* list = strong_roots_list_;
    6349     4007950 :   while (list != nullptr) {
    6350     1572802 :     StrongRootsList* next = list->next;
    6351     1572802 :     if (list->start == start) {
    6352     1217575 :       if (prev) {
    6353          32 :         prev->next = next;
    6354             :       } else {
    6355     1217543 :         strong_roots_list_ = next;
    6356             :       }
    6357     1217575 :       delete list;
    6358             :     } else {
    6359             :       prev = list;
    6360             :     }
    6361             :     list = next;
    6362             :   }
    6363     1217575 : }
    6364             : 
    6365             : 
    6366           0 : size_t Heap::NumberOfTrackedHeapObjectTypes() {
    6367           0 :   return ObjectStats::OBJECT_STATS_COUNT;
    6368             : }
    6369             : 
    6370             : 
    6371           0 : size_t Heap::ObjectCountAtLastGC(size_t index) {
    6372           0 :   if (live_object_stats_ == nullptr || index >= ObjectStats::OBJECT_STATS_COUNT)
    6373             :     return 0;
    6374           0 :   return live_object_stats_->object_count_last_gc(index);
    6375             : }
    6376             : 
    6377             : 
    6378           0 : size_t Heap::ObjectSizeAtLastGC(size_t index) {
    6379           0 :   if (live_object_stats_ == nullptr || index >= ObjectStats::OBJECT_STATS_COUNT)
    6380             :     return 0;
    6381           0 :   return live_object_stats_->object_size_last_gc(index);
    6382             : }
    6383             : 
    6384             : 
    6385           0 : bool Heap::GetObjectTypeName(size_t index, const char** object_type,
    6386             :                              const char** object_sub_type) {
    6387           0 :   if (index >= ObjectStats::OBJECT_STATS_COUNT) return false;
    6388             : 
    6389           0 :   switch (static_cast<int>(index)) {
    6390             : #define COMPARE_AND_RETURN_NAME(name) \
    6391             :   case name:                          \
    6392             :     *object_type = #name;             \
    6393             :     *object_sub_type = "";            \
    6394             :     return true;
    6395           0 :     INSTANCE_TYPE_LIST(COMPARE_AND_RETURN_NAME)
    6396             : #undef COMPARE_AND_RETURN_NAME
    6397             : #define COMPARE_AND_RETURN_NAME(name)                      \
    6398             :   case ObjectStats::FIRST_CODE_KIND_SUB_TYPE + Code::name: \
    6399             :     *object_type = "CODE_TYPE";                            \
    6400             :     *object_sub_type = "CODE_KIND/" #name;                 \
    6401             :     return true;
    6402           0 :     CODE_KIND_LIST(COMPARE_AND_RETURN_NAME)
    6403             : #undef COMPARE_AND_RETURN_NAME
    6404             : #define COMPARE_AND_RETURN_NAME(name)                  \
    6405             :   case ObjectStats::FIRST_FIXED_ARRAY_SUB_TYPE + name: \
    6406             :     *object_type = "FIXED_ARRAY_TYPE";                 \
    6407             :     *object_sub_type = #name;                          \
    6408             :     return true;
    6409           0 :     FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COMPARE_AND_RETURN_NAME)
    6410             : #undef COMPARE_AND_RETURN_NAME
    6411             : #define COMPARE_AND_RETURN_NAME(name)                                  \
    6412             :   case ObjectStats::FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - \
    6413             :       Code::kFirstCodeAge:                                             \
    6414             :     *object_type = "CODE_TYPE";                                        \
    6415             :     *object_sub_type = "CODE_AGE/" #name;                              \
    6416             :     return true;
    6417           0 :     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
    6418             : #undef COMPARE_AND_RETURN_NAME
    6419             :   }
    6420             :   return false;
    6421             : }
    6422             : 
    6423             : 
    6424             : // static
    6425    30377302 : int Heap::GetStaticVisitorIdForMap(Map* map) {
    6426    62103673 :   return StaticVisitorBase::GetVisitorId(map);
    6427             : }
    6428             : 
    6429           1 : const char* AllocationSpaceName(AllocationSpace space) {
    6430           1 :   switch (space) {
    6431             :     case NEW_SPACE:
    6432             :       return "NEW_SPACE";
    6433             :     case OLD_SPACE:
    6434           1 :       return "OLD_SPACE";
    6435             :     case CODE_SPACE:
    6436           0 :       return "CODE_SPACE";
    6437             :     case MAP_SPACE:
    6438           0 :       return "MAP_SPACE";
    6439             :     case LO_SPACE:
    6440           0 :       return "LO_SPACE";
    6441             :     default:
    6442           0 :       UNREACHABLE();
    6443             :   }
    6444             :   return NULL;
    6445             : }
    6446             : 
    6447             : }  // namespace internal
    6448             : }  // namespace v8

Generated by: LCOV version 1.10