Line data Source code
1 : // Copyright 2011 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #ifndef V8_HEAP_SPACES_INL_H_
6 : #define V8_HEAP_SPACES_INL_H_
7 :
8 : #include "src/heap/spaces.h"
9 :
10 : #include "src/base/atomic-utils.h"
11 : #include "src/base/bounded-page-allocator.h"
12 : #include "src/base/v8-fallthrough.h"
13 : #include "src/heap/heap-inl.h"
14 : #include "src/heap/incremental-marking.h"
15 : #include "src/msan.h"
16 : #include "src/objects/code-inl.h"
17 :
18 : namespace v8 {
19 : namespace internal {
20 :
// Pre-increment: advances the iterator to the next page in the space's
// linked page list and returns the advanced iterator.
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}
26 :
27 : template <class PAGE_TYPE>
28 : PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
29 : PageIteratorImpl<PAGE_TYPE> tmp(*this);
30 : operator++();
31 : return tmp;
32 : }
33 :
// Builds a [begin, end) page range covering [start, limit): begin_ is the
// page containing |start|, end_ is the successor of the page containing
// |limit| (|limit| may sit one past a page's allocation area, hence
// FromAllocationAreaAddress).
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}
43 :
44 : // -----------------------------------------------------------------------------
45 : // SemiSpaceIterator
46 :
// Returns the next live (non-filler) object in the semispace, or an empty
// HeapObject once current_ reaches limit_.
HeapObject SemiSpaceIterator::Next() {
  while (current_ != limit_) {
    // When current_ lands exactly on a page boundary, hop to the start of
    // the next page's allocation area.
    if (Page::IsAlignedToPageSize(current_)) {
      Page* page = Page::FromAllocationAreaAddress(current_);
      page = page->next_page();
      DCHECK(page);
      current_ = page->area_start();
      if (current_ == limit_) return HeapObject();
    }
    HeapObject object = HeapObject::FromAddress(current_);
    current_ += object->Size();
    // Filler objects are skipped; only real objects reach the caller.
    if (!object->IsFiller()) {
      return object;
    }
  }
  return HeapObject();
}
64 :
65 : // -----------------------------------------------------------------------------
66 : // HeapObjectIterator
67 :
68 85888487 : HeapObject HeapObjectIterator::Next() {
69 81926 : do {
70 85939630 : HeapObject next_obj = FromCurrentPage();
71 85939630 : if (!next_obj.is_null()) return next_obj;
72 : } while (AdvanceToNextPage());
73 30783 : return HeapObject();
74 : }
75 :
// Scans the current page from cur_addr_ to cur_end_ and returns the next
// non-filler object, or an empty HeapObject when the page is exhausted.
HeapObject HeapObjectIterator::FromCurrentPage() {
  while (cur_addr_ != cur_end_) {
    // Skip the space's active linear allocation area [top, limit): memory
    // there is not yet formatted as objects.
    if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
      cur_addr_ = space_->limit();
      continue;
    }
    HeapObject obj = HeapObject::FromAddress(cur_addr_);
    const int obj_size = obj->Size();
    cur_addr_ += obj_size;
    DCHECK_LE(cur_addr_, cur_end_);
    if (!obj->IsFiller()) {
      // Code objects only live in code space and have their own size bound.
      if (obj->IsCode()) {
        DCHECK_EQ(space_, space_->heap()->code_space());
        DCHECK_CODEOBJECT_SIZE(obj_size, space_);
      } else {
        DCHECK_OBJECT_SIZE(obj_size);
      }
      return obj;
    }
  }
  return HeapObject();
}
98 :
// Bumps this space's accounting of off-heap (external) memory of |type| by
// |amount| and mirrors the update on the heap-wide counter.
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}
104 :
// Lowers this space's accounting of off-heap (external) memory of |type| by
// |amount| and mirrors the update on the heap-wide counter.
void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}
110 :
// Transfers |amount| bytes of external-memory accounting of |type| from
// space |from| to space |to|. The heap-wide total is unaffected by a move,
// so nothing needs to be done when both spaces are the same.
void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}
119 :
120 : // -----------------------------------------------------------------------------
121 : // SemiSpace
122 :
123 : bool SemiSpace::Contains(HeapObject o) {
124 1936854 : MemoryChunk* memory_chunk = MemoryChunk::FromHeapObject(o);
125 1936854 : if (memory_chunk->IsLargePage()) return false;
126 1936854 : return id_ == kToSpace ? memory_chunk->IsToPage()
127 1936854 : : memory_chunk->IsFromPage();
128 : }
129 :
130 1936854 : bool SemiSpace::Contains(Object o) {
131 3873708 : return o->IsHeapObject() && Contains(HeapObject::cast(o));
132 : }
133 :
134 : bool SemiSpace::ContainsSlow(Address a) {
135 695 : for (Page* p : *this) {
136 680 : if (p == MemoryChunk::FromAddress(a)) return true;
137 : }
138 : return false;
139 : }
140 :
141 : // --------------------------------------------------------------------------
142 : // NewSpace
143 :
144 : bool NewSpace::Contains(Object o) {
145 : return o->IsHeapObject() && Contains(HeapObject::cast(o));
146 : }
147 :
// Returns true if |o|'s chunk is flagged as being in new space (either
// semispace).
bool NewSpace::Contains(HeapObject o) {
  return MemoryChunk::FromHeapObject(o)->InNewSpace();
}
151 :
152 10 : bool NewSpace::ContainsSlow(Address a) {
153 20 : return from_space_.ContainsSlow(a) || to_space_.ContainsSlow(a);
154 : }
155 :
// Slow containment check restricted to the active (to-) semispace.
bool NewSpace::ToSpaceContainsSlow(Address a) {
  return to_space_.ContainsSlow(a);
}

// Fast flag-based containment checks for each semispace.
bool NewSpace::ToSpaceContains(Object o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object o) { return from_space_.Contains(o); }
162 :
// Returns true if |addr| lies inside a chunk owned by this space. Accepts
// arbitrary interior pointers via FromAnyPointerAddress.
bool PagedSpace::Contains(Address addr) {
  return MemoryChunk::FromAnyPointerAddress(addr)->owner() == this;
}
166 :
// Returns true if |o| is a heap object on a page owned by this space; Smis
// are never contained.
bool PagedSpace::Contains(Object o) {
  if (!o.IsHeapObject()) return false;
  return Page::FromAddress(o.ptr())->owner() == this;
}
171 :
// Detaches all of |page|'s free-list categories from this space's free
// list, e.g. when the page is about to leave the space.
void PagedSpace::UnlinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  page->ForAllFreeListCategories([this](FreeListCategory* category) {
    DCHECK_EQ(free_list(), category->owner());
    category->set_free_list(nullptr);
    free_list()->RemoveCategory(category);
  });
}
180 :
// Re-attaches all of |page|'s free-list categories to this space's free
// list and returns the total number of free bytes made available again.
size_t PagedSpace::RelinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  size_t added = 0;
  page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
    category->set_free_list(&free_list_);
    added += category->available();
    category->Relink();
  });
  DCHECK_EQ(page->AvailableInFreeList(),
            page->AvailableInFreeListFromAllocatedBytes());
  return added;
}
193 :
194 : bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
195 393901 : if (allocation_info_.top() != kNullAddress) {
196 : const Address object_address = object->address();
197 393905 : if ((allocation_info_.top() - object_size) == object_address) {
198 : allocation_info_.set_top(object_address);
199 : return true;
200 : }
201 : }
202 : return false;
203 : }
204 :
// Returns true if the chunk-aligned base covering |slot_addr| carries a
// valid header sentinel, i.e. the word at kHeaderSentinelOffset points back
// at the chunk base. Addresses falling inside the header itself never match.
bool MemoryChunk::HasHeaderSentinel(Address slot_addr) {
  Address base = BaseAddress(slot_addr);
  if (slot_addr < base + kHeaderSize) return false;
  return HeapObject::FromAddress(base) ==
         ObjectSlot(base + kHeaderSentinelOffset).Relaxed_Load();
}
211 :
// Maps an arbitrary interior pointer — possibly into an object spanning
// multiple chunk-aligned regions — to its MemoryChunk, by stepping backwards
// one aligned region at a time until a header sentinel is found.
MemoryChunk* MemoryChunk::FromAnyPointerAddress(Address addr) {
  while (!HasHeaderSentinel(addr)) {
    addr = BaseAddress(addr) - 1;
  }
  return FromAddress(addr);
}
218 :
// Bumps this chunk's external-memory counter for |type| and forwards the
// update to the owning space (which in turn updates the heap).
void MemoryChunk::IncrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  owner()->IncrementExternalBackingStoreBytes(type, amount);
}
224 :
// Lowers this chunk's external-memory counter for |type| and forwards the
// update to the owning space (which in turn updates the heap).
void MemoryChunk::DecrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  owner()->DecrementExternalBackingStoreBytes(type, amount);
}
230 :
// Moves |amount| bytes of external-memory accounting for |type| between two
// chunks, then lets the owning spaces reconcile their own counters.
void MemoryChunk::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                                MemoryChunk* from,
                                                MemoryChunk* to,
                                                size_t amount) {
  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
  Space::MoveExternalBackingStoreBytes(type, from->owner(), to->owner(),
                                       amount);
}
240 :
// Test-only: permanently excludes this page from further allocation and
// evacuation, and evicts its entries from the owning space's free list.
void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner()->identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}
248 :
// Flags this page as a compaction target (evacuation candidate) and removes
// its free-list entries so no further allocation lands on it.
void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}
256 :
// Clears the evacuation-candidate flag and rebuilds the page's free-list
// categories. The slot-set DCHECKs are skipped when compaction was aborted,
// since an aborted page may legitimately still carry slots.
void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}
265 :
// A large page holds exactly one object, which starts at area_start().
HeapObject LargePage::GetObject() {
  return HeapObject::FromAddress(area_start());
}
269 :
// Positions a per-space page iterator at the first page of every
// old-generation space; next() then drains them in a fixed order.
OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}
278 :
// Returns the next old-generation chunk, or nullptr when all spaces have
// been visited. Spaces are drained in a fixed order (old, map, code,
// large-object, code large-object); each case falls through to the next
// space's case once its iterator is exhausted.
MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}
315 :
316 : Page* FreeList::GetPageForCategoryType(FreeListCategoryType type) {
317 59491 : return top(type) ? top(type)->page() : nullptr;
318 : }
319 :
320 : FreeList* FreeListCategory::owner() { return free_list_; }
321 :
322 : bool FreeListCategory::is_linked() {
323 0 : return prev_ != nullptr || next_ != nullptr;
324 : }
325 :
// Bump-pointer allocation inside this local buffer with explicit alignment.
// Returns a retry result when (filler + object) does not fit; otherwise
// advances top and, if alignment padding was needed, prepends a filler.
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                    filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}
342 :
343 0 : bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
344 271671320 : if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
345 0 : return true;
346 : }
347 1232386 : return SlowRefillLinearAllocationArea(size_in_bytes);
348 : }
349 :
// Unchecked bump-pointer allocation; the caller must already have ensured
// the linear area has room (see EnsureLinearAllocationArea).
HeapObject PagedSpace::AllocateLinearly(int size_in_bytes) {
  Address current_top = allocation_info_.top();
  Address new_top = current_top + size_in_bytes;
  DCHECK_LE(new_top, allocation_info_.limit());
  allocation_info_.set_top(new_top);
  return HeapObject::FromAddress(current_top);
}
357 :
// Aligned bump-pointer allocation attempt. On success any alignment filler
// is folded into *size_in_bytes and placed before the object; returns an
// empty HeapObject when the linear area has no room.
HeapObject PagedSpace::TryAllocateLinearlyAligned(
    int* size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + *size_in_bytes;
  if (new_top > allocation_info_.limit()) return HeapObject();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    *size_in_bytes += filler_size;
    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                     filler_size);
  }

  return HeapObject::FromAddress(current_top);
}
375 :
// Allocates |size_in_bytes| without alignment constraints, refilling the
// linear area on demand. Keeps the code-space skip list current and tells
// MSan the returned memory is uninitialized.
AllocationResult PagedSpace::AllocateRawUnaligned(
    int size_in_bytes, UpdateSkipList update_skip_list) {
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  if (!EnsureLinearAllocationArea(size_in_bytes)) {
    return AllocationResult::Retry(identity());
  }
  HeapObject object = AllocateLinearly(size_in_bytes);
  DCHECK(!object.is_null());
  if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) {
    SkipList::Update(object->address(), size_in_bytes);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}
390 :
391 :
// Allocates with an explicit alignment (OLD_SPACE/RO_SPACE only). First
// tries the current linear area; on failure reserves worst-case space
// (object + maximum filler) and retries, which must then succeed.
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                AllocationAlignment alignment) {
  DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  int allocation_size = size_in_bytes;
  HeapObject object = TryAllocateLinearlyAligned(&allocation_size, alignment);
  if (object.is_null()) {
    // We don't know exactly how much filler we need to align until space is
    // allocated, so assume the worst case.
    int filler_size = Heap::GetMaximumFillToAlign(alignment);
    allocation_size += filler_size;
    if (!EnsureLinearAllocationArea(allocation_size)) {
      return AllocationResult::Retry(identity());
    }
    allocation_size = size_in_bytes;
    object = TryAllocateLinearlyAligned(&allocation_size, alignment);
    DCHECK(!object.is_null());
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}
413 :
414 :
// Top-level allocation entry for paged spaces. Compensates for allocation
// folding by generated code (which may lower top()), uses the aligned path
// only on 32-bit hosts, and runs allocation-step observers for non-local
// spaces on success.
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                         AllocationAlignment alignment) {
  if (top_on_previous_step_ && top() < top_on_previous_step_ &&
      SupportsInlineAllocation()) {
    // Generated code decreased the top() pointer to do folded allocations.
    // The top_on_previous_step_ can be one byte beyond the current page.
    DCHECK_NE(top(), kNullAddress);
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
    top_on_previous_step_ = top();
  }
  // Bytes allocated since the last observer step, reported below so that
  // observers see folded allocations too.
  size_t bytes_since_last =
      top_on_previous_step_ ? top() - top_on_previous_step_ : 0;

  DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
#ifdef V8_HOST_ARCH_32_BIT
  // Only 32-bit hosts ever need double-alignment fillers.
  AllocationResult result = alignment != kWordAligned
                                ? AllocateRawAligned(size_in_bytes, alignment)
                                : AllocateRawUnaligned(size_in_bytes);
#else
  AllocationResult result = AllocateRawUnaligned(size_in_bytes);
#endif
  HeapObject heap_obj;
  if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
    AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
                   heap_obj->address(), size_in_bytes);
    StartNextInlineAllocationStep();
    DCHECK_IMPLIES(
        heap()->incremental_marking()->black_allocation(),
        heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
  }
  return result;
}
448 :
449 :
450 : // -----------------------------------------------------------------------------
451 : // NewSpace
452 :
453 :
// Aligned bump-pointer allocation in to-space. If the current area cannot
// fit object + filler, EnsureAllocation is asked to make room (which may
// move top, so the filler is recomputed afterwards).
AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                              AllocationAlignment alignment) {
  Address top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (allocation_info_.limit() - top <
      static_cast<uintptr_t>(aligned_size_in_bytes)) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, alignment)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
    filler_size = Heap::GetFillToAlign(top, alignment);
    aligned_size_in_bytes = size_in_bytes + filler_size;
  }

  HeapObject obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + aligned_size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  if (filler_size > 0) {
    obj = heap()->PrecedeWithFiller(obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}
484 :
485 :
// Unaligned bump-pointer allocation in to-space; grows the allocation area
// via EnsureAllocation when the request does not fit.
AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
  Address top = allocation_info_.top();
  if (allocation_info_.limit() < top + size_in_bytes) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
  }

  HeapObject obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}
505 :
506 :
// Entry point for new-space allocation; compensates for allocation folding
// by generated code and uses the aligned path only on 32-bit hosts.
AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                       AllocationAlignment alignment) {
  if (top() < top_on_previous_step_) {
    // Generated code decreased the top() pointer to do folded allocations
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_));
    top_on_previous_step_ = top();
  }
#ifdef V8_HOST_ARCH_32_BIT
  return alignment != kWordAligned
             ? AllocateRawAligned(size_in_bytes, alignment)
             : AllocateRawUnaligned(size_in_bytes);
#else
  return AllocateRawUnaligned(size_in_bytes);
#endif
}
523 :
// Mutex-protected variant of AllocateRaw for callers allocating into new
// space from multiple threads.
V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
    int size_in_bytes, AllocationAlignment alignment) {
  base::MutexGuard guard(&mutex_);
  return AllocateRaw(size_in_bytes, alignment);
}
529 :
// Wraps an allocation result of |size| bytes as a local allocation buffer;
// yields an invalid buffer when the allocation had to be retried.
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject obj;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj)->address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}
541 :
542 :
543 : bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
544 193988 : if (allocation_info_.top() == other->allocation_info_.limit()) {
545 : allocation_info_.set_top(other->allocation_info_.top());
546 : other->allocation_info_.Reset(kNullAddress, kNullAddress);
547 : return true;
548 : }
549 : return false;
550 : }
551 :
552 : bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
553 247039 : if (IsValid()) {
554 : const Address object_address = object->address();
555 247032 : if ((allocation_info_.top() - object_size) == object_address) {
556 : allocation_info_.set_top(object_address);
557 : return true;
558 : }
559 : }
560 : return false;
561 : }
562 :
563 : } // namespace internal
564 : } // namespace v8
565 :
566 : #endif // V8_HEAP_SPACES_INL_H_
|