// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_INL_H_
#define V8_HEAP_SPACES_INL_H_

#include "src/base/atomic-utils.h"
#include "src/base/bounded-page-allocator.h"
#include "src/base/v8-fallthrough.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/msan.h"
#include "src/objects/code-inl.h"

namespace v8 {
namespace internal {

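// PageIteratorImpl provides pre- and post-increment over a space's linked
// list of pages; the post-increment form returns the iterator's previous
// position, as usual.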
template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE>& PageIteratorImpl<PAGE_TYPE>::operator++() {
  p_ = p_->next_page();
  return *this;
}

template <class PAGE_TYPE>
PageIteratorImpl<PAGE_TYPE> PageIteratorImpl<PAGE_TYPE>::operator++(int) {
  PageIteratorImpl<PAGE_TYPE> tmp(*this);
  operator++();
  return tmp;
}

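// A PageRange is the half-open sequence of pages covering [start, limit):
// end_ is the successor of the page containing limit, so the page holding
// the allocation area itself is included. In debug builds, new-space ranges
// are additionally checked for semispace validity.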
PageRange::PageRange(Address start, Address limit)
    : begin_(Page::FromAddress(start)),
      end_(Page::FromAllocationAreaAddress(limit)->next_page()) {
#ifdef DEBUG
  if (begin_->InNewSpace()) {
    SemiSpace::AssertValidRange(start, limit);
  }
#endif  // DEBUG
}

// -----------------------------------------------------------------------------
// SemiSpaceIterator

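// Next() returns the next live object in the semispace, stepping over page
// boundaries and skipping fillers; an empty HeapObject signals that limit_
// has been reached.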
HeapObject SemiSpaceIterator::Next() {
  while (current_ != limit_) {
    if (Page::IsAlignedToPageSize(current_)) {
      Page* page = Page::FromAllocationAreaAddress(current_);
      page = page->next_page();
      DCHECK(page);
      current_ = page->area_start();
      if (current_ == limit_) return HeapObject();
    }
    HeapObject object = HeapObject::FromAddress(current_);
    current_ += object->Size();
    if (!object->IsFiller()) {
      return object;
    }
  }
  return HeapObject();
}

// -----------------------------------------------------------------------------
// HeapObjectIterator

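// Returns the next object on the current page, advancing page by page until
// one yields an object; an empty HeapObject marks the end of the space.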
HeapObject HeapObjectIterator::Next() {
  do {
    HeapObject next_obj = FromCurrentPage();
    if (!next_obj.is_null()) return next_obj;
  } while (AdvanceToNextPage());
  return HeapObject();
}

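// Scans the current page linearly. The unused gap between the space's top
// and limit is skipped in one step, fillers are ignored, and object sizes
// are sanity-checked (with a code-space-specific bound for code objects) in
// debug builds.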
HeapObject HeapObjectIterator::FromCurrentPage() {
  while (cur_addr_ != cur_end_) {
    if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
      cur_addr_ = space_->limit();
      continue;
    }
    HeapObject obj = HeapObject::FromAddress(cur_addr_);
    const int obj_size = obj->Size();
    cur_addr_ += obj_size;
    DCHECK_LE(cur_addr_, cur_end_);
    if (!obj->IsFiller()) {
      if (obj->IsCode()) {
        DCHECK_EQ(space_, space_->heap()->code_space());
        DCHECK_CODEOBJECT_SIZE(obj_size, space_);
      } else {
        DCHECK_OBJECT_SIZE(obj_size);
      }
      return obj;
    }
  }
  return HeapObject();
}

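// External backing store bytes (e.g. ArrayBuffer contents) are accounted
// both per space and globally on the heap. The base::Checked* helpers update
// the counters atomically and DCHECK against wrap-around.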
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  heap()->IncrementExternalBackingStoreBytes(type, amount);
}

void Space::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                               size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  heap()->DecrementExternalBackingStoreBytes(type, amount);
}

void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                          Space* from, Space* to,
                                          size_t amount) {
  if (from == to) return;

  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
}

// -----------------------------------------------------------------------------
// SemiSpace

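// Containment tests are O(1): they read the in-to-space/in-from-space flags
// on the object's chunk. The ContainsSlow variants walk the page list
// instead and are intended for verification.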
bool SemiSpace::Contains(HeapObject o) {
  return id_ == kToSpace ? MemoryChunk::FromHeapObject(o)->InToSpace()
                         : MemoryChunk::FromHeapObject(o)->InFromSpace();
}

bool SemiSpace::Contains(Object o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool SemiSpace::ContainsSlow(Address a) {
  for (Page* p : *this) {
    if (p == MemoryChunk::FromAddress(a)) return true;
  }
  return false;
}

// --------------------------------------------------------------------------
// NewSpace

bool NewSpace::Contains(Object o) {
  return o->IsHeapObject() && Contains(HeapObject::cast(o));
}

bool NewSpace::Contains(HeapObject o) {
  return MemoryChunk::FromHeapObject(o)->InNewSpace();
}

bool NewSpace::ContainsSlow(Address a) {
  return from_space_.ContainsSlow(a) || to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContainsSlow(Address a) {
  return to_space_.ContainsSlow(a);
}

bool NewSpace::ToSpaceContains(Object o) { return to_space_.Contains(o); }
bool NewSpace::FromSpaceContains(Object o) { return from_space_.Contains(o); }

bool PagedSpace::Contains(Address addr) {
  return MemoryChunk::FromAnyPointerAddress(addr)->owner() == this;
}

bool PagedSpace::Contains(Object o) {
  if (!o.IsHeapObject()) return false;
  return Page::FromAddress(o.ptr())->owner() == this;
}

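// When a page is taken out of its space (e.g. as an evacuation candidate),
// its free-list categories are detached from the owning free list; relinking
// reverses this and returns how many bytes became available again.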
void PagedSpace::UnlinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  page->ForAllFreeListCategories([this](FreeListCategory* category) {
    DCHECK_EQ(free_list(), category->owner());
    category->set_free_list(nullptr);
    free_list()->RemoveCategory(category);
  });
}

size_t PagedSpace::RelinkFreeListCategories(Page* page) {
  DCHECK_EQ(this, page->owner());
  size_t added = 0;
  page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
    category->set_free_list(&free_list_);
    added += category->available();
    category->Relink();
  });
  DCHECK_EQ(page->AvailableInFreeList(),
            page->AvailableInFreeListFromAllocatedBytes());
  return added;
}

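// If the object was the most recent bump-pointer allocation, it can be freed
// by simply moving top back down to the object's start address.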
bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
  if (allocation_info_.top() != kNullAddress) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

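// A chunk's header contains a sentinel slot holding a tagged pointer back to
// the chunk's own base address. FromAnyPointerAddress relies on it: starting
// from the aligned base of the given address, it steps backwards one
// chunk-sized region at a time until it finds a real header, which locates
// the (possibly large) page containing the address.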
bool MemoryChunk::HasHeaderSentinel(Address slot_addr) {
  Address base = BaseAddress(slot_addr);
  if (slot_addr < base + kHeaderSize) return false;
  return HeapObject::FromAddress(base) ==
         ObjectSlot(base + kHeaderSentinelOffset).Relaxed_Load();
}

MemoryChunk* MemoryChunk::FromAnyPointerAddress(Address addr) {
  while (!HasHeaderSentinel(addr)) {
    addr = BaseAddress(addr) - 1;
  }
  return FromAddress(addr);
}

void MemoryChunk::IncrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
  owner()->IncrementExternalBackingStoreBytes(type, amount);
}

void MemoryChunk::DecrementExternalBackingStoreBytes(
    ExternalBackingStoreType type, size_t amount) {
  base::CheckedDecrement(&external_backing_store_bytes_[type], amount);
  owner()->DecrementExternalBackingStoreBytes(type, amount);
}

void MemoryChunk::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
                                                MemoryChunk* from,
                                                MemoryChunk* to,
                                                size_t amount) {
  base::CheckedDecrement(&(from->external_backing_store_bytes_[type]), amount);
  base::CheckedIncrement(&(to->external_backing_store_bytes_[type]), amount);
  Space::MoveExternalBackingStoreBytes(type, from->owner(), to->owner(),
                                       amount);
}

bool MemoryChunk::IsInNewLargeObjectSpace() const {
  return owner()->identity() == NEW_LO_SPACE;
}

void Page::MarkNeverAllocateForTesting() {
  DCHECK(this->owner()->identity() != NEW_SPACE);
  DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
  SetFlag(NEVER_ALLOCATE_ON_PAGE);
  SetFlag(NEVER_EVACUATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

void Page::MarkEvacuationCandidate() {
  DCHECK(!IsFlagSet(NEVER_EVACUATE));
  DCHECK_NULL(slot_set<OLD_TO_OLD>());
  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  SetFlag(EVACUATION_CANDIDATE);
  reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
}

void Page::ClearEvacuationCandidate() {
  if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
    DCHECK_NULL(slot_set<OLD_TO_OLD>());
    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
  }
  ClearFlag(EVACUATION_CANDIDATE);
  InitializeFreeListCategories();
}

HeapObject LargePage::GetObject() {
  return HeapObject::FromAddress(area_start());
}

OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
    : heap_(heap),
      state_(kOldSpaceState),
      old_iterator_(heap->old_space()->begin()),
      code_iterator_(heap->code_space()->begin()),
      map_iterator_(heap->map_space()->begin()),
      lo_iterator_(heap->lo_space()->begin()),
      code_lo_iterator_(heap->code_lo_space()->begin()) {}

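// Yields every old-generation chunk in a fixed order: old space, map space,
// code space, then the regular and code large-object spaces. Each state
// falls through to the next once its iterator is exhausted; nullptr marks
// the end.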
MemoryChunk* OldGenerationMemoryChunkIterator::next() {
  switch (state_) {
    case kOldSpaceState: {
      if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
      state_ = kMapState;
      V8_FALLTHROUGH;
    }
    case kMapState: {
      if (map_iterator_ != heap_->map_space()->end()) return *(map_iterator_++);
      state_ = kCodeState;
      V8_FALLTHROUGH;
    }
    case kCodeState: {
      if (code_iterator_ != heap_->code_space()->end())
        return *(code_iterator_++);
      state_ = kLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kLargeObjectState: {
      if (lo_iterator_ != heap_->lo_space()->end()) return *(lo_iterator_++);
      state_ = kCodeLargeObjectState;
      V8_FALLTHROUGH;
    }
    case kCodeLargeObjectState: {
      if (code_lo_iterator_ != heap_->code_lo_space()->end())
        return *(code_lo_iterator_++);
      state_ = kFinishedState;
      V8_FALLTHROUGH;
    }
    case kFinishedState:
      return nullptr;
    default:
      break;
  }
  UNREACHABLE();
}

Page* FreeList::GetPageForCategoryType(FreeListCategoryType type) {
  return top(type) ? top(type)->page() : nullptr;
}

FreeList* FreeListCategory::owner() { return free_list_; }

bool FreeListCategory::is_linked() {
  return prev_ != nullptr || next_ != nullptr;
}

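// Bump-pointer allocation inside a LocalAllocationBuffer. If alignment
// padding is required, the gap in front of the object is filled with a
// filler object; Retry() signals that the buffer is exhausted.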
AllocationResult LocalAllocationBuffer::AllocateRawAligned(
    int size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + size_in_bytes;
  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    return heap_->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                    filler_size);
  }

  return AllocationResult(HeapObject::FromAddress(current_top));
}

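// Fast path: the current linear allocation area already has room. Otherwise
// fall back to SlowRefillLinearAllocationArea, which may refill the area
// from the free list, grow the space, or fail.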
bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes) {
  if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
    return true;
  }
  return SlowRefillLinearAllocationArea(size_in_bytes);
}

HeapObject PagedSpace::AllocateLinearly(int size_in_bytes) {
  Address current_top = allocation_info_.top();
  Address new_top = current_top + size_in_bytes;
  DCHECK_LE(new_top, allocation_info_.limit());
  allocation_info_.set_top(new_top);
  return HeapObject::FromAddress(current_top);
}

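// Like AllocateLinearly, but pads to the requested alignment. On success
// with padding, *size_in_bytes is increased by the filler size so the caller
// can account for the bytes actually consumed; an empty HeapObject means the
// linear area was too small.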
HeapObject PagedSpace::TryAllocateLinearlyAligned(
    int* size_in_bytes, AllocationAlignment alignment) {
  Address current_top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(current_top, alignment);

  Address new_top = current_top + filler_size + *size_in_bytes;
  if (new_top > allocation_info_.limit()) return HeapObject();

  allocation_info_.set_top(new_top);
  if (filler_size > 0) {
    *size_in_bytes += filler_size;
    return heap()->PrecedeWithFiller(HeapObject::FromAddress(current_top),
                                     filler_size);
  }

  return HeapObject::FromAddress(current_top);
}

AllocationResult PagedSpace::AllocateRawUnaligned(
    int size_in_bytes, UpdateSkipList update_skip_list) {
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  if (!EnsureLinearAllocationArea(size_in_bytes)) {
    return AllocationResult::Retry(identity());
  }
  HeapObject object = AllocateLinearly(size_in_bytes);
  DCHECK(!object.is_null());
  if (update_skip_list == UPDATE_SKIP_LIST && identity() == CODE_SPACE) {
    SkipList::Update(object->address(), size_in_bytes);
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}


AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
                                                AllocationAlignment alignment) {
  DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
  DCHECK_IMPLIES(identity() == RO_SPACE, heap()->CanAllocateInReadOnlySpace());
  int allocation_size = size_in_bytes;
  HeapObject object = TryAllocateLinearlyAligned(&allocation_size, alignment);
  if (object.is_null()) {
    // We don't know exactly how much filler we need to align until space is
    // allocated, so assume the worst case.
    int filler_size = Heap::GetMaximumFillToAlign(alignment);
    allocation_size += filler_size;
    if (!EnsureLinearAllocationArea(allocation_size)) {
      return AllocationResult::Retry(identity());
    }
    allocation_size = size_in_bytes;
    object = TryAllocateLinearlyAligned(&allocation_size, alignment);
    DCHECK(!object.is_null());
  }
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), size_in_bytes);
  return object;
}

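// Top-level allocation entry point for paged spaces. Only 32-bit hosts need
// the aligned path (doubles require 8-byte alignment there). For non-local
// spaces a successful allocation also advances the inline allocation step,
// which drives allocation observers such as incremental marking.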
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
                                         AllocationAlignment alignment) {
  if (top_on_previous_step_ && top() < top_on_previous_step_ &&
      SupportsInlineAllocation()) {
    // Generated code decreased the top() pointer to do folded allocations.
    // The top_on_previous_step_ can be one byte beyond the current page.
    DCHECK_NE(top(), kNullAddress);
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
    top_on_previous_step_ = top();
  }
  size_t bytes_since_last =
      top_on_previous_step_ ? top() - top_on_previous_step_ : 0;

  DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
#ifdef V8_HOST_ARCH_32_BIT
  AllocationResult result =
      alignment == kDoubleAligned
          ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
          : AllocateRawUnaligned(size_in_bytes);
#else
  AllocationResult result = AllocateRawUnaligned(size_in_bytes);
#endif
  HeapObject heap_obj;
  if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
    DCHECK_IMPLIES(
        heap()->incremental_marking()->black_allocation(),
        heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
    AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
                   heap_obj->address(), size_in_bytes);
    StartNextInlineAllocationStep();
  }
  return result;
}


// -----------------------------------------------------------------------------
// NewSpace

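// New-space allocation bumps top within the current semispace page. When the
// request does not fit, EnsureAllocation tries to make room (e.g. by
// advancing to a fresh page); if that fails, Retry() is returned so the
// caller can trigger a scavenge.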
AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                              AllocationAlignment alignment) {
  Address top = allocation_info_.top();
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (allocation_info_.limit() - top <
      static_cast<uintptr_t>(aligned_size_in_bytes)) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, alignment)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
    filler_size = Heap::GetFillToAlign(top, alignment);
    aligned_size_in_bytes = size_in_bytes + filler_size;
  }

  HeapObject obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + aligned_size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  if (filler_size > 0) {
    obj = heap()->PrecedeWithFiller(obj, filler_size);
  }

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}


AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes) {
  Address top = allocation_info_.top();
  if (allocation_info_.limit() < top + size_in_bytes) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
      return AllocationResult::Retry();
    }

    top = allocation_info_.top();
  }

  HeapObject obj = HeapObject::FromAddress(top);
  allocation_info_.set_top(top + size_in_bytes);
  DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);

  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj->address(), size_in_bytes);

  return obj;
}


AllocationResult NewSpace::AllocateRaw(int size_in_bytes,
                                       AllocationAlignment alignment) {
  if (top() < top_on_previous_step_) {
    // Generated code decreased the top() pointer to do folded allocations
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_));
    top_on_previous_step_ = top();
  }
#ifdef V8_HOST_ARCH_32_BIT
  return alignment == kDoubleAligned
             ? AllocateRawAligned(size_in_bytes, kDoubleAligned)
             : AllocateRawUnaligned(size_in_bytes);
#else
  return AllocateRawUnaligned(size_in_bytes);
#endif
}

V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
    int size_in_bytes, AllocationAlignment alignment) {
  base::MutexGuard guard(&mutex_);
  return AllocateRaw(size_in_bytes, alignment);
}

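// Wraps an AllocationResult in a LocalAllocationBuffer covering
// [top, top + size); if the underlying allocation failed, an invalid buffer
// is returned instead.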
LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                        AllocationResult result,
                                                        intptr_t size) {
  if (result.IsRetry()) return InvalidBuffer();
  HeapObject obj;
  bool ok = result.To(&obj);
  USE(ok);
  DCHECK(ok);
  Address top = HeapObject::cast(obj)->address();
  return LocalAllocationBuffer(heap, LinearAllocationArea(top, top + size));
}


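// Merging succeeds only when the buffers are contiguous in memory, i.e. this
// buffer's top coincides with the other buffer's limit; the other buffer is
// reset on success. TryFreeLast mirrors PagedSpace::TryFreeLast for the
// buffer's own bump pointer.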
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
}

bool LocalAllocationBuffer::TryFreeLast(HeapObject object, int object_size) {
  if (IsValid()) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
      return true;
    }
  }
  return false;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_INL_H_