// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include "src/base/bounded-page-allocator.h"
#include "src/base/platform/platform.h"
#include "src/heap/factory.h"
#include "src/heap/spaces-inl.h"
#include "src/objects-inl.h"
#include "src/objects/free-space.h"
#include "src/snapshot/snapshot.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/heap-utils.h"

namespace v8 {
namespace internal {
namespace heap {

// Temporarily sets a given allocator in an isolate.
class TestMemoryAllocatorScope {
 public:
  TestMemoryAllocatorScope(Isolate* isolate, size_t max_capacity,
                           size_t code_range_size)
      : isolate_(isolate),
        old_allocator_(std::move(isolate->heap()->memory_allocator_)) {
    isolate->heap()->memory_allocator_.reset(
        new MemoryAllocator(isolate, max_capacity, code_range_size));
  }

  MemoryAllocator* allocator() { return isolate_->heap()->memory_allocator(); }

  ~TestMemoryAllocatorScope() {
    isolate_->heap()->memory_allocator()->TearDown();
    isolate_->heap()->memory_allocator_.swap(old_allocator_);
  }

 private:
  Isolate* isolate_;
  std::unique_ptr<MemoryAllocator> old_allocator_;

  DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
};

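// A usage sketch (mirroring how the tests below use the scope): construct it
// to swap in a fresh MemoryAllocator, grab the allocator via allocator(), and
// rely on the destructor to tear the temporary allocator down and restore the
// isolate's original one.
//
//   TestMemoryAllocatorScope test_scope(isolate, heap->MaxReserved(), 0);
//   MemoryAllocator* memory_allocator = test_scope.allocator();
//   // ... allocate and free pages or chunks against memory_allocator ...
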
// Temporarily sets a given code page allocator in an isolate.
class TestCodePageAllocatorScope {
 public:
  TestCodePageAllocatorScope(Isolate* isolate,
                             v8::PageAllocator* code_page_allocator)
      : isolate_(isolate),
        old_code_page_allocator_(
            isolate->heap()->memory_allocator()->code_page_allocator()) {
    isolate->heap()->memory_allocator()->code_page_allocator_ =
        code_page_allocator;
  }

  ~TestCodePageAllocatorScope() {
    isolate_->heap()->memory_allocator()->code_page_allocator_ =
        old_code_page_allocator_;
  }

 private:
  Isolate* isolate_;
  v8::PageAllocator* old_code_page_allocator_;

  DISALLOW_COPY_AND_ASSIGN(TestCodePageAllocatorScope);
};

static void VerifyMemoryChunk(Isolate* isolate, Heap* heap,
                              v8::PageAllocator* code_page_allocator,
                              size_t reserve_area_size, size_t commit_area_size,
                              Executability executable, Space* space) {
  TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
                                                0);
  MemoryAllocator* memory_allocator = test_allocator_scope.allocator();
  TestCodePageAllocatorScope test_code_page_allocator_scope(
      isolate, code_page_allocator);

  v8::PageAllocator* page_allocator =
      memory_allocator->page_allocator(executable);

  size_t allocatable_memory_area_offset =
      MemoryChunkLayout::ObjectStartOffsetInMemoryChunk(space->identity());
  size_t guard_size =
      (executable == EXECUTABLE) ? MemoryChunkLayout::CodePageGuardSize() : 0;

  MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(
      reserve_area_size, commit_area_size, executable, space);
  size_t reserved_size =
      (executable == EXECUTABLE)
          ? allocatable_memory_area_offset +
                RoundUp(reserve_area_size, page_allocator->CommitPageSize()) +
                guard_size
          : RoundUp(allocatable_memory_area_offset + reserve_area_size,
                    page_allocator->CommitPageSize());
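  // Reading the two branches above: a NOT_EXECUTABLE chunk rounds the header
  // offset plus the requested area up to commit-page granularity as one unit,
  // whereas an EXECUTABLE chunk rounds the area itself up to a page first and
  // then adds the header offset and the code guard pages on top.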
  CHECK(memory_chunk->size() == reserved_size);
  CHECK(memory_chunk->area_start() <
        memory_chunk->address() + memory_chunk->size());
  CHECK(memory_chunk->area_end() <=
        memory_chunk->address() + memory_chunk->size());
  CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);

  memory_allocator->Free<MemoryAllocator::kFull>(memory_chunk);
}

static unsigned int PseudorandomAreaSize() {
  static uint32_t lo = 2345;
  lo = 18273 * (lo & 0xFFFFF) + (lo >> 16);
  return lo & 0xFFFFF;
}
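
// A note on the generator above: it is a small multiply-with-carry-style
// recurrence whose results are masked to 20 bits, so every returned size is
// at most 0xFFFFF (just under 1 MB) and always fits the 1 MB reservation used
// in TEST(MemoryChunk). For example, the first call yields
// (18273 * 2345) & 0xFFFFF = 907145.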


TEST(MemoryChunk) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();

  size_t reserve_area_size = 1 * MB;
  size_t initial_commit_area_size;

  for (int i = 0; i < 100; i++) {
    initial_commit_area_size =
        RoundUp(PseudorandomAreaSize(), page_allocator->CommitPageSize());

    // With CodeRange.
    const size_t code_range_size = 32 * MB;
    VirtualMemory code_range_reservation(page_allocator, code_range_size,
                                         nullptr, MemoryChunk::kAlignment);
    CHECK(code_range_reservation.IsReserved());

    base::BoundedPageAllocator code_page_allocator(
        page_allocator, code_range_reservation.address(),
        code_range_reservation.size(), MemoryChunk::kAlignment);

    VerifyMemoryChunk(isolate, heap, &code_page_allocator, reserve_area_size,
                      initial_commit_area_size, EXECUTABLE, heap->code_space());

    VerifyMemoryChunk(isolate, heap, &code_page_allocator, reserve_area_size,
                      initial_commit_area_size, NOT_EXECUTABLE,
                      heap->old_space());
  }
}


TEST(MemoryAllocator) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
                                                0);
  MemoryAllocator* memory_allocator = test_allocator_scope.allocator();

  int total_pages = 0;
  OldSpace faked_space(heap);
  CHECK(!faked_space.first_page());
  CHECK(!faked_space.last_page());
  Page* first_page = memory_allocator->AllocatePage(
      faked_space.AreaSize(), static_cast<PagedSpace*>(&faked_space),
      NOT_EXECUTABLE);

  faked_space.memory_chunk_list().PushBack(first_page);
  CHECK(first_page->next_page() == nullptr);
  total_pages++;

  for (Page* p = first_page; p != nullptr; p = p->next_page()) {
    CHECK(p->owner() == &faked_space);
  }

  // Allocate a second page; the iteration and ownership checks must still
  // hold with more than one page in the space.
  Page* other = memory_allocator->AllocatePage(
      faked_space.AreaSize(), static_cast<PagedSpace*>(&faked_space),
      NOT_EXECUTABLE);
  total_pages++;
  faked_space.memory_chunk_list().PushBack(other);
  int page_count = 0;
  for (Page* p = first_page; p != nullptr; p = p->next_page()) {
    CHECK(p->owner() == &faked_space);
    page_count++;
  }
  CHECK(total_pages == page_count);

  Page* second_page = first_page->next_page();
  CHECK_NOT_NULL(second_page);

  // OldSpace's destructor will tear down the space and free up all pages.
}

TEST(ComputeDiscardMemoryAreas) {
  base::AddressRegion memory_area;
  size_t page_size = MemoryAllocator::GetCommitPageSize();
  size_t free_header_size = FreeSpace::kSize;
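
  // The expectations below are consistent with a simple rule: the discardable
  // region runs from RoundUp(addr + free_header_size, page_size) to
  // RoundDown(addr + size, page_size) and is empty whenever that span is not
  // positive; the FreeSpace header itself must stay committed.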

  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(0, 0);
  CHECK_EQ(memory_area.begin(), 0);
  CHECK_EQ(memory_area.size(), 0);

  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(
      0, page_size + free_header_size);
  CHECK_EQ(memory_area.begin(), 0);
  CHECK_EQ(memory_area.size(), 0);

  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(
      page_size - free_header_size, page_size + free_header_size);
  CHECK_EQ(memory_area.begin(), page_size);
  CHECK_EQ(memory_area.size(), page_size);

  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(page_size, page_size);
  CHECK_EQ(memory_area.begin(), 0);
  CHECK_EQ(memory_area.size(), 0);

  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(
      page_size / 2, page_size + page_size / 2);
  CHECK_EQ(memory_area.begin(), page_size);
  CHECK_EQ(memory_area.size(), page_size);

  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(
      page_size / 2, page_size + page_size / 4);
  CHECK_EQ(memory_area.begin(), 0);
  CHECK_EQ(memory_area.size(), 0);

  memory_area =
      MemoryAllocator::ComputeDiscardMemoryArea(page_size / 2, page_size * 3);
  CHECK_EQ(memory_area.begin(), page_size);
  CHECK_EQ(memory_area.size(), page_size * 2);
}

TEST(NewSpace) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
                                                0);
  MemoryAllocator* memory_allocator = test_allocator_scope.allocator();

  NewSpace new_space(heap, memory_allocator->data_page_allocator(),
                     CcTest::heap()->InitialSemiSpaceSize(),
                     CcTest::heap()->InitialSemiSpaceSize());
  CHECK(new_space.MaximumCapacity());

  while (new_space.Available() >= kMaxRegularHeapObjectSize) {
    CHECK(new_space.Contains(
        new_space.AllocateRawUnaligned(kMaxRegularHeapObjectSize)
            .ToObjectChecked()));
  }

  new_space.TearDown();
  memory_allocator->unmapper()->EnsureUnmappingCompleted();
}


TEST(OldSpace) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  TestMemoryAllocatorScope test_allocator_scope(isolate, heap->MaxReserved(),
                                                0);

  OldSpace* s = new OldSpace(heap);
  CHECK_NOT_NULL(s);

  while (s->Available() > 0) {
    s->AllocateRawUnaligned(kMaxRegularHeapObjectSize).ToObjectChecked();
  }

  delete s;
}

TEST(LargeObjectSpace) {
  // This test does not initialize allocated objects, which confuses the
  // incremental marker.
  FLAG_incremental_marking = false;
  v8::V8::Initialize();

  LargeObjectSpace* lo = CcTest::heap()->lo_space();
  CHECK_NOT_NULL(lo);

  int lo_size = Page::kPageSize;

  Object obj = lo->AllocateRaw(lo_size).ToObjectChecked();
  CHECK(obj->IsHeapObject());

  HeapObject ho = HeapObject::cast(obj);

  CHECK(lo->Contains(HeapObject::cast(obj)));

  CHECK(lo->Contains(ho));

  while (true) {
    {
      AllocationResult allocation = lo->AllocateRaw(lo_size);
      if (allocation.IsRetry()) break;
    }
  }

  CHECK(!lo->IsEmpty());

  CHECK(lo->AllocateRaw(lo_size).IsRetry());
}

#ifndef DEBUG
// The test verifies that the committed size of a space is less than some
// threshold. Debug builds pull in all sorts of additional instrumentation
// that increases heap sizes. E.g. CSA_ASSERT creates on-heap strings for
// error messages. These messages are also not stable if files are moved and
// modified during the build process (jumbo builds).
TEST(SizeOfInitialHeap) {
  if (i::FLAG_always_opt) return;
  // Bootstrapping without a snapshot causes more allocations.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  if (!isolate->snapshot_available()) return;
  HandleScope scope(isolate);
  v8::Local<v8::Context> context = CcTest::isolate()->GetCurrentContext();
  // Skip this test on the custom snapshot builder.
  if (!CcTest::global()
           ->Get(context, v8_str("assertEquals"))
           .ToLocalChecked()
           ->IsUndefined()) {
    return;
  }
  // Initial size of LO_SPACE
  size_t initial_lo_space = isolate->heap()->lo_space()->Size();

  // The limit for each space for an empty isolate containing just the
  // snapshot.
  // On PPC the page size is 64K, causing more internal fragmentation and
  // hence requiring a larger limit.
#if V8_OS_LINUX && V8_HOST_ARCH_PPC
  const size_t kMaxInitialSizePerSpace = 3 * MB;
#else
  const size_t kMaxInitialSizePerSpace = 2 * MB;
#endif

  // Freshly initialized VM gets by with the snapshot size (which is below
  // kMaxInitialSizePerSpace per space).
  Heap* heap = isolate->heap();
  int page_count[LAST_GROWABLE_PAGED_SPACE + 1] = {0, 0, 0, 0};
  for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
       i++) {
    // Debug code can be very large, so skip CODE_SPACE if we are generating it.
    if (i == CODE_SPACE && i::FLAG_debug_code) continue;

    page_count[i] = heap->paged_space(i)->CountTotalPages();
    // Check that the initial heap is also below the limit.
    CHECK_LE(heap->paged_space(i)->CommittedMemory(), kMaxInitialSizePerSpace);
  }

  // Executing the empty script gets by with the same number of pages, i.e.,
  // requires no extra space.
  CompileRun("/*empty*/");
  for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
       i++) {
    // Skip CODE_SPACE, since we had to generate code even for an empty script.
    if (i == CODE_SPACE) continue;
    CHECK_EQ(page_count[i], isolate->heap()->paged_space(i)->CountTotalPages());
  }

  // No large objects required to perform the above steps.
  CHECK_EQ(initial_lo_space,
           static_cast<size_t>(isolate->heap()->lo_space()->Size()));
}
#endif  // !DEBUG

static HeapObject AllocateUnaligned(NewSpace* space, int size) {
  AllocationResult allocation = space->AllocateRaw(size, kWordAligned);
  CHECK(!allocation.IsRetry());
  HeapObject filler;
  CHECK(allocation.To(&filler));
  space->heap()->CreateFillerObjectAt(filler->address(), size,
                                      ClearRecordedSlots::kNo);
  return filler;
}

static HeapObject AllocateUnaligned(PagedSpace* space, int size) {
  AllocationResult allocation = space->AllocateRaw(size, kWordAligned);
  CHECK(!allocation.IsRetry());
  HeapObject filler;
  CHECK(allocation.To(&filler));
  space->heap()->CreateFillerObjectAt(filler->address(), size,
                                      ClearRecordedSlots::kNo);
  return filler;
}

static HeapObject AllocateUnaligned(LargeObjectSpace* space, int size) {
  AllocationResult allocation = space->AllocateRaw(size);
  CHECK(!allocation.IsRetry());
  HeapObject filler;
  CHECK(allocation.To(&filler));
  return filler;
}
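
// Note: the LargeObjectSpace overload above intentionally creates no filler;
// each large object occupies its own page, so there is no trailing gap in a
// linear allocation area that would have to stay iterable.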

class Observer : public AllocationObserver {
 public:
  explicit Observer(intptr_t step_size)
      : AllocationObserver(step_size), count_(0) {}

  void Step(int bytes_allocated, Address addr, size_t) override { count_++; }

  int count() const { return count_; }

 private:
  int count_;
};
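
// As exercised below, Step() fires once each time at least step_size bytes
// have been allocated since the previous notification; a single allocation
// much larger than the step still produces exactly one notification.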

template <typename T>
void testAllocationObserver(Isolate* i_isolate, T* space) {
  Observer observer1(128);
  space->AddAllocationObserver(&observer1);

  // The observer should not get notified while less than 128 bytes have been
  // allocated in total.
  AllocateUnaligned(space, 64);
  CHECK_EQ(observer1.count(), 0);

  // The observer should get called when we have allocated exactly 128 bytes.
  AllocateUnaligned(space, 64);
  CHECK_EQ(observer1.count(), 1);

  // Another >128 bytes should get another notification.
  AllocateUnaligned(space, 136);
  CHECK_EQ(observer1.count(), 2);

  // Allocating a large object should get only one notification.
  AllocateUnaligned(space, 1024);
  CHECK_EQ(observer1.count(), 3);

  // Allocating another 2048 bytes in small objects should get 16
  // notifications.
  for (int i = 0; i < 64; ++i) {
    AllocateUnaligned(space, 32);
  }
  CHECK_EQ(observer1.count(), 19);

  // Multiple observers should work.
  Observer observer2(96);
  space->AddAllocationObserver(&observer2);

  AllocateUnaligned(space, 2048);
  CHECK_EQ(observer1.count(), 20);
  CHECK_EQ(observer2.count(), 1);

  AllocateUnaligned(space, 104);
  CHECK_EQ(observer1.count(), 20);
  CHECK_EQ(observer2.count(), 2);

  // Callback should stop getting called after an observer is removed.
  space->RemoveAllocationObserver(&observer1);

  AllocateUnaligned(space, 384);
  CHECK_EQ(observer1.count(), 20);  // no more notifications.
  CHECK_EQ(observer2.count(), 3);   // this one is still active.

  // Ensure that PauseAllocationObserversScope works correctly.
  AllocateUnaligned(space, 48);
  CHECK_EQ(observer2.count(), 3);
  {
    PauseAllocationObserversScope pause_observers(i_isolate->heap());
    CHECK_EQ(observer2.count(), 3);
    AllocateUnaligned(space, 384);
    CHECK_EQ(observer2.count(), 3);
  }
  CHECK_EQ(observer2.count(), 3);
  // Coupled with the 48 bytes allocated before the pause, another 48 bytes
  // allocated here should trigger a notification.
  AllocateUnaligned(space, 48);
  CHECK_EQ(observer2.count(), 4);

  space->RemoveAllocationObserver(&observer2);
  AllocateUnaligned(space, 384);
  CHECK_EQ(observer1.count(), 20);
  CHECK_EQ(observer2.count(), 4);
}

UNINITIALIZED_TEST(AllocationObserver) {
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();

    Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);

    testAllocationObserver<NewSpace>(i_isolate, i_isolate->heap()->new_space());
    // Old space is used but the code path is shared for all
    // classes inheriting from PagedSpace.
    testAllocationObserver<PagedSpace>(i_isolate,
                                       i_isolate->heap()->old_space());
    testAllocationObserver<LargeObjectSpace>(i_isolate,
                                             i_isolate->heap()->lo_space());
  }
  isolate->Dispose();
}

UNINITIALIZED_TEST(InlineAllocationObserverCadence) {
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();

    Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);

    // Clear out any pre-existing garbage to make the test consistent
    // across snapshot/no-snapshot builds.
    CcTest::CollectAllGarbage(i_isolate);

    NewSpace* new_space = i_isolate->heap()->new_space();

    Observer observer1(512);
    new_space->AddAllocationObserver(&observer1);
    Observer observer2(576);
    new_space->AddAllocationObserver(&observer2);

    for (int i = 0; i < 512; ++i) {
      AllocateUnaligned(new_space, 32);
    }

    new_space->RemoveAllocationObserver(&observer1);
    new_space->RemoveAllocationObserver(&observer2);

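    // 512 allocations of 32 bytes each total 16384 bytes, so observer1
    // (step 512) expects 16384 / 512 = 32 notifications and observer2
    // (step 576) expects floor(16384 / 576) = 28.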
    CHECK_EQ(observer1.count(), 32);
    CHECK_EQ(observer2.count(), 28);
  }
  isolate->Dispose();
}

HEAP_TEST(Regress777177) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  PagedSpace* old_space = heap->old_space();
  Observer observer(128);
  old_space->AddAllocationObserver(&observer);

  int area_size = old_space->AreaSize();
  int max_object_size = kMaxRegularHeapObjectSize;
  int filler_size = area_size - max_object_size;

  {
    // Ensure a new linear allocation area on a fresh page.
    AlwaysAllocateScope always_allocate(isolate);
    heap::SimulateFullSpace(old_space);
    AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), filler_size,
                               ClearRecordedSlots::kNo);
  }

  {
    // Allocate all bytes of the linear allocation area. This moves top_ and
    // top_on_previous_step_ to the next page.
    AllocationResult result =
        old_space->AllocateRaw(max_object_size, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    // Simulate allocation folding moving the top pointer back.
    old_space->SetTopAndLimit(obj->address(), old_space->limit());
  }

  {
    // This triggers the assertion from crbug.com/777177.
    AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), filler_size,
                               ClearRecordedSlots::kNo);
  }
  old_space->RemoveAllocationObserver(&observer);
}

HEAP_TEST(Regress791582) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  NewSpace* new_space = heap->new_space();
  if (new_space->TotalCapacity() < new_space->MaximumCapacity()) {
    new_space->Grow();
  }

  int until_page_end = static_cast<int>(new_space->limit() - new_space->top());

  if (!IsAligned(until_page_end, kTaggedSize)) {
    // The test works only if the size of the allocation area is a multiple of
    // the pointer size. This is usually the case unless some allocation
    // observer is already active (e.g. the incremental marking observer).
    return;
  }

  Observer observer(128);
  new_space->AddAllocationObserver(&observer);

  {
    AllocationResult result =
        new_space->AllocateRaw(until_page_end, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), until_page_end,
                               ClearRecordedSlots::kNo);
    // Simulate allocation folding moving the top pointer back.
    *new_space->allocation_top_address() = obj->address();
  }

  {
    // This triggers the assertion from crbug.com/791582.
    AllocationResult result = new_space->AllocateRaw(256, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), 256, ClearRecordedSlots::kNo);
  }
  new_space->RemoveAllocationObserver(&observer);
}

TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) {
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  heap::SealCurrentObjects(CcTest::heap());

  // Prepare page that only contains a single object and a trailing FreeSpace
  // filler.
  Handle<FixedArray> array =
      isolate->factory()->NewFixedArray(128, AllocationType::kOld);
  Page* page = Page::FromHeapObject(*array);

  // Reset space so high water mark is consistent.
  PagedSpace* old_space = CcTest::heap()->old_space();
  old_space->FreeLinearAllocationArea();
  old_space->ResetFreeList();

  HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
  CHECK(filler->IsFreeSpace());
  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
  size_t should_have_shrunk = RoundDown(
      static_cast<size_t>(MemoryChunkLayout::AllocatableMemoryInDataPage() -
                          array->Size()),
      CommitPageSize());
  CHECK_EQ(should_have_shrunk, shrunk);
}
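
// In the test above the page holds only the 128-element array; everything
// between the array's end and the page's area end is one big FreeSpace
// filler, so the releasable tail is that gap rounded down to commit-page
// granularity, which is exactly what should_have_shrunk computes.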

TEST(ShrinkPageToHighWaterMarkNoFiller) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  heap::SealCurrentObjects(CcTest::heap());

  const int kFillerSize = 0;
  std::vector<Handle<FixedArray>> arrays =
      heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize);
  Handle<FixedArray> array = arrays.back();
  Page* page = Page::FromHeapObject(*array);
  CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);

  // Reset space so high water mark and fillers are consistent.
  PagedSpace* old_space = CcTest::heap()->old_space();
  old_space->ResetFreeList();
  old_space->FreeLinearAllocationArea();

  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
  CHECK_EQ(0u, shrunk);
}
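
// This test and the one- and two-word filler variants below share one idea:
// a trailing gap of zero, one, or two words is smaller than a commit page,
// so no page-aligned tail can be released and the shrink must return 0.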

TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  heap::SealCurrentObjects(CcTest::heap());

  const int kFillerSize = kTaggedSize;
  std::vector<Handle<FixedArray>> arrays =
      heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize);
  Handle<FixedArray> array = arrays.back();
  Page* page = Page::FromHeapObject(*array);
  CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);

  // Reset space so high water mark and fillers are consistent.
  PagedSpace* old_space = CcTest::heap()->old_space();
  old_space->FreeLinearAllocationArea();
  old_space->ResetFreeList();

  HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
  CHECK_EQ(filler->map(),
           ReadOnlyRoots(CcTest::heap()).one_pointer_filler_map());

  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
  CHECK_EQ(0u, shrunk);
}

TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  heap::SealCurrentObjects(CcTest::heap());

  const int kFillerSize = 2 * kTaggedSize;
  std::vector<Handle<FixedArray>> arrays =
      heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize);
  Handle<FixedArray> array = arrays.back();
  Page* page = Page::FromHeapObject(*array);
  CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);

  // Reset space so high water mark and fillers are consistent.
  PagedSpace* old_space = CcTest::heap()->old_space();
  old_space->FreeLinearAllocationArea();
  old_space->ResetFreeList();

  HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
  CHECK_EQ(filler->map(),
           ReadOnlyRoots(CcTest::heap()).two_pointer_filler_map());

  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
  CHECK_EQ(0u, shrunk);
}

}  // namespace heap
}  // namespace internal
}  // namespace v8