Line data Source code
1 : // Copyright 2011 the V8 project authors. All rights reserved.
2 : // Redistribution and use in source and binary forms, with or without
3 : // modification, are permitted provided that the following conditions are
4 : // met:
5 : //
6 : // * Redistributions of source code must retain the above copyright
7 : // notice, this list of conditions and the following disclaimer.
8 : // * Redistributions in binary form must reproduce the above
9 : // copyright notice, this list of conditions and the following
10 : // disclaimer in the documentation and/or other materials provided
11 : // with the distribution.
12 : // * Neither the name of Google Inc. nor the names of its
13 : // contributors may be used to endorse or promote products derived
14 : // from this software without specific prior written permission.
15 : //
16 : // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 : // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 : // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 : // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 : // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 : // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 : // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 : // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 : // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 : // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 : // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 :
28 : #include <stdlib.h>
29 :
30 : #include "src/base/bounded-page-allocator.h"
31 : #include "src/base/platform/platform.h"
32 : #include "src/heap/factory.h"
33 : #include "src/heap/spaces-inl.h"
34 : #include "src/objects-inl.h"
35 : #include "src/objects/free-space.h"
36 : #include "src/snapshot/snapshot.h"
37 : #include "test/cctest/cctest.h"
38 : #include "test/cctest/heap/heap-tester.h"
39 : #include "test/cctest/heap/heap-utils.h"
40 :
41 : namespace v8 {
42 : namespace internal {
43 : namespace heap {
44 :
45 : // Temporarily sets a given allocator in an isolate.
class TestMemoryAllocatorScope {
 public:
  // Installs |allocator| as the isolate's memory allocator for the lifetime
  // of this scope, remembering the previous one so it can be restored.
  TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
      : isolate_(isolate), old_allocator_(isolate->heap()->memory_allocator()) {
    // Writes Heap::memory_allocator_ directly; presumably this class has
    // access (e.g. via friendship) — the field is set, not accessor-based.
    isolate->heap()->memory_allocator_ = allocator;
  }

  // Restores the allocator that was active when the scope was entered.
  ~TestMemoryAllocatorScope() {
    isolate_->heap()->memory_allocator_ = old_allocator_;
  }

 private:
  Isolate* isolate_;
  MemoryAllocator* old_allocator_;  // Allocator to reinstate on destruction.

  DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
};
63 :
64 : // Temporarily sets a given code page allocator in an isolate.
class TestCodePageAllocatorScope {
 public:
  // Installs |code_page_allocator| as the code-page allocator of the
  // isolate's current memory allocator for the lifetime of this scope.
  TestCodePageAllocatorScope(Isolate* isolate,
                             v8::PageAllocator* code_page_allocator)
      : isolate_(isolate),
        old_code_page_allocator_(
            isolate->heap()->memory_allocator()->code_page_allocator()) {
    isolate->heap()->memory_allocator()->code_page_allocator_ =
        code_page_allocator;
  }

  // Puts back the code page allocator captured at construction time.
  ~TestCodePageAllocatorScope() {
    isolate_->heap()->memory_allocator()->code_page_allocator_ =
        old_code_page_allocator_;
  }

 private:
  Isolate* isolate_;
  // Previous code page allocator, restored on destruction.
  v8::PageAllocator* old_code_page_allocator_;

  DISALLOW_COPY_AND_ASSIGN(TestCodePageAllocatorScope);
};
87 :
// Allocates a single MemoryChunk with the given reserve/commit sizes through
// a fresh MemoryAllocator and verifies its layout invariants: total reserved
// size, area bounds within the chunk, and the committed area size. The chunk
// is freed and the temporary allocator torn down before returning.
static void VerifyMemoryChunk(Isolate* isolate, Heap* heap,
                              v8::PageAllocator* code_page_allocator,
                              size_t reserve_area_size, size_t commit_area_size,
                              Executability executable, Space* space) {
  MemoryAllocator* memory_allocator =
      new MemoryAllocator(isolate, heap->MaxReserved(), 0);
  {
    // Route all (code page) allocations through the test allocators for the
    // duration of this scope.
    TestMemoryAllocatorScope test_allocator_scope(isolate, memory_allocator);
    TestCodePageAllocatorScope test_code_page_allocator_scope(
        isolate, code_page_allocator);

    v8::PageAllocator* page_allocator =
        memory_allocator->page_allocator(executable);

    // Offset of the first allocatable byte within a chunk of this space.
    size_t allocatable_memory_area_offset =
        MemoryChunkLayout::ObjectStartOffsetInMemoryChunk(space->identity());
    // Executable chunks carry an extra guard page region.
    size_t guard_size =
        (executable == EXECUTABLE) ? MemoryChunkLayout::CodePageGuardSize() : 0;

    MemoryChunk* memory_chunk = memory_allocator->AllocateChunk(
        reserve_area_size, commit_area_size, executable, space);
    // Expected reservation: for executable chunks the header, the rounded-up
    // reserve area, and the guard region are each accounted separately; for
    // data chunks header + area are rounded up together.
    size_t reserved_size =
        ((executable == EXECUTABLE))
            ? allocatable_memory_area_offset +
                  RoundUp(reserve_area_size, page_allocator->CommitPageSize()) +
                  guard_size
            : RoundUp(allocatable_memory_area_offset + reserve_area_size,
                      page_allocator->CommitPageSize());
    CHECK(memory_chunk->size() == reserved_size);
    // The usable area must lie strictly inside the chunk's reservation.
    CHECK(memory_chunk->area_start() <
          memory_chunk->address() + memory_chunk->size());
    CHECK(memory_chunk->area_end() <=
          memory_chunk->address() + memory_chunk->size());
    CHECK(static_cast<size_t>(memory_chunk->area_size()) == commit_area_size);

    memory_allocator->Free<MemoryAllocator::kFull>(memory_chunk);
  }
  memory_allocator->TearDown();
  delete memory_allocator;
}
128 :
// Deterministic pseudo-random generator used to pick commit-area sizes.
// Multiply-with-carry-style update on 20 bits of persistent state; returns
// a value in [0, 0xFFFFF]. The static state makes successive calls yield a
// fixed, reproducible sequence.
static unsigned int PseudorandomAreaSize() {
  static uint32_t state = 2345;
  const uint32_t low_bits = state & 0xFFFFF;
  const uint32_t carry = state >> 16;
  state = 18273 * low_bits + carry;
  return state & 0xFFFFF;
}
134 :
135 :
// Allocates chunks with 100 pseudo-random commit-area sizes, both executable
// (inside a dedicated 32 MB code range) and non-executable, and verifies
// their layout via VerifyMemoryChunk.
TEST(MemoryChunk) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  v8::PageAllocator* page_allocator = GetPlatformPageAllocator();

  size_t reserve_area_size = 1 * MB;
  size_t initial_commit_area_size;

  for (int i = 0; i < 100; i++) {
    // Commit sizes are randomized but always commit-page aligned.
    initial_commit_area_size =
        RoundUp(PseudorandomAreaSize(), page_allocator->CommitPageSize());

    // With CodeRange: reserve a dedicated region and bound code-page
    // allocations to it.
    const size_t code_range_size = 32 * MB;
    VirtualMemory code_range_reservation(page_allocator, code_range_size,
                                         nullptr, MemoryChunk::kAlignment);
    CHECK(code_range_reservation.IsReserved());

    base::BoundedPageAllocator code_page_allocator(
        page_allocator, code_range_reservation.address(),
        code_range_reservation.size(), MemoryChunk::kAlignment);

    VerifyMemoryChunk(isolate, heap, &code_page_allocator, reserve_area_size,
                      initial_commit_area_size, EXECUTABLE, heap->code_space());

    VerifyMemoryChunk(isolate, heap, &code_page_allocator, reserve_area_size,
                      initial_commit_area_size, NOT_EXECUTABLE,
                      heap->old_space());
  }
}
167 :
168 :
// Allocates pages through a standalone MemoryAllocator into a faked OldSpace
// and checks that the page list links correctly and every page reports the
// faked space as its owner.
TEST(MemoryAllocator) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();

  MemoryAllocator* memory_allocator =
      new MemoryAllocator(isolate, heap->MaxReserved(), 0);
  CHECK_NOT_NULL(memory_allocator);
  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);

  {
    int total_pages = 0;
    OldSpace faked_space(heap);
    // A freshly constructed space owns no pages yet.
    CHECK(!faked_space.first_page());
    CHECK(!faked_space.last_page());
    Page* first_page = memory_allocator->AllocatePage(
        faked_space.AreaSize(), static_cast<PagedSpace*>(&faked_space),
        NOT_EXECUTABLE);

    faked_space.memory_chunk_list().PushBack(first_page);
    CHECK(first_page->next_page() == nullptr);
    total_pages++;

    for (Page* p = first_page; p != nullptr; p = p->next_page()) {
      CHECK(p->owner() == &faked_space);
    }

    // Allocate a second page; iterating from the first page must now visit
    // exactly total_pages pages, all owned by the faked space.
    Page* other = memory_allocator->AllocatePage(
        faked_space.AreaSize(), static_cast<PagedSpace*>(&faked_space),
        NOT_EXECUTABLE);
    total_pages++;
    faked_space.memory_chunk_list().PushBack(other);
    int page_count = 0;
    for (Page* p = first_page; p != nullptr; p = p->next_page()) {
      CHECK(p->owner() == &faked_space);
      page_count++;
    }
    CHECK(total_pages == page_count);

    Page* second_page = first_page->next_page();
    CHECK_NOT_NULL(second_page);

    // OldSpace's destructor will tear down the space and free up all pages.
  }
  memory_allocator->TearDown();
  delete memory_allocator;
}
216 :
// Checks MemoryAllocator::ComputeDiscardMemoryArea(addr, size): it should
// return the page-aligned sub-region of a free region that can be discarded
// (leaving room for a FreeSpace header), or an empty region if no whole
// committed page fits.
TEST(ComputeDiscardMemoryAreas) {
  base::AddressRegion memory_area;
  size_t page_size = MemoryAllocator::GetCommitPageSize();
  size_t free_header_size = FreeSpace::kSize;

  // Empty region: nothing to discard.
  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(0, 0);
  CHECK_EQ(memory_area.begin(), 0);
  CHECK_EQ(memory_area.size(), 0);

  // Region covers one page plus a header, but after reserving the header at
  // the start no whole page remains — expect empty.
  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(
      0, page_size + free_header_size);
  CHECK_EQ(memory_area.begin(), 0);
  CHECK_EQ(memory_area.size(), 0);

  // Header fits before the page boundary; exactly one full page is
  // discardable starting at page_size.
  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(
      page_size - free_header_size, page_size + free_header_size);
  CHECK_EQ(memory_area.begin(), page_size);
  CHECK_EQ(memory_area.size(), page_size);

  // Page-aligned region of one page: the header consumes part of the page,
  // so no whole page can be discarded.
  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(page_size, page_size);
  CHECK_EQ(memory_area.begin(), 0);
  CHECK_EQ(memory_area.size(), 0);

  // Unaligned region spanning 1.5 pages: one whole page is discardable.
  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(
      page_size / 2, page_size + page_size / 2);
  CHECK_EQ(memory_area.begin(), page_size);
  CHECK_EQ(memory_area.size(), page_size);

  // Unaligned region of 1.25 pages: no whole aligned page fits — empty.
  memory_area = MemoryAllocator::ComputeDiscardMemoryArea(
      page_size / 2, page_size + page_size / 4);
  CHECK_EQ(memory_area.begin(), 0);
  CHECK_EQ(memory_area.size(), 0);

  // Unaligned region of 3 pages: two whole aligned pages are discardable.
  memory_area =
      MemoryAllocator::ComputeDiscardMemoryArea(page_size / 2, page_size * 3);
  CHECK_EQ(memory_area.begin(), page_size);
  CHECK_EQ(memory_area.size(), page_size * 2);
}
255 :
// Sets up a standalone NewSpace, fills it with max-regular-size objects
// until it runs out of room, and verifies every allocation lands inside the
// space. Exercises setup/teardown of the semispaces.
TEST(NewSpace) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  MemoryAllocator* memory_allocator =
      new MemoryAllocator(isolate, heap->MaxReserved(), 0);
  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);

  NewSpace new_space(heap, memory_allocator->data_page_allocator(),
                     CcTest::heap()->InitialSemiSpaceSize(),
                     CcTest::heap()->InitialSemiSpaceSize());
  CHECK(new_space.MaximumCapacity());

  // Fill the space; every successfully allocated object must be contained
  // in the space.
  while (new_space.Available() >= kMaxRegularHeapObjectSize) {
    CHECK(new_space.Contains(
        new_space.AllocateRawUnaligned(kMaxRegularHeapObjectSize)
            .ToObjectChecked()));
  }

  new_space.TearDown();
  // Make sure background unmapping finishes before the allocator goes away.
  memory_allocator->unmapper()->EnsureUnmappingCompleted();
  memory_allocator->TearDown();
  delete memory_allocator;
}
279 :
280 :
// Creates a standalone OldSpace and allocates until Available() is
// exhausted; mainly exercises space construction and destruction with a
// test-scoped MemoryAllocator.
TEST(OldSpace) {
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  MemoryAllocator* memory_allocator =
      new MemoryAllocator(isolate, heap->MaxReserved(), 0);
  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);

  OldSpace* s = new OldSpace(heap);
  CHECK_NOT_NULL(s);

  while (s->Available() > 0) {
    s->AllocateRawUnaligned(kMaxRegularHeapObjectSize).ToObjectChecked();
  }

  // OldSpace's destructor frees all of its pages.
  delete s;
  memory_allocator->TearDown();
  delete memory_allocator;
}
299 :
// Allocates directly in the large-object space, verifies containment and
// FindObject lookup, then fills the space to exhaustion and checks that
// further allocations fail with a retry.
TEST(LargeObjectSpace) {
  // This test does not initialize allocated objects, which confuses the
  // incremental marker.
  FLAG_incremental_marking = false;
  v8::V8::Initialize();

  LargeObjectSpace* lo = CcTest::heap()->lo_space();
  CHECK_NOT_NULL(lo);

  int lo_size = Page::kPageSize;

  Object obj = lo->AllocateRaw(lo_size).ToObjectChecked();
  CHECK(obj->IsHeapObject());

  HeapObject ho = HeapObject::cast(obj);

  CHECK(lo->Contains(HeapObject::cast(obj)));

  // FindObject on the object's start address must find the same object.
  CHECK(lo->FindObject(ho->address()) == obj);

  CHECK(lo->Contains(ho));

  // Fill the space until an allocation signals retry (i.e. a GC would be
  // required to proceed).
  while (true) {
    {
      AllocationResult allocation = lo->AllocateRaw(lo_size);
      if (allocation.IsRetry()) break;
    }
  }

  CHECK(!lo->IsEmpty());

  // Once full, allocation keeps failing.
  CHECK(lo->AllocateRaw(lo_size).IsRetry());
}
333 :
334 : #ifndef DEBUG
335 : // The test verifies that committed size of a space is less then some threshold.
336 : // Debug builds pull in all sorts of additional instrumentation that increases
337 : // heap sizes. E.g. CSA_ASSERT creates on-heap strings for error messages. These
338 : // messages are also not stable if files are moved and modified during the build
339 : // process (jumbo builds).
// Verifies that the committed memory of each growable paged space in a
// freshly bootstrapped (snapshot-based) isolate stays below a fixed limit,
// and that running an empty script requires no additional pages or large
// objects.
TEST(SizeOfInitialHeap) {
  if (i::FLAG_always_opt) return;
  // Bootstrapping without a snapshot causes more allocations.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  if (!isolate->snapshot_available()) return;
  HandleScope scope(isolate);
  v8::Local<v8::Context> context = CcTest::isolate()->GetCurrentContext();
  // Skip this test on the custom snapshot builder.
  if (!CcTest::global()
           ->Get(context, v8_str("assertEquals"))
           .ToLocalChecked()
           ->IsUndefined()) {
    return;
  }
  // Initial size of LO_SPACE
  size_t initial_lo_space = isolate->heap()->lo_space()->Size();

  // The limit for each space for an empty isolate containing just the
  // snapshot.
  // In PPC the page size is 64K, causing more internal fragmentation
  // hence requiring a larger limit.
#if V8_OS_LINUX && V8_HOST_ARCH_PPC
  const size_t kMaxInitialSizePerSpace = 3 * MB;
#else
  const size_t kMaxInitialSizePerSpace = 2 * MB;
#endif

  // Freshly initialized VM gets by with the snapshot size (which is below
  // kMaxInitialSizePerSpace per space).
  Heap* heap = isolate->heap();
  int page_count[LAST_GROWABLE_PAGED_SPACE + 1] = {0, 0, 0, 0};
  for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
       i++) {
    // Debug code can be very large, so skip CODE_SPACE if we are generating it.
    if (i == CODE_SPACE && i::FLAG_debug_code) continue;

    page_count[i] = heap->paged_space(i)->CountTotalPages();
    // Check that the initial heap is also below the limit.
    CHECK_LE(heap->paged_space(i)->CommittedMemory(), kMaxInitialSizePerSpace);
  }

  // Executing the empty script gets by with the same number of pages, i.e.,
  // requires no extra space.
  CompileRun("/*empty*/");
  for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
       i++) {
    // Skip CODE_SPACE, since we had to generate code even for an empty script.
    if (i == CODE_SPACE) continue;
    CHECK_EQ(page_count[i], isolate->heap()->paged_space(i)->CountTotalPages());
  }

  // No large objects required to perform the above steps.
  CHECK_EQ(initial_lo_space,
           static_cast<size_t>(isolate->heap()->lo_space()->Size()));
}
396 : #endif // DEBUG
397 :
398 2935 : static HeapObject AllocateUnaligned(NewSpace* space, int size) {
399 2935 : AllocationResult allocation = space->AllocateRawUnaligned(size);
400 2935 : CHECK(!allocation.IsRetry());
401 2935 : HeapObject filler;
402 2935 : CHECK(allocation.To(&filler));
403 : space->heap()->CreateFillerObjectAt(filler->address(), size,
404 2935 : ClearRecordedSlots::kNo);
405 2935 : return filler;
406 : }
407 :
408 375 : static HeapObject AllocateUnaligned(PagedSpace* space, int size) {
409 375 : AllocationResult allocation = space->AllocateRaw(size, kDoubleUnaligned);
410 375 : CHECK(!allocation.IsRetry());
411 375 : HeapObject filler;
412 375 : CHECK(allocation.To(&filler));
413 : space->heap()->CreateFillerObjectAt(filler->address(), size,
414 375 : ClearRecordedSlots::kNo);
415 375 : return filler;
416 : }
417 :
418 375 : static HeapObject AllocateUnaligned(LargeObjectSpace* space, int size) {
419 375 : AllocationResult allocation = space->AllocateRaw(size);
420 375 : CHECK(!allocation.IsRetry());
421 375 : HeapObject filler;
422 375 : CHECK(allocation.To(&filler));
423 375 : return filler;
424 : }
425 :
426 19 : class Observer : public AllocationObserver {
427 : public:
428 : explicit Observer(intptr_t step_size)
429 49 : : AllocationObserver(step_size), count_(0) {}
430 :
431 683 : void Step(int bytes_allocated, Address addr, size_t) override { count_++; }
432 :
433 : int count() const { return count_; }
434 :
435 : private:
436 : int count_;
437 : };
438 :
// Drives a space's allocation-observer machinery through a scripted sequence
// of unaligned allocations and verifies the exact notification counts:
// single observer, two concurrent observers with different step sizes,
// observer removal, and PauseAllocationObserversScope. T is any space type
// with Add/RemoveAllocationObserver and a matching AllocateUnaligned helper.
template <typename T>
void testAllocationObserver(Isolate* i_isolate, T* space) {
  Observer observer1(128);
  space->AddAllocationObserver(&observer1);

  // The observer should not get notified if we have only allocated less than
  // 128 bytes.
  AllocateUnaligned(space, 64);
  CHECK_EQ(observer1.count(), 0);

  // The observer should get called when we have allocated exactly 128 bytes.
  AllocateUnaligned(space, 64);
  CHECK_EQ(observer1.count(), 1);

  // Another >128 bytes should get another notification.
  AllocateUnaligned(space, 136);
  CHECK_EQ(observer1.count(), 2);

  // Allocating a large object should get only one notification.
  AllocateUnaligned(space, 1024);
  CHECK_EQ(observer1.count(), 3);

  // Allocating another 2048 bytes in small objects should get 16
  // notifications (2048 / 128 = 16, taking the count from 3 to 19).
  for (int i = 0; i < 64; ++i) {
    AllocateUnaligned(space, 32);
  }
  CHECK_EQ(observer1.count(), 19);

  // Multiple observers should work.
  Observer observer2(96);
  space->AddAllocationObserver(&observer2);

  AllocateUnaligned(space, 2048);
  CHECK_EQ(observer1.count(), 20);
  CHECK_EQ(observer2.count(), 1);

  AllocateUnaligned(space, 104);
  CHECK_EQ(observer1.count(), 20);
  CHECK_EQ(observer2.count(), 2);

  // Callback should stop getting called after an observer is removed.
  space->RemoveAllocationObserver(&observer1);

  AllocateUnaligned(space, 384);
  CHECK_EQ(observer1.count(), 20);  // no more notifications.
  CHECK_EQ(observer2.count(), 3);   // this one is still active.

  // Ensure that PauseInlineAllocationObserversScope work correctly.
  AllocateUnaligned(space, 48);
  CHECK_EQ(observer2.count(), 3);
  {
    // No notifications must be delivered while observers are paused.
    PauseAllocationObserversScope pause_observers(i_isolate->heap());
    CHECK_EQ(observer2.count(), 3);
    AllocateUnaligned(space, 384);
    CHECK_EQ(observer2.count(), 3);
  }
  CHECK_EQ(observer2.count(), 3);
  // Coupled with the 48 bytes allocated before the pause, another 48 bytes
  // allocated here should trigger a notification.
  AllocateUnaligned(space, 48);
  CHECK_EQ(observer2.count(), 4);

  // After removal, neither observer is notified anymore.
  space->RemoveAllocationObserver(&observer2);
  AllocateUnaligned(space, 384);
  CHECK_EQ(observer1.count(), 20);
  CHECK_EQ(observer2.count(), 4);
}
507 :
// Runs the allocation-observer scenario against a fresh isolate for each of
// the three allocation paths: new space, paged (old) space, and large-object
// space.
UNINITIALIZED_TEST(AllocationObserver) {
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();

    Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);

    testAllocationObserver<NewSpace>(i_isolate, i_isolate->heap()->new_space());
    // Old space is used but the code path is shared for all
    // classes inheriting from PagedSpace.
    testAllocationObserver<PagedSpace>(i_isolate,
                                       i_isolate->heap()->old_space());
    testAllocationObserver<LargeObjectSpace>(i_isolate,
                                             i_isolate->heap()->lo_space());
  }
  isolate->Dispose();
}
529 :
// Verifies the notification cadence when two observers with different step
// sizes (512 and 576 bytes) watch the same stream of 512 * 32-byte
// allocations: 16384 bytes total yields 32 and 28 notifications respectively.
UNINITIALIZED_TEST(InlineAllocationObserverCadence) {
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();

    Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);

    // Clear out any pre-existing garbage to make the test consistent
    // across snapshot/no-snapshot builds.
    CcTest::CollectAllGarbage(i_isolate);

    NewSpace* new_space = i_isolate->heap()->new_space();

    Observer observer1(512);
    new_space->AddAllocationObserver(&observer1);
    Observer observer2(576);
    new_space->AddAllocationObserver(&observer2);

    for (int i = 0; i < 512; ++i) {
      AllocateUnaligned(new_space, 32);
    }

    new_space->RemoveAllocationObserver(&observer1);
    new_space->RemoveAllocationObserver(&observer2);

    // 512 * 32 = 16384 bytes: 16384 / 512 = 32 steps, 16384 / 576 = 28 steps.
    CHECK_EQ(observer1.count(), 32);
    CHECK_EQ(observer2.count(), 28);
  }
  isolate->Dispose();
}
564 :
// Regression test for crbug.com/777177: allocation-observer bookkeeping must
// tolerate allocation folding moving the top pointer backwards in old space.
HEAP_TEST(Regress777177) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  PagedSpace* old_space = heap->old_space();
  Observer observer(128);
  old_space->AddAllocationObserver(&observer);

  int area_size = old_space->AreaSize();
  int max_object_size = kMaxRegularHeapObjectSize;
  // Size chosen so filler + max object exactly fill one page area.
  int filler_size = area_size - max_object_size;

  {
    // Ensure a new linear allocation area on a fresh page.
    AlwaysAllocateScope always_allocate(isolate);
    heap::SimulateFullSpace(old_space);
    AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), filler_size,
                               ClearRecordedSlots::kNo);
  }

  {
    // Allocate all bytes of the linear allocation area. This moves top_ and
    // top_on_previous_step_ to the next page.
    AllocationResult result =
        old_space->AllocateRaw(max_object_size, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    // Simulate allocation folding moving the top pointer back.
    old_space->SetTopAndLimit(obj->address(), old_space->limit());
  }

  {
    // This triggers assert in crbug.com/777177.
    AllocationResult result = old_space->AllocateRaw(filler_size, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), filler_size,
                               ClearRecordedSlots::kNo);
  }
  old_space->RemoveAllocationObserver(&observer);
}
607 :
// Regression test for crbug.com/791582: like Regress777177, but for new
// space — the observer machinery must survive the top pointer being moved
// back after the linear allocation area was fully consumed.
HEAP_TEST(Regress791582) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  NewSpace* new_space = heap->new_space();
  if (new_space->TotalCapacity() < new_space->MaximumCapacity()) {
    new_space->Grow();
  }

  int until_page_end = static_cast<int>(new_space->limit() - new_space->top());

  if (!IsAligned(until_page_end, kTaggedSize)) {
    // The test works if the size of allocation area size is a multiple of
    // pointer size. This is usually the case unless some allocation observer
    // is already active (e.g. incremental marking observer).
    return;
  }

  Observer observer(128);
  new_space->AddAllocationObserver(&observer);

  {
    // Consume the remainder of the current page in one allocation.
    AllocationResult result =
        new_space->AllocateRaw(until_page_end, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), until_page_end,
                               ClearRecordedSlots::kNo);
    // Simulate allocation folding moving the top pointer back.
    *new_space->allocation_top_address() = obj->address();
  }

  {
    // This triggers assert in crbug.com/791582
    AllocationResult result = new_space->AllocateRaw(256, kWordAligned);
    HeapObject obj = result.ToObjectChecked();
    heap->CreateFillerObjectAt(obj->address(), 256, ClearRecordedSlots::kNo);
  }
  new_space->RemoveAllocationObserver(&observer);
}
648 :
// ShrinkPageToHighWaterMark must release the committed memory behind a
// trailing FreeSpace filler: a page holding one small array should shrink by
// the allocatable area minus the array, rounded down to commit-page size.
TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) {
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  heap::SealCurrentObjects(CcTest::heap());

  // Prepare page that only contains a single object and a trailing FreeSpace
  // filler.
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(128, TENURED);
  Page* page = Page::FromHeapObject(*array);

  // Reset space so high water mark is consistent.
  PagedSpace* old_space = CcTest::heap()->old_space();
  old_space->FreeLinearAllocationArea();
  old_space->ResetFreeList();

  // Everything after the array must be a FreeSpace filler.
  HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
  CHECK(filler->IsFreeSpace());
  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
  size_t should_have_shrunk = RoundDown(
      static_cast<size_t>(MemoryChunkLayout::AllocatableMemoryInDataPage() -
                          array->Size()),
      CommitPageSize());
  CHECK_EQ(should_have_shrunk, shrunk);
}
676 :
// A completely full page (no trailing filler) must not shrink at all.
TEST(ShrinkPageToHighWaterMarkNoFiller) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  heap::SealCurrentObjects(CcTest::heap());

  const int kFillerSize = 0;
  std::vector<Handle<FixedArray>> arrays =
      heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize);
  Handle<FixedArray> array = arrays.back();
  Page* page = Page::FromHeapObject(*array);
  // The last array must reach exactly to the end of the page's area.
  CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);

  // Reset space so high water mark and fillers are consistent.
  PagedSpace* old_space = CcTest::heap()->old_space();
  old_space->ResetFreeList();
  old_space->FreeLinearAllocationArea();

  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
  CHECK_EQ(0u, shrunk);
}
698 :
// A page with only a one-word filler at the end cannot free a whole commit
// page, so shrinking must be a no-op.
TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  heap::SealCurrentObjects(CcTest::heap());

  const int kFillerSize = kTaggedSize;
  std::vector<Handle<FixedArray>> arrays =
      heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize);
  Handle<FixedArray> array = arrays.back();
  Page* page = Page::FromHeapObject(*array);
  CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);

  // Reset space so high water mark and fillers are consistent.
  PagedSpace* old_space = CcTest::heap()->old_space();
  old_space->FreeLinearAllocationArea();
  old_space->ResetFreeList();

  // The trailing gap must be the one-word filler map.
  HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
  CHECK_EQ(filler->map(),
           ReadOnlyRoots(CcTest::heap()).one_pointer_filler_map());

  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
  CHECK_EQ(0u, shrunk);
}
725 :
// A page with only a two-word filler at the end cannot free a whole commit
// page, so shrinking must be a no-op.
TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  heap::SealCurrentObjects(CcTest::heap());

  const int kFillerSize = 2 * kTaggedSize;
  std::vector<Handle<FixedArray>> arrays =
      heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize);
  Handle<FixedArray> array = arrays.back();
  Page* page = Page::FromHeapObject(*array);
  CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize);

  // Reset space so high water mark and fillers are consistent.
  PagedSpace* old_space = CcTest::heap()->old_space();
  old_space->FreeLinearAllocationArea();
  old_space->ResetFreeList();

  // The trailing gap must be the two-word filler map.
  HeapObject filler = HeapObject::FromAddress(array->address() + array->Size());
  CHECK_EQ(filler->map(),
           ReadOnlyRoots(CcTest::heap()).two_pointer_filler_map());

  size_t shrunk = old_space->ShrinkPageToHighWaterMark(page);
  CHECK_EQ(0u, shrunk);
}
752 :
753 : } // namespace heap
754 : } // namespace internal
755 85011 : } // namespace v8
|