Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/api-inl.h"
6 : #include "src/heap/array-buffer-tracker.h"
7 : #include "src/heap/heap-inl.h"
8 : #include "src/heap/spaces.h"
9 : #include "src/isolate.h"
10 : #include "src/objects-inl.h"
11 : #include "src/objects/js-array-buffer-inl.h"
12 : #include "test/cctest/cctest.h"
13 : #include "test/cctest/heap/heap-utils.h"
14 :
namespace {

using LocalTracker = i::LocalArrayBufferTracker;

// Returns true if |buf|'s backing store is currently registered with the
// heap's ArrayBufferTracker (i.e. the GC knows about its external memory).
bool IsTracked(i::JSArrayBuffer buf) {
  return i::ArrayBufferTracker::IsTracked(buf);
}

}  // namespace
24 :
25 : namespace v8 {
26 : namespace internal {
27 : namespace heap {
28 :
29 : // The following tests make sure that JSArrayBuffer tracking works expected when
30 : // moving the objects through various spaces during GC phases.
31 :
32 26644 : TEST(ArrayBuffer_OnlyMC) {
33 : ManualGCScope manual_gc_scope;
34 5 : CcTest::InitializeVM();
35 5 : LocalContext env;
36 5 : v8::Isolate* isolate = env->GetIsolate();
37 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
38 :
39 : JSArrayBuffer raw_ab;
40 : {
41 10 : v8::HandleScope handle_scope(isolate);
42 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
43 : Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
44 5 : CHECK(IsTracked(*buf));
45 5 : heap::GcAndSweep(heap, OLD_SPACE);
46 5 : CHECK(IsTracked(*buf));
47 5 : heap::GcAndSweep(heap, OLD_SPACE);
48 5 : CHECK(IsTracked(*buf));
49 : raw_ab = *buf;
50 : // Prohibit page from being released.
51 : Page::FromHeapObject(*buf)->MarkNeverEvacuate();
52 : }
53 : // 2 GCs are needed because we promote to old space as live, meaning that
54 : // we will survive one GC.
55 5 : heap::GcAndSweep(heap, OLD_SPACE);
56 5 : heap::GcAndSweep(heap, OLD_SPACE);
57 5 : CHECK(!IsTracked(raw_ab));
58 5 : }
59 :
60 26644 : TEST(ArrayBuffer_OnlyScavenge) {
61 : ManualGCScope manual_gc_scope;
62 5 : CcTest::InitializeVM();
63 5 : LocalContext env;
64 5 : v8::Isolate* isolate = env->GetIsolate();
65 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
66 :
67 : JSArrayBuffer raw_ab;
68 : {
69 10 : v8::HandleScope handle_scope(isolate);
70 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
71 : Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
72 5 : CHECK(IsTracked(*buf));
73 5 : heap::GcAndSweep(heap, NEW_SPACE);
74 5 : CHECK(IsTracked(*buf));
75 5 : heap::GcAndSweep(heap, NEW_SPACE);
76 5 : CHECK(IsTracked(*buf));
77 5 : heap::GcAndSweep(heap, NEW_SPACE);
78 5 : CHECK(IsTracked(*buf));
79 : raw_ab = *buf;
80 : // Prohibit page from being released.
81 : Page::FromHeapObject(*buf)->MarkNeverEvacuate();
82 : }
83 : // 2 GCs are needed because we promote to old space as live, meaning that
84 : // we will survive one GC.
85 5 : heap::GcAndSweep(heap, OLD_SPACE);
86 5 : heap::GcAndSweep(heap, OLD_SPACE);
87 5 : CHECK(!IsTracked(raw_ab));
88 5 : }
89 :
90 26644 : TEST(ArrayBuffer_ScavengeAndMC) {
91 : ManualGCScope manual_gc_scope;
92 5 : CcTest::InitializeVM();
93 5 : LocalContext env;
94 5 : v8::Isolate* isolate = env->GetIsolate();
95 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
96 :
97 : JSArrayBuffer raw_ab;
98 : {
99 10 : v8::HandleScope handle_scope(isolate);
100 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
101 : Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
102 5 : CHECK(IsTracked(*buf));
103 5 : heap::GcAndSweep(heap, NEW_SPACE);
104 5 : CHECK(IsTracked(*buf));
105 5 : heap::GcAndSweep(heap, NEW_SPACE);
106 5 : CHECK(IsTracked(*buf));
107 5 : heap::GcAndSweep(heap, OLD_SPACE);
108 5 : CHECK(IsTracked(*buf));
109 5 : heap::GcAndSweep(heap, NEW_SPACE);
110 5 : CHECK(IsTracked(*buf));
111 : raw_ab = *buf;
112 : // Prohibit page from being released.
113 : Page::FromHeapObject(*buf)->MarkNeverEvacuate();
114 : }
115 : // 2 GCs are needed because we promote to old space as live, meaning that
116 : // we will survive one GC.
117 5 : heap::GcAndSweep(heap, OLD_SPACE);
118 5 : heap::GcAndSweep(heap, OLD_SPACE);
119 5 : CHECK(!IsTracked(raw_ab));
120 5 : }
121 :
122 26644 : TEST(ArrayBuffer_Compaction) {
123 5 : if (FLAG_never_compact) return;
124 : ManualGCScope manual_gc_scope;
125 5 : FLAG_manual_evacuation_candidates_selection = true;
126 5 : CcTest::InitializeVM();
127 5 : LocalContext env;
128 5 : v8::Isolate* isolate = env->GetIsolate();
129 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
130 5 : heap::AbandonCurrentlyFreeMemory(heap->old_space());
131 :
132 10 : v8::HandleScope handle_scope(isolate);
133 5 : Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
134 : Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
135 5 : CHECK(IsTracked(*buf1));
136 5 : heap::GcAndSweep(heap, NEW_SPACE);
137 5 : heap::GcAndSweep(heap, NEW_SPACE);
138 :
139 : Page* page_before_gc = Page::FromHeapObject(*buf1);
140 5 : heap::ForceEvacuationCandidate(page_before_gc);
141 5 : CHECK(IsTracked(*buf1));
142 :
143 5 : CcTest::CollectAllGarbage();
144 :
145 : Page* page_after_gc = Page::FromHeapObject(*buf1);
146 5 : CHECK(IsTracked(*buf1));
147 :
148 5 : CHECK_NE(page_before_gc, page_after_gc);
149 : }
150 :
151 26644 : TEST(ArrayBuffer_UnregisterDuringSweep) {
152 : // Regular pages in old space (without compaction) are processed concurrently
153 : // in the sweeper. If we happen to unregister a buffer (either explicitly, or
154 : // implicitly through e.g. |Externalize|) we need to sync with the sweeper
155 : // task.
156 : //
157 : // Note: This test will will only fail on TSAN configurations.
158 :
159 : // Disable verify-heap since it forces sweeping to be completed in the
160 : // epilogue of the GC.
161 : #ifdef VERIFY_HEAP
162 : i::FLAG_verify_heap = false;
163 : #endif // VERIFY_HEAP
164 : ManualGCScope manual_gc_scope;
165 :
166 5 : CcTest::InitializeVM();
167 5 : LocalContext env;
168 5 : v8::Isolate* isolate = env->GetIsolate();
169 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
170 : {
171 10 : v8::HandleScope handle_scope(isolate);
172 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
173 : Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
174 :
175 : {
176 10 : v8::HandleScope handle_scope(isolate);
177 : // Allocate another buffer on the same page to force processing a
178 : // non-empty set of buffers in the last GC.
179 5 : Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
180 : Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
181 5 : CHECK(IsTracked(*buf));
182 5 : CHECK(IsTracked(*buf));
183 5 : heap::GcAndSweep(heap, NEW_SPACE);
184 5 : CHECK(IsTracked(*buf));
185 5 : CHECK(IsTracked(*buf));
186 5 : heap::GcAndSweep(heap, NEW_SPACE);
187 5 : CHECK(IsTracked(*buf));
188 5 : CHECK(IsTracked(*buf2));
189 : }
190 :
191 5 : CcTest::CollectGarbage(OLD_SPACE);
192 : // |Externalize| will cause the buffer to be |Unregister|ed. Without
193 : // barriers and proper synchronization this will trigger a data race on
194 : // TSAN.
195 5 : v8::ArrayBuffer::Contents contents = ab->Externalize();
196 : heap->isolate()->array_buffer_allocator()->Free(contents.Data(),
197 5 : contents.ByteLength());
198 : }
199 5 : }
200 :
TEST(ArrayBuffer_NonLivePromotion) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved when promoting
  // a buffer to old space.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    // Old-space array that keeps the buffer reachable until cleared below.
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, AllocationType::kOld);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should not be promoted as live.
    }
    // Start incremental marking without completing it (|false|): the buffer
    // is promoted while unmarked.
    heap::SimulateIncrementalMarking(heap, false);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    raw_ab = JSArrayBuffer::cast(root->get(0));
    // Drop the only strong reference, then finish marking (|true|). The
    // unmarked buffer must be treated as dead by the next full GC.
    root->set(0, ReadOnlyRoots(heap).undefined_value());
    heap::SimulateIncrementalMarking(heap, true);
    // Prohibit page from being released.
    Page::FromHeapObject(raw_ab)->MarkNeverEvacuate();
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(!IsTracked(raw_ab));
  }
}
237 :
TEST(ArrayBuffer_LivePromotion) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved when promoting
  // a buffer to old space.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    // Old-space array that keeps the buffer reachable during marking.
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, AllocationType::kOld);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should be promoted as live.
    }
    // Complete incremental marking (|true|): the buffer is marked live
    // before being promoted by the scavenges below.
    heap::SimulateIncrementalMarking(heap, true);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    raw_ab = JSArrayBuffer::cast(root->get(0));
    // Clear the strong reference; the preserved mark must still keep the
    // buffer tracked through the following full GC.
    root->set(0, ReadOnlyRoots(heap).undefined_value());
    // Prohibit page from being released.
    Page::FromHeapObject(raw_ab)->MarkNeverEvacuate();
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTracked(raw_ab));
  }
}
273 :
TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
  if (!i::FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved across semispace
  // copy.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  heap::SealCurrentObjects(heap);
  {
    v8::HandleScope handle_scope(isolate);
    // Old-space array that keeps the buffer strongly reachable throughout.
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, AllocationType::kOld);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should be promoted as live.
      Page::FromHeapObject(*buf)->MarkNeverEvacuate();
    }
    std::vector<Handle<FixedArray>> handles;
    // Make the whole page transition from new->old, getting the buffers
    // processed in the sweeper (relying on marking information) instead of
    // processing during newspace evacuation.
    heap::FillCurrentPage(heap->new_space(), &handles);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    heap::SimulateIncrementalMarking(heap, true);
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
  }
}
308 :
309 26644 : UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
310 5 : if (FLAG_optimize_for_size) return;
311 : ManualGCScope manual_gc_scope;
312 : // Test allocates JSArrayBuffer on different pages before triggering a
313 : // full GC that performs the semispace copy. If parallelized, this test
314 : // ensures proper synchronization in TSAN configurations.
315 5 : FLAG_min_semi_space_size = Max(2 * Page::kPageSize / MB, 1);
316 : v8::Isolate::CreateParams create_params;
317 5 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
318 5 : v8::Isolate* isolate = v8::Isolate::New(create_params);
319 : i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
320 : {
321 : v8::Isolate::Scope isolate_scope(isolate);
322 10 : v8::HandleScope handle_scope(isolate);
323 10 : v8::Context::New(isolate)->Enter();
324 : Heap* heap = i_isolate->heap();
325 :
326 : // Ensure heap is in a clean state.
327 5 : CcTest::CollectAllGarbage(i_isolate);
328 5 : CcTest::CollectAllGarbage(i_isolate);
329 :
330 5 : Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
331 : Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
332 5 : heap::FillCurrentPage(heap->new_space());
333 5 : Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
334 : Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
335 5 : CHECK_NE(Page::FromHeapObject(*buf1), Page::FromHeapObject(*buf2));
336 5 : heap::GcAndSweep(heap, OLD_SPACE);
337 : }
338 5 : isolate->Dispose();
339 : }
340 :
341 26644 : TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
342 5 : CcTest::InitializeVM();
343 5 : LocalContext env;
344 5 : v8::Isolate* isolate = env->GetIsolate();
345 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
346 : ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;
347 :
348 : const size_t backing_store_before =
349 5 : heap->new_space()->ExternalBackingStoreBytes(type);
350 : {
351 : const size_t kArraybufferSize = 117;
352 10 : v8::HandleScope handle_scope(isolate);
353 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
354 : USE(ab);
355 : const size_t backing_store_after =
356 5 : heap->new_space()->ExternalBackingStoreBytes(type);
357 5 : CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
358 : }
359 5 : }
360 :
361 26644 : TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
362 5 : CcTest::InitializeVM();
363 5 : LocalContext env;
364 5 : v8::Isolate* isolate = env->GetIsolate();
365 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
366 : ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;
367 :
368 : const size_t backing_store_before =
369 5 : heap->new_space()->ExternalBackingStoreBytes(type);
370 : {
371 : const size_t kArraybufferSize = 117;
372 10 : v8::HandleScope handle_scope(isolate);
373 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
374 : USE(ab);
375 : }
376 5 : heap::GcAndSweep(heap, OLD_SPACE);
377 : const size_t backing_store_after =
378 5 : heap->new_space()->ExternalBackingStoreBytes(type);
379 5 : CHECK_EQ(0, backing_store_after - backing_store_before);
380 5 : }
381 :
382 26644 : TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
383 5 : if (FLAG_never_compact) return;
384 : ManualGCScope manual_gc_scope;
385 5 : FLAG_manual_evacuation_candidates_selection = true;
386 5 : CcTest::InitializeVM();
387 5 : LocalContext env;
388 5 : v8::Isolate* isolate = env->GetIsolate();
389 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
390 5 : heap::AbandonCurrentlyFreeMemory(heap->old_space());
391 : ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;
392 :
393 : const size_t backing_store_before =
394 5 : heap->old_space()->ExternalBackingStoreBytes(type);
395 :
396 : const size_t kArraybufferSize = 117;
397 : {
398 10 : v8::HandleScope handle_scope(isolate);
399 : Local<v8::ArrayBuffer> ab1 =
400 5 : v8::ArrayBuffer::New(isolate, kArraybufferSize);
401 : Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
402 5 : CHECK(IsTracked(*buf1));
403 5 : heap::GcAndSweep(heap, NEW_SPACE);
404 5 : heap::GcAndSweep(heap, NEW_SPACE);
405 :
406 : Page* page_before_gc = Page::FromHeapObject(*buf1);
407 5 : heap::ForceEvacuationCandidate(page_before_gc);
408 5 : CHECK(IsTracked(*buf1));
409 :
410 5 : CcTest::CollectAllGarbage();
411 :
412 : const size_t backing_store_after =
413 5 : heap->old_space()->ExternalBackingStoreBytes(type);
414 5 : CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
415 : }
416 :
417 5 : heap::GcAndSweep(heap, OLD_SPACE);
418 : const size_t backing_store_after =
419 5 : heap->old_space()->ExternalBackingStoreBytes(type);
420 5 : CHECK_EQ(0, backing_store_after - backing_store_before);
421 : }
422 :
423 : } // namespace heap
424 : } // namespace internal
425 79917 : } // namespace v8
|