Line data Source code
1 : // Copyright 2016 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/api-inl.h"
6 : #include "src/heap/array-buffer-tracker.h"
7 : #include "src/heap/heap-inl.h"
8 : #include "src/heap/spaces.h"
9 : #include "src/isolate.h"
10 : #include "src/objects-inl.h"
11 : #include "src/objects/js-array-buffer-inl.h"
12 : #include "test/cctest/cctest.h"
13 : #include "test/cctest/heap/heap-utils.h"
14 :
15 : namespace {
16 :
17 : typedef i::LocalArrayBufferTracker LocalTracker;
18 :
19 : bool IsTracked(i::JSArrayBuffer buf) {
20 180 : return i::ArrayBufferTracker::IsTracked(buf);
21 : }
22 :
23 : } // namespace
24 :
25 : namespace v8 {
26 : namespace internal {
27 : namespace heap {
28 :
29 : // The following tests make sure that JSArrayBuffer tracking works expected when
30 : // moving the objects through various spaces during GC phases.
31 :
32 28342 : TEST(ArrayBuffer_OnlyMC) {
33 : ManualGCScope manual_gc_scope;
34 5 : CcTest::InitializeVM();
35 10 : LocalContext env;
36 5 : v8::Isolate* isolate = env->GetIsolate();
37 5 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
38 :
39 : JSArrayBuffer raw_ab;
40 : {
41 5 : v8::HandleScope handle_scope(isolate);
42 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
43 : Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
44 5 : CHECK(IsTracked(*buf));
45 5 : heap::GcAndSweep(heap, OLD_SPACE);
46 5 : CHECK(IsTracked(*buf));
47 5 : heap::GcAndSweep(heap, OLD_SPACE);
48 5 : CHECK(IsTracked(*buf));
49 : raw_ab = *buf;
50 : // Prohibit page from being released.
51 5 : Page::FromHeapObject(*buf)->MarkNeverEvacuate();
52 : }
53 : // 2 GCs are needed because we promote to old space as live, meaning that
54 : // we will survive one GC.
55 5 : heap::GcAndSweep(heap, OLD_SPACE);
56 5 : heap::GcAndSweep(heap, OLD_SPACE);
57 5 : CHECK(!IsTracked(raw_ab));
58 5 : }
59 :
60 28342 : TEST(ArrayBuffer_OnlyScavenge) {
61 : ManualGCScope manual_gc_scope;
62 5 : CcTest::InitializeVM();
63 10 : LocalContext env;
64 5 : v8::Isolate* isolate = env->GetIsolate();
65 5 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
66 :
67 : JSArrayBuffer raw_ab;
68 : {
69 5 : v8::HandleScope handle_scope(isolate);
70 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
71 : Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
72 5 : CHECK(IsTracked(*buf));
73 5 : heap::GcAndSweep(heap, NEW_SPACE);
74 5 : CHECK(IsTracked(*buf));
75 5 : heap::GcAndSweep(heap, NEW_SPACE);
76 5 : CHECK(IsTracked(*buf));
77 5 : heap::GcAndSweep(heap, NEW_SPACE);
78 5 : CHECK(IsTracked(*buf));
79 : raw_ab = *buf;
80 : // Prohibit page from being released.
81 5 : Page::FromHeapObject(*buf)->MarkNeverEvacuate();
82 : }
83 : // 2 GCs are needed because we promote to old space as live, meaning that
84 : // we will survive one GC.
85 5 : heap::GcAndSweep(heap, OLD_SPACE);
86 5 : heap::GcAndSweep(heap, OLD_SPACE);
87 5 : CHECK(!IsTracked(raw_ab));
88 5 : }
89 :
// Verifies that a buffer stays tracked while being moved by an interleaved
// sequence of scavenges and full GCs, and is untracked once dead.
TEST(ArrayBuffer_ScavengeAndMC) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
    CHECK(IsTracked(*buf));
    // Tracking must survive scavenges (which move new-space objects and
    // eventually promote survivors to old space)...
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    // ...as well as a full GC followed by another scavenge.
    heap::GcAndSweep(heap, OLD_SPACE);
    CHECK(IsTracked(*buf));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(*buf));
    // Keep an unrooted raw reference so we can query tracking after the
    // handle scope closes and the buffer becomes unreachable.
    raw_ab = *buf;
    // Prohibit page from being released.
    Page::FromHeapObject(*buf)->MarkNeverEvacuate();
  }
  // 2 GCs are needed because we promote to old space as live, meaning that
  // we will survive one GC.
  heap::GcAndSweep(heap, OLD_SPACE);
  heap::GcAndSweep(heap, OLD_SPACE);
  CHECK(!IsTracked(raw_ab));
}
121 :
122 28342 : TEST(ArrayBuffer_Compaction) {
123 5 : if (FLAG_never_compact) return;
124 : ManualGCScope manual_gc_scope;
125 5 : FLAG_manual_evacuation_candidates_selection = true;
126 5 : CcTest::InitializeVM();
127 10 : LocalContext env;
128 5 : v8::Isolate* isolate = env->GetIsolate();
129 5 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
130 5 : heap::AbandonCurrentlyFreeMemory(heap->old_space());
131 :
132 10 : v8::HandleScope handle_scope(isolate);
133 5 : Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
134 : Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
135 5 : CHECK(IsTracked(*buf1));
136 5 : heap::GcAndSweep(heap, NEW_SPACE);
137 5 : heap::GcAndSweep(heap, NEW_SPACE);
138 :
139 : Page* page_before_gc = Page::FromHeapObject(*buf1);
140 5 : heap::ForceEvacuationCandidate(page_before_gc);
141 5 : CHECK(IsTracked(*buf1));
142 :
143 5 : CcTest::CollectAllGarbage();
144 :
145 : Page* page_after_gc = Page::FromHeapObject(*buf1);
146 5 : CHECK(IsTracked(*buf1));
147 :
148 5 : CHECK_NE(page_before_gc, page_after_gc);
149 : }
150 :
151 28342 : TEST(ArrayBuffer_UnregisterDuringSweep) {
152 : // Regular pages in old space (without compaction) are processed concurrently
153 : // in the sweeper. If we happen to unregister a buffer (either explicitly, or
154 : // implicitly through e.g. |Externalize|) we need to sync with the sweeper
155 : // task.
156 : //
157 : // Note: This test will will only fail on TSAN configurations.
158 :
159 : // Disable verify-heap since it forces sweeping to be completed in the
160 : // epilogue of the GC.
161 : #ifdef VERIFY_HEAP
162 : i::FLAG_verify_heap = false;
163 : #endif // VERIFY_HEAP
164 : ManualGCScope manual_gc_scope;
165 :
166 5 : CcTest::InitializeVM();
167 10 : LocalContext env;
168 5 : v8::Isolate* isolate = env->GetIsolate();
169 5 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
170 : {
171 5 : v8::HandleScope handle_scope(isolate);
172 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
173 : Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
174 :
175 : {
176 5 : v8::HandleScope handle_scope(isolate);
177 : // Allocate another buffer on the same page to force processing a
178 : // non-empty set of buffers in the last GC.
179 5 : Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
180 : Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
181 5 : CHECK(IsTracked(*buf));
182 5 : CHECK(IsTracked(*buf));
183 5 : heap::GcAndSweep(heap, NEW_SPACE);
184 5 : CHECK(IsTracked(*buf));
185 5 : CHECK(IsTracked(*buf));
186 5 : heap::GcAndSweep(heap, NEW_SPACE);
187 5 : CHECK(IsTracked(*buf));
188 5 : CHECK(IsTracked(*buf2));
189 : }
190 :
191 5 : CcTest::CollectGarbage(OLD_SPACE);
192 : // |Externalize| will cause the buffer to be |Unregister|ed. Without
193 : // barriers and proper synchronization this will trigger a data race on
194 : // TSAN.
195 5 : v8::ArrayBuffer::Contents contents = ab->Externalize();
196 5 : heap->isolate()->array_buffer_allocator()->Free(contents.Data(),
197 5 : contents.ByteLength());
198 : }
199 5 : }
200 :
TEST(ArrayBuffer_NonLivePromotion) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved when promoting
  // a buffer to old space.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    // Tenured array that keeps the buffer reachable without a handle.
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, TENURED);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should not be promoted as live.
    }
    // NOTE(review): the |false| argument presumably leaves incremental
    // marking unfinished so the buffer is not marked live — confirm against
    // heap-utils.h.
    heap::SimulateIncrementalMarking(heap, false);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    // Keep an unrooted raw reference, then drop the last strong reference so
    // the buffer dies in the following full GC.
    raw_ab = JSArrayBuffer::cast(root->get(0));
    root->set(0, ReadOnlyRoots(heap).undefined_value());
    heap::SimulateIncrementalMarking(heap, true);
    // Prohibit page from being released.
    Page::FromHeapObject(raw_ab)->MarkNeverEvacuate();
    heap::GcAndSweep(heap, OLD_SPACE);
    // The dead buffer must have been removed from tracking.
    CHECK(!IsTracked(raw_ab));
  }
}
237 :
TEST(ArrayBuffer_LivePromotion) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved when promoting
  // a buffer to old space.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  JSArrayBuffer raw_ab;
  {
    v8::HandleScope handle_scope(isolate);
    // Tenured array that keeps the buffer reachable without a handle.
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, TENURED);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should be promoted as live.
    }
    // NOTE(review): the |true| argument presumably completes incremental
    // marking so the buffer is marked live — confirm against heap-utils.h.
    heap::SimulateIncrementalMarking(heap, true);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    // Keep an unrooted raw reference, then drop the strong reference; the
    // buffer was already marked live, so it must survive the next GC.
    raw_ab = JSArrayBuffer::cast(root->get(0));
    root->set(0, ReadOnlyRoots(heap).undefined_value());
    // Prohibit page from being released.
    Page::FromHeapObject(raw_ab)->MarkNeverEvacuate();
    heap::GcAndSweep(heap, OLD_SPACE);
    // Marked-live buffer must still be tracked after promotion.
    CHECK(IsTracked(raw_ab));
  }
}
273 :
TEST(ArrayBuffer_SemiSpaceCopyThenPagePromotion) {
  if (!i::FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  // The test verifies that the marking state is preserved across semispace
  // copy.
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

  heap::SealCurrentObjects(heap);
  {
    v8::HandleScope handle_scope(isolate);
    // Tenured array that keeps the buffer reachable across GCs.
    Handle<FixedArray> root =
        heap->isolate()->factory()->NewFixedArray(1, TENURED);
    {
      v8::HandleScope handle_scope(isolate);
      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
      root->set(0, *buf);  // Buffer that should be promoted as live.
      Page::FromHeapObject(*buf)->MarkNeverEvacuate();
    }
    std::vector<Handle<FixedArray>> handles;
    // Make the whole page transition from new->old, getting the buffers
    // processed in the sweeper (relying on marking information) instead of
    // processing during newspace evacuation.
    heap::FillCurrentPage(heap->new_space(), &handles);
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
    heap::GcAndSweep(heap, NEW_SPACE);
    heap::SimulateIncrementalMarking(heap, true);
    heap::GcAndSweep(heap, OLD_SPACE);
    // After page promotion and sweeping, the buffer must still be tracked.
    CHECK(IsTracked(JSArrayBuffer::cast(root->get(0))));
  }
}
308 :
UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
  if (FLAG_optimize_for_size) return;
  // Test allocates JSArrayBuffer on different pages before triggering a
  // full GC that performs the semispace copy. If parallelized, this test
  // ensures proper synchronization in TSAN configurations.
  // Ensure the semispace holds at least two pages so the two buffers can
  // land on distinct pages.
  FLAG_min_semi_space_size = 2 * Page::kPageSize / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  // UNINITIALIZED_TEST: the isolate is created and disposed manually.
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    Heap* heap = i_isolate->heap();

    // Ensure heap is in a clean state.
    CcTest::CollectAllGarbage(i_isolate);
    CcTest::CollectAllGarbage(i_isolate);

    Local<v8::ArrayBuffer> ab1 = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
    // Fill the rest of the page so the second buffer goes to a fresh page.
    heap::FillCurrentPage(heap->new_space());
    Local<v8::ArrayBuffer> ab2 = v8::ArrayBuffer::New(isolate, 100);
    Handle<JSArrayBuffer> buf2 = v8::Utils::OpenHandle(*ab2);
    CHECK_NE(Page::FromHeapObject(*buf1), Page::FromHeapObject(*buf2));
    heap::GcAndSweep(heap, OLD_SPACE);
  }
  isolate->Dispose();
}
339 :
340 28342 : TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
341 5 : CcTest::InitializeVM();
342 5 : LocalContext env;
343 5 : v8::Isolate* isolate = env->GetIsolate();
344 10 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
345 : ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;
346 :
347 : const size_t backing_store_before =
348 5 : heap->new_space()->ExternalBackingStoreBytes(type);
349 : {
350 : const size_t kArraybufferSize = 117;
351 5 : v8::HandleScope handle_scope(isolate);
352 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
353 : USE(ab);
354 : const size_t backing_store_after =
355 5 : heap->new_space()->ExternalBackingStoreBytes(type);
356 5 : CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
357 5 : }
358 5 : }
359 :
360 28342 : TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
361 5 : CcTest::InitializeVM();
362 5 : LocalContext env;
363 5 : v8::Isolate* isolate = env->GetIsolate();
364 10 : Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
365 : ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;
366 :
367 : const size_t backing_store_before =
368 5 : heap->new_space()->ExternalBackingStoreBytes(type);
369 : {
370 : const size_t kArraybufferSize = 117;
371 5 : v8::HandleScope handle_scope(isolate);
372 5 : Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, kArraybufferSize);
373 5 : USE(ab);
374 : }
375 5 : heap::GcAndSweep(heap, OLD_SPACE);
376 : const size_t backing_store_after =
377 5 : heap->new_space()->ExternalBackingStoreBytes(type);
378 5 : CHECK_EQ(0, backing_store_after - backing_store_before);
379 5 : }
380 :
// Checks that external backing store accounting in old space follows a
// buffer that is moved by the compactor, and returns to the baseline once
// the buffer is collected.
TEST(ArrayBuffer_ExternalBackingStoreSizeIncreasesMarkCompact) {
  if (FLAG_never_compact) return;
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
  heap::AbandonCurrentlyFreeMemory(heap->old_space());
  ExternalBackingStoreType type = ExternalBackingStoreType::kArrayBuffer;

  // Baseline of external bytes accounted to old space before allocation.
  const size_t backing_store_before =
      heap->old_space()->ExternalBackingStoreBytes(type);

  const size_t kArraybufferSize = 117;
  {
    v8::HandleScope handle_scope(isolate);
    Local<v8::ArrayBuffer> ab1 =
        v8::ArrayBuffer::New(isolate, kArraybufferSize);
    Handle<JSArrayBuffer> buf1 = v8::Utils::OpenHandle(*ab1);
    CHECK(IsTracked(*buf1));
    // Move the buffer out of new space via two scavenges.
    heap::GcAndSweep(heap, NEW_SPACE);
    heap::GcAndSweep(heap, NEW_SPACE);

    // Force the buffer's page to be evacuated during the next full GC.
    Page* page_before_gc = Page::FromHeapObject(*buf1);
    heap::ForceEvacuationCandidate(page_before_gc);
    CHECK(IsTracked(*buf1));

    CcTest::CollectAllGarbage();

    // The accounted bytes must have moved with the buffer to its new page.
    const size_t backing_store_after =
        heap->old_space()->ExternalBackingStoreBytes(type);
    CHECK_EQ(kArraybufferSize, backing_store_after - backing_store_before);
  }

  // Once dead, a full GC must return the accounting to the baseline.
  heap::GcAndSweep(heap, OLD_SPACE);
  const size_t backing_store_after =
      heap->old_space()->ExternalBackingStoreBytes(type);
  CHECK_EQ(0, backing_store_after - backing_store_before);
}
421 :
422 : } // namespace heap
423 : } // namespace internal
424 85011 : } // namespace v8
|