Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Redistribution and use in source and binary forms, with or without
3 : // modification, are permitted provided that the following conditions are
4 : // met:
5 : //
6 : // * Redistributions of source code must retain the above copyright
7 : // notice, this list of conditions and the following disclaimer.
8 : // * Redistributions in binary form must reproduce the above
9 : // copyright notice, this list of conditions and the following
10 : // disclaimer in the documentation and/or other materials provided
11 : // with the distribution.
12 : // * Neither the name of Google Inc. nor the names of its
13 : // contributors may be used to endorse or promote products derived
14 : // from this software without specific prior written permission.
15 : //
16 : // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 : // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 : // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 : // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 : // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 : // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 : // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 : // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 : // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 : // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 : // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 :
28 : #include <stdlib.h>
29 : #include <utility>
30 :
31 : #include "src/api-inl.h"
32 : #include "src/assembler-inl.h"
33 : #include "src/compilation-cache.h"
34 : #include "src/debug/debug.h"
35 : #include "src/deoptimizer.h"
36 : #include "src/elements.h"
37 : #include "src/execution.h"
38 : #include "src/field-type.h"
39 : #include "src/global-handles.h"
40 : #include "src/hash-seed-inl.h"
41 : #include "src/heap/factory.h"
42 : #include "src/heap/gc-tracer.h"
43 : #include "src/heap/heap-inl.h"
44 : #include "src/heap/incremental-marking.h"
45 : #include "src/heap/mark-compact.h"
46 : #include "src/heap/memory-reducer.h"
47 : #include "src/heap/remembered-set.h"
48 : #include "src/ic/ic.h"
49 : #include "src/macro-assembler-inl.h"
50 : #include "src/objects-inl.h"
51 : #include "src/objects/frame-array-inl.h"
52 : #include "src/objects/heap-number-inl.h"
53 : #include "src/objects/js-array-inl.h"
54 : #include "src/objects/js-collection-inl.h"
55 : #include "src/objects/managed.h"
56 : #include "src/objects/slots.h"
57 : #include "src/ostreams.h"
58 : #include "src/regexp/jsregexp.h"
59 : #include "src/snapshot/snapshot.h"
60 : #include "src/transitions.h"
61 : #include "test/cctest/cctest.h"
62 : #include "test/cctest/heap/heap-tester.h"
63 : #include "test/cctest/heap/heap-utils.h"
64 : #include "test/cctest/test-feedback-vector.h"
65 : #include "test/cctest/test-transitions.h"
66 :
67 : namespace v8 {
68 : namespace internal {
69 : namespace heap {
70 :
71 : // We only start allocation-site tracking with the second instantiation.
72 : static const int kPretenureCreationCount =
73 : AllocationSite::kPretenureMinimumCreated + 1;
74 :
75 25 : static void CheckMap(Map map, int type, int instance_size) {
76 25 : CHECK(map->IsHeapObject());
77 : #ifdef DEBUG
78 : CHECK(CcTest::heap()->Contains(map));
79 : #endif
80 75 : CHECK_EQ(ReadOnlyRoots(CcTest::heap()).meta_map(), map->map());
81 25 : CHECK_EQ(type, map->instance_type());
82 25 : CHECK_EQ(instance_size, map->instance_size());
83 25 : }
84 :
85 :
86 25880 : TEST(HeapMaps) {
87 5 : CcTest::InitializeVM();
88 5 : ReadOnlyRoots roots(CcTest::heap());
89 5 : CheckMap(roots.meta_map(), MAP_TYPE, Map::kSize);
90 5 : CheckMap(roots.heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
91 5 : CheckMap(roots.fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
92 5 : CheckMap(roots.hash_table_map(), HASH_TABLE_TYPE, kVariableSizeSentinel);
93 5 : CheckMap(roots.string_map(), STRING_TYPE, kVariableSizeSentinel);
94 5 : }
95 :
96 10 : static void VerifyStoredPrototypeMap(Isolate* isolate,
97 : int stored_map_context_index,
98 : int stored_ctor_context_index) {
99 10 : Handle<Context> context = isolate->native_context();
100 :
101 : Handle<Map> this_map(Map::cast(context->get(stored_map_context_index)),
102 20 : isolate);
103 :
104 : Handle<JSFunction> fun(
105 20 : JSFunction::cast(context->get(stored_ctor_context_index)), isolate);
106 20 : Handle<JSObject> proto(JSObject::cast(fun->initial_map()->prototype()),
107 20 : isolate);
108 : Handle<Map> that_map(proto->map(), isolate);
109 :
110 10 : CHECK(proto->HasFastProperties());
111 30 : CHECK_EQ(*this_map, *that_map);
112 10 : }
113 :
114 : // Checks that critical maps stored on the context (mostly used for fast-path
115 : // checks) are unchanged after initialization.
116 25880 : TEST(ContextMaps) {
117 5 : CcTest::InitializeVM();
118 : Isolate* isolate = CcTest::i_isolate();
119 : HandleScope handle_scope(isolate);
120 :
121 : VerifyStoredPrototypeMap(isolate,
122 : Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX,
123 5 : Context::STRING_FUNCTION_INDEX);
124 : VerifyStoredPrototypeMap(isolate, Context::REGEXP_PROTOTYPE_MAP_INDEX,
125 5 : Context::REGEXP_FUNCTION_INDEX);
126 5 : }
127 :
128 25880 : TEST(InitialObjects) {
129 5 : LocalContext env;
130 : HandleScope scope(CcTest::i_isolate());
131 : Handle<Context> context = v8::Utils::OpenHandle(*env);
132 : // Initial ArrayIterator prototype.
133 15 : CHECK_EQ(
134 : context->initial_array_iterator_prototype(),
135 : *v8::Utils::OpenHandle(*CompileRun("[][Symbol.iterator]().__proto__")));
136 : // Initial Array prototype.
137 15 : CHECK_EQ(context->initial_array_prototype(),
138 : *v8::Utils::OpenHandle(*CompileRun("Array.prototype")));
139 : // Initial Generator prototype.
140 15 : CHECK_EQ(context->initial_generator_prototype(),
141 : *v8::Utils::OpenHandle(
142 : *CompileRun("(function*(){}).__proto__.prototype")));
143 : // Initial Iterator prototype.
144 15 : CHECK_EQ(context->initial_iterator_prototype(),
145 : *v8::Utils::OpenHandle(
146 : *CompileRun("[][Symbol.iterator]().__proto__.__proto__")));
147 : // Initial Object prototype.
148 15 : CHECK_EQ(context->initial_object_prototype(),
149 5 : *v8::Utils::OpenHandle(*CompileRun("Object.prototype")));
150 5 : }
151 :
152 20 : static void CheckOddball(Isolate* isolate, Object obj, const char* string) {
153 20 : CHECK(obj->IsOddball());
154 : Handle<Object> handle(obj, isolate);
155 40 : Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
156 20 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
157 20 : }
158 :
159 15 : static void CheckSmi(Isolate* isolate, int value, const char* string) {
160 : Handle<Object> handle(Smi::FromInt(value), isolate);
161 30 : Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
162 15 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
163 15 : }
164 :
165 :
166 5 : static void CheckNumber(Isolate* isolate, double value, const char* string) {
167 5 : Handle<Object> number = isolate->factory()->NewNumber(value);
168 10 : CHECK(number->IsNumber());
169 : Handle<Object> print_string =
170 10 : Object::ToString(isolate, number).ToHandleChecked();
171 5 : CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
172 5 : }
173 :
174 10 : void CheckEmbeddedObjectsAreEqual(Handle<Code> lhs, Handle<Code> rhs) {
175 : int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
176 10 : RelocIterator lhs_it(*lhs, mode_mask);
177 10 : RelocIterator rhs_it(*rhs, mode_mask);
178 30 : while (!lhs_it.done() && !rhs_it.done()) {
179 30 : CHECK(lhs_it.rinfo()->target_object() == rhs_it.rinfo()->target_object());
180 :
181 10 : lhs_it.next();
182 10 : rhs_it.next();
183 : }
184 10 : CHECK(lhs_it.done() == rhs_it.done());
185 10 : }
186 :
187 25880 : HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
188 5 : CcTest::InitializeVM();
189 5 : Isolate* isolate = CcTest::i_isolate();
190 : Factory* factory = isolate->factory();
191 : HandleScope sc(isolate);
192 :
193 5 : Handle<HeapNumber> value = factory->NewHeapNumber(1.000123);
194 5 : CHECK(Heap::InYoungGeneration(*value));
195 :
196 : i::byte buffer[i::Assembler::kMinimalBufferSize];
197 : MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
198 15 : ExternalAssemblerBuffer(buffer, sizeof(buffer)));
199 : // Add a new-space reference to the code.
200 5 : masm.Push(value);
201 :
202 5 : CodeDesc desc;
203 5 : masm.GetCode(isolate, &desc);
204 : Handle<Code> code =
205 10 : isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
206 :
207 : Handle<Code> copy;
208 : {
209 5 : CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
210 5 : copy = factory->CopyCode(code);
211 : }
212 :
213 5 : CheckEmbeddedObjectsAreEqual(code, copy);
214 5 : CcTest::CollectAllAvailableGarbage();
215 5 : CheckEmbeddedObjectsAreEqual(code, copy);
216 5 : }
217 :
218 5 : static void CheckFindCodeObject(Isolate* isolate) {
219 : // Test FindCodeObject
220 : #define __ assm.
221 :
222 15 : Assembler assm(AssemblerOptions{});
223 :
224 5 : __ nop(); // supported on all architectures
225 :
226 5 : CodeDesc desc;
227 : assm.GetCode(isolate, &desc);
228 : Handle<Code> code =
229 10 : isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
230 10 : CHECK(code->IsCode());
231 :
232 5 : HeapObject obj = HeapObject::cast(*code);
233 : Address obj_addr = obj->address();
234 :
235 65 : for (int i = 0; i < obj->Size(); i += kTaggedSize) {
236 60 : Object found = isolate->FindCodeObject(obj_addr + i);
237 120 : CHECK_EQ(*code, found);
238 : }
239 :
240 : Handle<Code> copy =
241 10 : isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
242 5 : HeapObject obj_copy = HeapObject::cast(*copy);
243 : Object not_right =
244 5 : isolate->FindCodeObject(obj_copy->address() + obj_copy->Size() / 2);
245 5 : CHECK(not_right != *code);
246 5 : }
247 :
248 :
249 25880 : TEST(HandleNull) {
250 5 : CcTest::InitializeVM();
251 : Isolate* isolate = CcTest::i_isolate();
252 : HandleScope outer_scope(isolate);
253 10 : LocalContext context;
254 : Handle<Object> n(Object(0), isolate);
255 5 : CHECK(!n.is_null());
256 5 : }
257 :
258 :
259 25880 : TEST(HeapObjects) {
260 5 : CcTest::InitializeVM();
261 : Isolate* isolate = CcTest::i_isolate();
262 : Factory* factory = isolate->factory();
263 5 : Heap* heap = isolate->heap();
264 :
265 : HandleScope sc(isolate);
266 5 : Handle<Object> value = factory->NewNumber(1.000123);
267 10 : CHECK(value->IsHeapNumber());
268 10 : CHECK(value->IsNumber());
269 5 : CHECK_EQ(1.000123, value->Number());
270 :
271 5 : value = factory->NewNumber(1.0);
272 10 : CHECK(value->IsSmi());
273 10 : CHECK(value->IsNumber());
274 5 : CHECK_EQ(1.0, value->Number());
275 :
276 5 : value = factory->NewNumberFromInt(1024);
277 10 : CHECK(value->IsSmi());
278 10 : CHECK(value->IsNumber());
279 5 : CHECK_EQ(1024.0, value->Number());
280 :
281 5 : value = factory->NewNumberFromInt(Smi::kMinValue);
282 10 : CHECK(value->IsSmi());
283 10 : CHECK(value->IsNumber());
284 10 : CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
285 :
286 5 : value = factory->NewNumberFromInt(Smi::kMaxValue);
287 10 : CHECK(value->IsSmi());
288 10 : CHECK(value->IsNumber());
289 10 : CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
290 :
291 : #if !defined(V8_TARGET_ARCH_64_BIT)
292 : // TODO(lrn): We need a NumberFromIntptr function in order to test this.
293 : value = factory->NewNumberFromInt(Smi::kMinValue - 1);
294 : CHECK(value->IsHeapNumber());
295 : CHECK(value->IsNumber());
296 : CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
297 : #endif
298 :
299 5 : value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
300 10 : CHECK(value->IsHeapNumber());
301 10 : CHECK(value->IsNumber());
302 5 : CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
303 : value->Number());
304 :
305 5 : value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
306 10 : CHECK(value->IsHeapNumber());
307 10 : CHECK(value->IsNumber());
308 5 : CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
309 : value->Number());
310 :
311 : // nan oddball checks
312 10 : CHECK(factory->nan_value()->IsNumber());
313 5 : CHECK(std::isnan(factory->nan_value()->Number()));
314 :
315 5 : Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
316 10 : CHECK(s->IsString());
317 5 : CHECK_EQ(10, s->length());
318 :
319 5 : Handle<String> object_string = Handle<String>::cast(factory->Object_string());
320 : Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
321 10 : isolate);
322 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
323 :
324 : // Check ToString for oddballs
325 : ReadOnlyRoots roots(heap);
326 5 : CheckOddball(isolate, roots.true_value(), "true");
327 5 : CheckOddball(isolate, roots.false_value(), "false");
328 5 : CheckOddball(isolate, roots.null_value(), "null");
329 5 : CheckOddball(isolate, roots.undefined_value(), "undefined");
330 :
331 : // Check ToString for Smis
332 5 : CheckSmi(isolate, 0, "0");
333 5 : CheckSmi(isolate, 42, "42");
334 5 : CheckSmi(isolate, -42, "-42");
335 :
336 : // Check ToString for Numbers
337 5 : CheckNumber(isolate, 1.1, "1.1");
338 :
339 5 : CheckFindCodeObject(isolate);
340 5 : }
341 :
342 25880 : TEST(Tagging) {
343 5 : CcTest::InitializeVM();
344 : int request = 24;
345 : CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
346 10 : CHECK(Smi::FromInt(42)->IsSmi());
347 10 : CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
348 10 : CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
349 5 : }
350 :
351 :
352 25880 : TEST(GarbageCollection) {
353 5 : CcTest::InitializeVM();
354 : Isolate* isolate = CcTest::i_isolate();
355 : Factory* factory = isolate->factory();
356 :
357 : HandleScope sc(isolate);
358 : // Check GC.
359 5 : CcTest::CollectGarbage(NEW_SPACE);
360 :
361 : Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
362 10 : isolate);
363 5 : Handle<String> name = factory->InternalizeUtf8String("theFunction");
364 5 : Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
365 5 : Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
366 5 : Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
367 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
368 : Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
369 :
370 : {
371 : HandleScope inner_scope(isolate);
372 : // Allocate a function and keep it as a property of the global object.
373 5 : Handle<JSFunction> function = factory->NewFunctionForTest(name);
374 10 : Object::SetProperty(isolate, global, name, function).Check();
375 : // Allocate an object. Unrooted after leaving the scope.
376 5 : Handle<JSObject> obj = factory->NewJSObject(function);
377 10 : Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
378 10 : Object::SetProperty(isolate, obj, prop_namex, twenty_four).Check();
379 :
380 15 : CHECK_EQ(Smi::FromInt(23),
381 : *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
382 15 : CHECK_EQ(Smi::FromInt(24),
383 : *Object::GetProperty(isolate, obj, prop_namex).ToHandleChecked());
384 : }
385 :
386 5 : CcTest::CollectGarbage(NEW_SPACE);
387 :
388 : // Function should be alive.
389 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
390 : // Check function is retained.
391 : Handle<Object> func_value =
392 10 : Object::GetProperty(isolate, global, name).ToHandleChecked();
393 10 : CHECK(func_value->IsJSFunction());
394 5 : Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
395 :
396 : {
397 : HandleScope inner_scope(isolate);
398 : // Allocate another object, make it reachable from global.
399 5 : Handle<JSObject> obj = factory->NewJSObject(function);
400 10 : Object::SetProperty(isolate, global, obj_name, obj).Check();
401 10 : Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
402 : }
403 :
404 : // After gc, it should survive.
405 5 : CcTest::CollectGarbage(NEW_SPACE);
406 :
407 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
408 : Handle<Object> obj =
409 10 : Object::GetProperty(isolate, global, obj_name).ToHandleChecked();
410 10 : CHECK(obj->IsJSObject());
411 15 : CHECK_EQ(Smi::FromInt(23),
412 : *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
413 5 : }
414 :
415 :
416 25 : static void VerifyStringAllocation(Isolate* isolate, const char* string) {
417 : HandleScope scope(isolate);
418 : Handle<String> s = isolate->factory()->NewStringFromUtf8(
419 50 : CStrVector(string)).ToHandleChecked();
420 25 : CHECK_EQ(StrLength(string), s->length());
421 385 : for (int index = 0; index < s->length(); index++) {
422 360 : CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
423 : }
424 25 : }
425 :
426 :
427 25880 : TEST(String) {
428 5 : CcTest::InitializeVM();
429 5 : Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
430 :
431 5 : VerifyStringAllocation(isolate, "a");
432 5 : VerifyStringAllocation(isolate, "ab");
433 5 : VerifyStringAllocation(isolate, "abc");
434 5 : VerifyStringAllocation(isolate, "abcd");
435 5 : VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
436 5 : }
437 :
438 :
439 25880 : TEST(LocalHandles) {
440 5 : CcTest::InitializeVM();
441 : Isolate* isolate = CcTest::i_isolate();
442 : Factory* factory = isolate->factory();
443 :
444 5 : v8::HandleScope scope(CcTest::isolate());
445 : const char* name = "Kasper the spunky";
446 5 : Handle<String> string = factory->NewStringFromAsciiChecked(name);
447 5 : CHECK_EQ(StrLength(name), string->length());
448 5 : }
449 :
450 :
451 25880 : TEST(GlobalHandles) {
452 5 : CcTest::InitializeVM();
453 5 : Isolate* isolate = CcTest::i_isolate();
454 : Factory* factory = isolate->factory();
455 : GlobalHandles* global_handles = isolate->global_handles();
456 :
457 : Handle<Object> h1;
458 : Handle<Object> h2;
459 : Handle<Object> h3;
460 : Handle<Object> h4;
461 :
462 : {
463 : HandleScope scope(isolate);
464 :
465 5 : Handle<Object> i = factory->NewStringFromStaticChars("fisk");
466 5 : Handle<Object> u = factory->NewNumber(1.12344);
467 :
468 5 : h1 = global_handles->Create(*i);
469 5 : h2 = global_handles->Create(*u);
470 5 : h3 = global_handles->Create(*i);
471 5 : h4 = global_handles->Create(*u);
472 : }
473 :
474 : // After gc, it should survive.
475 5 : CcTest::CollectGarbage(NEW_SPACE);
476 :
477 10 : CHECK((*h1)->IsString());
478 10 : CHECK((*h2)->IsHeapNumber());
479 10 : CHECK((*h3)->IsString());
480 10 : CHECK((*h4)->IsHeapNumber());
481 :
482 15 : CHECK_EQ(*h3, *h1);
483 5 : GlobalHandles::Destroy(h1.location());
484 5 : GlobalHandles::Destroy(h3.location());
485 :
486 15 : CHECK_EQ(*h4, *h2);
487 5 : GlobalHandles::Destroy(h2.location());
488 5 : GlobalHandles::Destroy(h4.location());
489 5 : }
490 :
491 :
492 : static bool WeakPointerCleared = false;
493 :
494 15 : static void TestWeakGlobalHandleCallback(
495 15 : const v8::WeakCallbackInfo<void>& data) {
496 : std::pair<v8::Persistent<v8::Value>*, int>* p =
497 : reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
498 : data.GetParameter());
499 15 : if (p->second == 1234) WeakPointerCleared = true;
500 15 : p->first->Reset();
501 15 : }
502 :
503 :
504 25880 : TEST(WeakGlobalHandlesScavenge) {
505 5 : FLAG_stress_compaction = false;
506 5 : FLAG_stress_incremental_marking = false;
507 5 : CcTest::InitializeVM();
508 5 : Isolate* isolate = CcTest::i_isolate();
509 : Factory* factory = isolate->factory();
510 : GlobalHandles* global_handles = isolate->global_handles();
511 :
512 5 : WeakPointerCleared = false;
513 :
514 : Handle<Object> h1;
515 : Handle<Object> h2;
516 :
517 : {
518 : HandleScope scope(isolate);
519 :
520 5 : Handle<Object> i = factory->NewStringFromStaticChars("fisk");
521 5 : Handle<Object> u = factory->NewNumber(1.12344);
522 :
523 5 : h1 = global_handles->Create(*i);
524 5 : h2 = global_handles->Create(*u);
525 : }
526 :
527 : std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
528 : GlobalHandles::MakeWeak(
529 : h2.location(), reinterpret_cast<void*>(&handle_and_id),
530 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
531 :
532 : // Scavenge treats weak pointers as normal roots.
533 5 : CcTest::CollectGarbage(NEW_SPACE);
534 :
535 10 : CHECK((*h1)->IsString());
536 10 : CHECK((*h2)->IsHeapNumber());
537 :
538 5 : CHECK(!WeakPointerCleared);
539 5 : CHECK(!global_handles->IsNearDeath(h2.location()));
540 5 : CHECK(!global_handles->IsNearDeath(h1.location()));
541 :
542 5 : GlobalHandles::Destroy(h1.location());
543 5 : GlobalHandles::Destroy(h2.location());
544 5 : }
545 :
546 25880 : TEST(WeakGlobalUnmodifiedApiHandlesScavenge) {
547 5 : CcTest::InitializeVM();
548 5 : Isolate* isolate = CcTest::i_isolate();
549 5 : LocalContext context;
550 : Factory* factory = isolate->factory();
551 : GlobalHandles* global_handles = isolate->global_handles();
552 :
553 5 : WeakPointerCleared = false;
554 :
555 : Handle<Object> h1;
556 : Handle<Object> h2;
557 :
558 : {
559 : HandleScope scope(isolate);
560 :
561 : // Create an API object that is unmodified.
562 10 : Local<v8::Function> function = FunctionTemplate::New(context->GetIsolate())
563 15 : ->GetFunction(context.local())
564 5 : .ToLocalChecked();
565 : Local<v8::Object> i =
566 5 : function->NewInstance(context.local()).ToLocalChecked();
567 5 : Handle<Object> u = factory->NewNumber(1.12344);
568 :
569 5 : h1 = global_handles->Create(*u);
570 5 : h2 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
571 : }
572 :
573 : std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
574 : GlobalHandles::MakeWeak(
575 : h2.location(), reinterpret_cast<void*>(&handle_and_id),
576 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
577 :
578 5 : CcTest::CollectGarbage(NEW_SPACE);
579 :
580 10 : CHECK((*h1)->IsHeapNumber());
581 5 : CHECK(WeakPointerCleared);
582 5 : CHECK(!global_handles->IsNearDeath(h1.location()));
583 :
584 5 : GlobalHandles::Destroy(h1.location());
585 5 : }
586 :
587 25880 : TEST(WeakGlobalApiHandleModifiedMapScavenge) {
588 5 : CcTest::InitializeVM();
589 5 : Isolate* isolate = CcTest::i_isolate();
590 5 : LocalContext context;
591 : GlobalHandles* global_handles = isolate->global_handles();
592 :
593 5 : WeakPointerCleared = false;
594 :
595 : Handle<Object> h1;
596 :
597 : {
598 : HandleScope scope(isolate);
599 :
600 : // Create an API object which does not have the same map as constructor.
601 5 : auto function_template = FunctionTemplate::New(context->GetIsolate());
602 5 : auto instance_t = function_template->InstanceTemplate();
603 : instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "a",
604 5 : NewStringType::kNormal)
605 : .ToLocalChecked(),
606 15 : v8::Number::New(context->GetIsolate(), 10));
607 : auto function =
608 5 : function_template->GetFunction(context.local()).ToLocalChecked();
609 5 : auto i = function->NewInstance(context.local()).ToLocalChecked();
610 :
611 5 : h1 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
612 : }
613 :
614 : std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
615 : GlobalHandles::MakeWeak(
616 : h1.location(), reinterpret_cast<void*>(&handle_and_id),
617 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
618 :
619 5 : CcTest::CollectGarbage(NEW_SPACE);
620 :
621 5 : CHECK(!WeakPointerCleared);
622 5 : CHECK(!global_handles->IsNearDeath(h1.location()));
623 :
624 5 : GlobalHandles::Destroy(h1.location());
625 5 : }
626 :
627 25880 : TEST(WeakGlobalApiHandleWithElementsScavenge) {
628 5 : CcTest::InitializeVM();
629 5 : Isolate* isolate = CcTest::i_isolate();
630 5 : LocalContext context;
631 : GlobalHandles* global_handles = isolate->global_handles();
632 :
633 5 : WeakPointerCleared = false;
634 :
635 : Handle<Object> h1;
636 :
637 : {
638 : HandleScope scope(isolate);
639 :
640 : // Create an API object which has elements.
641 5 : auto function_template = FunctionTemplate::New(context->GetIsolate());
642 5 : auto instance_t = function_template->InstanceTemplate();
643 : instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "1",
644 5 : NewStringType::kNormal)
645 : .ToLocalChecked(),
646 15 : v8::Number::New(context->GetIsolate(), 10));
647 : instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "2",
648 5 : NewStringType::kNormal)
649 : .ToLocalChecked(),
650 15 : v8::Number::New(context->GetIsolate(), 10));
651 : auto function =
652 5 : function_template->GetFunction(context.local()).ToLocalChecked();
653 5 : auto i = function->NewInstance(context.local()).ToLocalChecked();
654 :
655 5 : h1 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
656 : }
657 :
658 : std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
659 : GlobalHandles::MakeWeak(
660 : h1.location(), reinterpret_cast<void*>(&handle_and_id),
661 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
662 :
663 5 : CcTest::CollectGarbage(NEW_SPACE);
664 :
665 5 : CHECK(!WeakPointerCleared);
666 5 : CHECK(!global_handles->IsNearDeath(h1.location()));
667 :
668 5 : GlobalHandles::Destroy(h1.location());
669 5 : }
670 :
671 25880 : TEST(WeakGlobalHandlesMark) {
672 5 : FLAG_stress_incremental_marking = false;
673 5 : CcTest::InitializeVM();
674 5 : Isolate* isolate = CcTest::i_isolate();
675 : Factory* factory = isolate->factory();
676 : GlobalHandles* global_handles = isolate->global_handles();
677 :
678 5 : WeakPointerCleared = false;
679 :
680 : Handle<Object> h1;
681 : Handle<Object> h2;
682 :
683 : {
684 : HandleScope scope(isolate);
685 :
686 5 : Handle<Object> i = factory->NewStringFromStaticChars("fisk");
687 5 : Handle<Object> u = factory->NewNumber(1.12344);
688 :
689 5 : h1 = global_handles->Create(*i);
690 5 : h2 = global_handles->Create(*u);
691 : }
692 :
693 : // Make sure the objects are promoted.
694 5 : CcTest::CollectGarbage(OLD_SPACE);
695 5 : CcTest::CollectGarbage(NEW_SPACE);
696 10 : CHECK(!Heap::InYoungGeneration(*h1) && !Heap::InYoungGeneration(*h2));
697 :
698 : std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
699 : GlobalHandles::MakeWeak(
700 : h2.location(), reinterpret_cast<void*>(&handle_and_id),
701 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
702 5 : CHECK(!GlobalHandles::IsNearDeath(h1.location()));
703 5 : CHECK(!GlobalHandles::IsNearDeath(h2.location()));
704 :
705 : // Incremental marking potentially marked handles before they turned weak.
706 5 : CcTest::CollectAllGarbage();
707 :
708 10 : CHECK((*h1)->IsString());
709 :
710 5 : CHECK(WeakPointerCleared);
711 5 : CHECK(!GlobalHandles::IsNearDeath(h1.location()));
712 :
713 5 : GlobalHandles::Destroy(h1.location());
714 5 : }
715 :
716 :
717 25880 : TEST(DeleteWeakGlobalHandle) {
718 5 : FLAG_stress_compaction = false;
719 5 : FLAG_stress_incremental_marking = false;
720 5 : CcTest::InitializeVM();
721 5 : Isolate* isolate = CcTest::i_isolate();
722 : Factory* factory = isolate->factory();
723 : GlobalHandles* global_handles = isolate->global_handles();
724 :
725 5 : WeakPointerCleared = false;
726 :
727 : Handle<Object> h;
728 :
729 : {
730 : HandleScope scope(isolate);
731 :
732 5 : Handle<Object> i = factory->NewStringFromStaticChars("fisk");
733 5 : h = global_handles->Create(*i);
734 : }
735 :
736 : std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
737 : GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
738 : &TestWeakGlobalHandleCallback,
739 5 : v8::WeakCallbackType::kParameter);
740 :
741 : // Scavenge does not recognize the weak reference.
742 5 : CcTest::CollectGarbage(NEW_SPACE);
743 :
744 5 : CHECK(!WeakPointerCleared);
745 :
746 : // Mark-compact treats weak reference properly.
747 5 : CcTest::CollectGarbage(OLD_SPACE);
748 :
749 5 : CHECK(WeakPointerCleared);
750 5 : }
751 :
752 25880 : TEST(BytecodeArray) {
753 5 : if (FLAG_never_compact) return;
754 : static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
755 : static const int kRawBytesSize = sizeof(kRawBytes);
756 : static const int kFrameSize = 32;
757 : static const int kParameterCount = 2;
758 :
759 : ManualGCScope manual_gc_scope;
760 5 : FLAG_manual_evacuation_candidates_selection = true;
761 5 : CcTest::InitializeVM();
762 : Isolate* isolate = CcTest::i_isolate();
763 5 : Heap* heap = isolate->heap();
764 : Factory* factory = isolate->factory();
765 : HandleScope scope(isolate);
766 :
767 5 : heap::SimulateFullSpace(heap->old_space());
768 5 : Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
769 30 : for (int i = 0; i < 5; i++) {
770 25 : Handle<Object> number = factory->NewHeapNumber(i);
771 25 : constant_pool->set(i, *number);
772 : }
773 :
774 : // Allocate and initialize BytecodeArray
775 : Handle<BytecodeArray> array = factory->NewBytecodeArray(
776 5 : kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
777 :
778 10 : CHECK(array->IsBytecodeArray());
779 5 : CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
780 5 : CHECK_EQ(array->frame_size(), kFrameSize);
781 5 : CHECK_EQ(array->parameter_count(), kParameterCount);
782 15 : CHECK_EQ(array->constant_pool(), *constant_pool);
783 5 : CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
784 10 : CHECK_GE(array->address() + array->BytecodeArraySize(),
785 : array->GetFirstBytecodeAddress() + array->length());
786 20 : for (int i = 0; i < kRawBytesSize; i++) {
787 60 : CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
788 : kRawBytes[i]);
789 20 : CHECK_EQ(array->get(i), kRawBytes[i]);
790 : }
791 :
792 5 : FixedArray old_constant_pool_address = *constant_pool;
793 :
794 : // Perform a full garbage collection and force the constant pool to be on an
795 : // evacuation candidate.
796 : Page* evac_page = Page::FromHeapObject(*constant_pool);
797 5 : heap::ForceEvacuationCandidate(evac_page);
798 5 : CcTest::CollectAllGarbage();
799 :
800 : // BytecodeArray should survive.
801 5 : CHECK_EQ(array->length(), kRawBytesSize);
802 5 : CHECK_EQ(array->frame_size(), kFrameSize);
803 20 : for (int i = 0; i < kRawBytesSize; i++) {
804 40 : CHECK_EQ(array->get(i), kRawBytes[i]);
805 40 : CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
806 : kRawBytes[i]);
807 : }
808 :
809 : // Constant pool should have been migrated.
810 15 : CHECK_EQ(array->constant_pool(), *constant_pool);
811 10 : CHECK_NE(array->constant_pool(), old_constant_pool_address);
812 : }
813 :
814 25880 : TEST(BytecodeArrayAging) {
815 : static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
816 : static const int kRawBytesSize = sizeof(kRawBytes);
817 : static const int kFrameSize = 32;
818 : static const int kParameterCount = 2;
819 5 : CcTest::InitializeVM();
820 : Isolate* isolate = CcTest::i_isolate();
821 : Factory* factory = isolate->factory();
822 : HandleScope scope(isolate);
823 :
824 : Handle<BytecodeArray> array =
825 : factory->NewBytecodeArray(kRawBytesSize, kRawBytes, kFrameSize,
826 5 : kParameterCount, factory->empty_fixed_array());
827 :
828 5 : CHECK_EQ(BytecodeArray::kFirstBytecodeAge, array->bytecode_age());
829 5 : array->MakeOlder();
830 5 : CHECK_EQ(BytecodeArray::kQuadragenarianBytecodeAge, array->bytecode_age());
831 : array->set_bytecode_age(BytecodeArray::kLastBytecodeAge);
832 5 : array->MakeOlder();
833 5 : CHECK_EQ(BytecodeArray::kLastBytecodeAge, array->bytecode_age());
834 5 : }
835 :
836 : static const char* not_so_random_string_table[] = {
837 : "abstract",
838 : "boolean",
839 : "break",
840 : "byte",
841 : "case",
842 : "catch",
843 : "char",
844 : "class",
845 : "const",
846 : "continue",
847 : "debugger",
848 : "default",
849 : "delete",
850 : "do",
851 : "double",
852 : "else",
853 : "enum",
854 : "export",
855 : "extends",
856 : "false",
857 : "final",
858 : "finally",
859 : "float",
860 : "for",
861 : "function",
862 : "goto",
863 : "if",
864 : "implements",
865 : "import",
866 : "in",
867 : "instanceof",
868 : "int",
869 : "interface",
870 : "long",
871 : "native",
872 : "new",
873 : "null",
874 : "package",
875 : "private",
876 : "protected",
877 : "public",
878 : "return",
879 : "short",
880 : "static",
881 : "super",
882 : "switch",
883 : "synchronized",
884 : "this",
885 : "throw",
886 : "throws",
887 : "transient",
888 : "true",
889 : "try",
890 : "typeof",
891 : "var",
892 : "void",
893 : "volatile",
894 : "while",
895 : "with",
896 : nullptr
897 : };
898 :
899 10 : static void CheckInternalizedStrings(const char** strings) {
900 : Isolate* isolate = CcTest::i_isolate();
901 : Factory* factory = isolate->factory();
902 600 : for (const char* string = *strings; *strings != nullptr;
903 : string = *strings++) {
904 : HandleScope scope(isolate);
905 : Handle<String> a =
906 590 : isolate->factory()->InternalizeUtf8String(CStrVector(string));
907 : // InternalizeUtf8String may return a failure if a GC is needed.
908 1180 : CHECK(a->IsInternalizedString());
909 590 : Handle<String> b = factory->InternalizeUtf8String(string);
910 1770 : CHECK_EQ(*b, *a);
911 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
912 590 : b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
913 1770 : CHECK_EQ(*b, *a);
914 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
915 : }
916 10 : }
917 :
918 :
919 25880 : TEST(StringTable) {
920 5 : CcTest::InitializeVM();
921 :
922 5 : v8::HandleScope sc(CcTest::isolate());
923 5 : CheckInternalizedStrings(not_so_random_string_table);
924 5 : CheckInternalizedStrings(not_so_random_string_table);
925 5 : }
926 :
927 :
928 25880 : TEST(FunctionAllocation) {
929 5 : CcTest::InitializeVM();
930 : Isolate* isolate = CcTest::i_isolate();
931 : Factory* factory = isolate->factory();
932 :
933 5 : v8::HandleScope sc(CcTest::isolate());
934 5 : Handle<String> name = factory->InternalizeUtf8String("theFunction");
935 5 : Handle<JSFunction> function = factory->NewFunctionForTest(name);
936 :
937 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
938 : Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
939 :
940 5 : Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
941 5 : Handle<JSObject> obj = factory->NewJSObject(function);
942 10 : Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
943 15 : CHECK_EQ(Smi::FromInt(23),
944 : *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
945 : // Check that we can add properties to function objects.
946 10 : Object::SetProperty(isolate, function, prop_name, twenty_four).Check();
947 15 : CHECK_EQ(
948 : Smi::FromInt(24),
949 5 : *Object::GetProperty(isolate, function, prop_name).ToHandleChecked());
950 5 : }
951 :
952 :
953 25880 : TEST(ObjectProperties) {
954 5 : CcTest::InitializeVM();
955 : Isolate* isolate = CcTest::i_isolate();
956 : Factory* factory = isolate->factory();
957 :
958 5 : v8::HandleScope sc(CcTest::isolate());
959 : Handle<String> object_string(
960 5 : String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
961 : Handle<Object> object =
962 : Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
963 10 : object_string)
964 10 : .ToHandleChecked();
965 5 : Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
966 5 : Handle<JSObject> obj = factory->NewJSObject(constructor);
967 5 : Handle<String> first = factory->InternalizeUtf8String("first");
968 5 : Handle<String> second = factory->InternalizeUtf8String("second");
969 :
970 : Handle<Smi> one(Smi::FromInt(1), isolate);
971 : Handle<Smi> two(Smi::FromInt(2), isolate);
972 :
973 : // check for empty
974 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
975 :
976 : // add first
977 10 : Object::SetProperty(isolate, obj, first, one).Check();
978 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
979 :
980 : // delete first
981 10 : CHECK(Just(true) ==
982 : JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
983 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
984 :
985 : // add first and then second
986 10 : Object::SetProperty(isolate, obj, first, one).Check();
987 10 : Object::SetProperty(isolate, obj, second, two).Check();
988 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
989 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
990 :
991 : // delete first and then second
992 10 : CHECK(Just(true) ==
993 : JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
994 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
995 10 : CHECK(Just(true) ==
996 : JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
997 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
998 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
999 :
1000 : // add first and then second
1001 10 : Object::SetProperty(isolate, obj, first, one).Check();
1002 10 : Object::SetProperty(isolate, obj, second, two).Check();
1003 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
1004 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
1005 :
1006 : // delete second and then first
1007 10 : CHECK(Just(true) ==
1008 : JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
1009 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
1010 10 : CHECK(Just(true) ==
1011 : JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
1012 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
1013 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
1014 :
1015 : // check string and internalized string match
1016 : const char* string1 = "fisk";
1017 5 : Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
1018 10 : Object::SetProperty(isolate, obj, s1, one).Check();
1019 5 : Handle<String> s1_string = factory->InternalizeUtf8String(string1);
1020 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
1021 :
1022 : // check internalized string and string match
1023 : const char* string2 = "fugl";
1024 5 : Handle<String> s2_string = factory->InternalizeUtf8String(string2);
1025 10 : Object::SetProperty(isolate, obj, s2_string, one).Check();
1026 5 : Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
1027 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
1028 5 : }
1029 :
1030 :
1031 25880 : TEST(JSObjectMaps) {
1032 5 : CcTest::InitializeVM();
1033 : Isolate* isolate = CcTest::i_isolate();
1034 : Factory* factory = isolate->factory();
1035 :
1036 5 : v8::HandleScope sc(CcTest::isolate());
1037 5 : Handle<String> name = factory->InternalizeUtf8String("theFunction");
1038 5 : Handle<JSFunction> function = factory->NewFunctionForTest(name);
1039 :
1040 5 : Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
1041 5 : Handle<JSObject> obj = factory->NewJSObject(function);
1042 10 : Handle<Map> initial_map(function->initial_map(), isolate);
1043 :
1044 : // Set a property.
1045 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
1046 10 : Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
1047 15 : CHECK_EQ(Smi::FromInt(23),
1048 : *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
1049 :
1050 : // Check the map has changed
1051 5 : CHECK(*initial_map != obj->map());
1052 5 : }
1053 :
1054 :
1055 25880 : TEST(JSArray) {
1056 5 : CcTest::InitializeVM();
1057 : Isolate* isolate = CcTest::i_isolate();
1058 : Factory* factory = isolate->factory();
1059 :
1060 5 : v8::HandleScope sc(CcTest::isolate());
1061 5 : Handle<String> name = factory->InternalizeUtf8String("Array");
1062 : Handle<Object> fun_obj =
1063 10 : Object::GetProperty(isolate, CcTest::i_isolate()->global_object(), name)
1064 10 : .ToHandleChecked();
1065 5 : Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
1066 :
1067 : // Allocate the object.
1068 : Handle<Object> element;
1069 5 : Handle<JSObject> object = factory->NewJSObject(function);
1070 5 : Handle<JSArray> array = Handle<JSArray>::cast(object);
1071 : // We just initialized the VM, no heap allocation failure yet.
1072 5 : JSArray::Initialize(array, 0);
1073 :
1074 : // Set array length to 0.
1075 5 : JSArray::SetLength(array, 0);
1076 10 : CHECK_EQ(Smi::kZero, array->length());
1077 : // Must be in fast mode.
1078 10 : CHECK(array->HasSmiOrObjectElements());
1079 :
1080 : // array[length] = name.
1081 10 : Object::SetElement(isolate, array, 0, name, ShouldThrow::kDontThrow).Check();
1082 10 : CHECK_EQ(Smi::FromInt(1), array->length());
1083 10 : element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
1084 15 : CHECK_EQ(*element, *name);
1085 :
1086 : // Set array length with larger than smi value.
1087 5 : JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
1088 :
1089 5 : uint32_t int_length = 0;
1090 10 : CHECK(array->length()->ToArrayIndex(&int_length));
1091 5 : CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
1092 10 : CHECK(array->HasDictionaryElements()); // Must be in slow mode.
1093 :
1094 : // array[length] = name.
1095 10 : Object::SetElement(isolate, array, int_length, name, ShouldThrow::kDontThrow)
1096 10 : .Check();
1097 5 : uint32_t new_int_length = 0;
1098 10 : CHECK(array->length()->ToArrayIndex(&new_int_length));
1099 10 : CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
1100 10 : element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
1101 15 : CHECK_EQ(*element, *name);
1102 10 : element = Object::GetElement(isolate, array, 0).ToHandleChecked();
1103 15 : CHECK_EQ(*element, *name);
1104 5 : }
1105 :
1106 :
1107 25880 : TEST(JSObjectCopy) {
1108 5 : CcTest::InitializeVM();
1109 : Isolate* isolate = CcTest::i_isolate();
1110 : Factory* factory = isolate->factory();
1111 :
1112 5 : v8::HandleScope sc(CcTest::isolate());
1113 : Handle<String> object_string(
1114 5 : String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
1115 : Handle<Object> object =
1116 : Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
1117 10 : object_string)
1118 10 : .ToHandleChecked();
1119 5 : Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
1120 5 : Handle<JSObject> obj = factory->NewJSObject(constructor);
1121 5 : Handle<String> first = factory->InternalizeUtf8String("first");
1122 5 : Handle<String> second = factory->InternalizeUtf8String("second");
1123 :
1124 : Handle<Smi> one(Smi::FromInt(1), isolate);
1125 : Handle<Smi> two(Smi::FromInt(2), isolate);
1126 :
1127 10 : Object::SetProperty(isolate, obj, first, one).Check();
1128 10 : Object::SetProperty(isolate, obj, second, two).Check();
1129 :
1130 10 : Object::SetElement(isolate, obj, 0, first, ShouldThrow::kDontThrow).Check();
1131 10 : Object::SetElement(isolate, obj, 1, second, ShouldThrow::kDontThrow).Check();
1132 :
1133 : // Make the clone.
1134 : Handle<Object> value1, value2;
1135 5 : Handle<JSObject> clone = factory->CopyJSObject(obj);
1136 5 : CHECK(!clone.is_identical_to(obj));
1137 :
1138 10 : value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1139 10 : value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1140 15 : CHECK_EQ(*value1, *value2);
1141 10 : value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1142 10 : value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1143 15 : CHECK_EQ(*value1, *value2);
1144 :
1145 10 : value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
1146 10 : value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
1147 15 : CHECK_EQ(*value1, *value2);
1148 10 : value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
1149 10 : value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
1150 15 : CHECK_EQ(*value1, *value2);
1151 :
1152 : // Flip the values.
1153 10 : Object::SetProperty(isolate, clone, first, two).Check();
1154 10 : Object::SetProperty(isolate, clone, second, one).Check();
1155 :
1156 5 : Object::SetElement(isolate, clone, 0, second, ShouldThrow::kDontThrow)
1157 10 : .Check();
1158 10 : Object::SetElement(isolate, clone, 1, first, ShouldThrow::kDontThrow).Check();
1159 :
1160 10 : value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1161 10 : value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1162 15 : CHECK_EQ(*value1, *value2);
1163 10 : value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1164 10 : value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1165 15 : CHECK_EQ(*value1, *value2);
1166 :
1167 10 : value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
1168 10 : value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
1169 15 : CHECK_EQ(*value1, *value2);
1170 10 : value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
1171 10 : value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
1172 15 : CHECK_EQ(*value1, *value2);
1173 5 : }
1174 :
1175 :
1176 25880 : TEST(StringAllocation) {
1177 5 : CcTest::InitializeVM();
1178 : Isolate* isolate = CcTest::i_isolate();
1179 : Factory* factory = isolate->factory();
1180 :
1181 : const unsigned char chars[] = {0xE5, 0xA4, 0xA7};
1182 510 : for (int length = 0; length < 100; length++) {
1183 500 : v8::HandleScope scope(CcTest::isolate());
1184 500 : char* non_one_byte = NewArray<char>(3 * length + 1);
1185 500 : char* one_byte = NewArray<char>(length + 1);
1186 500 : non_one_byte[3 * length] = 0;
1187 500 : one_byte[length] = 0;
1188 25250 : for (int i = 0; i < length; i++) {
1189 24750 : one_byte[i] = 'a';
1190 24750 : non_one_byte[3 * i] = chars[0];
1191 24750 : non_one_byte[3 * i + 1] = chars[1];
1192 24750 : non_one_byte[3 * i + 2] = chars[2];
1193 : }
1194 : Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1195 500 : Vector<const char>(non_one_byte, 3 * length));
1196 500 : CHECK_EQ(length, non_one_byte_sym->length());
1197 : Handle<String> one_byte_sym =
1198 500 : factory->InternalizeOneByteString(OneByteVector(one_byte, length));
1199 500 : CHECK_EQ(length, one_byte_sym->length());
1200 : Handle<String> non_one_byte_str =
1201 500 : factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
1202 1000 : .ToHandleChecked();
1203 500 : non_one_byte_str->Hash();
1204 500 : CHECK_EQ(length, non_one_byte_str->length());
1205 : Handle<String> one_byte_str =
1206 500 : factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
1207 1000 : .ToHandleChecked();
1208 500 : one_byte_str->Hash();
1209 500 : CHECK_EQ(length, one_byte_str->length());
1210 : DeleteArray(non_one_byte);
1211 : DeleteArray(one_byte);
1212 500 : }
1213 5 : }
1214 :
1215 :
1216 5 : static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1217 : // Count the number of objects found in the heap.
1218 : int found_count = 0;
1219 5 : HeapIterator iterator(heap);
1220 68814 : for (HeapObject obj = iterator.next(); !obj.is_null();
1221 : obj = iterator.next()) {
1222 206412 : for (int i = 0; i < size; i++) {
1223 412824 : if (*objs[i] == obj) {
1224 30 : found_count++;
1225 : }
1226 : }
1227 : }
1228 5 : return found_count;
1229 : }
1230 :
1231 :
1232 25880 : TEST(Iteration) {
1233 5 : CcTest::InitializeVM();
1234 : Isolate* isolate = CcTest::i_isolate();
1235 : Factory* factory = isolate->factory();
1236 5 : v8::HandleScope scope(CcTest::isolate());
1237 :
1238 : // Array of objects to scan the heap for.
1239 : const int objs_count = 6;
1240 35 : Handle<Object> objs[objs_count];
1241 : int next_objs_index = 0;
1242 :
1243 : // Allocate a JS array to OLD_SPACE and NEW_SPACE
1244 5 : objs[next_objs_index++] = factory->NewJSArray(10);
1245 5 : objs[next_objs_index++] = factory->NewJSArray(10, HOLEY_ELEMENTS, TENURED);
1246 :
1247 : // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
1248 5 : objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
1249 : objs[next_objs_index++] =
1250 5 : factory->NewStringFromStaticChars("abcdefghij", TENURED);
1251 :
1252 : // Allocate a large string (for large object space).
1253 : int large_size = kMaxRegularHeapObjectSize + 1;
1254 5 : char* str = new char[large_size];
1255 5 : for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
1256 5 : str[large_size - 1] = '\0';
1257 5 : objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
1258 5 : delete[] str;
1259 :
1260 : // Add a Map object to look for.
1261 : objs[next_objs_index++] =
1262 5 : Handle<Map>(HeapObject::cast(*objs[0])->map(), isolate);
1263 :
1264 : CHECK_EQ(objs_count, next_objs_index);
1265 5 : CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
1266 5 : }
1267 :
1268 25880 : TEST(TestBytecodeFlushing) {
1269 : #ifndef V8_LITE_MODE
1270 5 : FLAG_opt = false;
1271 5 : FLAG_always_opt = false;
1272 5 : i::FLAG_optimize_for_size = false;
1273 : #endif // V8_LITE_MODE
1274 5 : i::FLAG_flush_bytecode = true;
1275 5 : i::FLAG_allow_natives_syntax = true;
1276 :
1277 5 : CcTest::InitializeVM();
1278 5 : v8::Isolate* isolate = CcTest::isolate();
1279 : Isolate* i_isolate = CcTest::i_isolate();
1280 : Factory* factory = i_isolate->factory();
1281 :
1282 : {
1283 5 : v8::HandleScope scope(isolate);
1284 10 : v8::Context::New(isolate)->Enter();
1285 : const char* source =
1286 : "function foo() {"
1287 : " var x = 42;"
1288 : " var y = 42;"
1289 : " var z = x + y;"
1290 : "};"
1291 : "foo()";
1292 5 : Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1293 :
1294 : // This compile will add the code to the compilation cache.
1295 : {
1296 5 : v8::HandleScope scope(isolate);
1297 5 : CompileRun(source);
1298 : }
1299 :
1300 : // Check function is compiled.
1301 : Handle<Object> func_value =
1302 10 : Object::GetProperty(i_isolate, i_isolate->global_object(), foo_name)
1303 10 : .ToHandleChecked();
1304 10 : CHECK(func_value->IsJSFunction());
1305 5 : Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1306 5 : CHECK(function->shared()->is_compiled());
1307 :
1308 : // The code will survive at least two GCs.
1309 5 : CcTest::CollectAllGarbage();
1310 5 : CcTest::CollectAllGarbage();
1311 5 : CHECK(function->shared()->is_compiled());
1312 :
1313 : // Simulate several GCs that use full marking.
1314 : const int kAgingThreshold = 6;
1315 30 : for (int i = 0; i < kAgingThreshold; i++) {
1316 30 : CcTest::CollectAllGarbage();
1317 : }
1318 :
1319 : // foo should no longer be in the compilation cache
1320 5 : CHECK(!function->shared()->is_compiled());
1321 5 : CHECK(!function->is_compiled());
1322 : // Call foo to get it recompiled.
1323 : CompileRun("foo()");
1324 5 : CHECK(function->shared()->is_compiled());
1325 5 : CHECK(function->is_compiled());
1326 : }
1327 5 : }
1328 :
1329 : #ifndef V8_LITE_MODE
1330 :
1331 25879 : TEST(TestOptimizeAfterBytecodeFlushingCandidate) {
1332 4 : FLAG_opt = true;
1333 4 : FLAG_always_opt = false;
1334 4 : i::FLAG_optimize_for_size = false;
1335 4 : i::FLAG_incremental_marking = true;
1336 4 : i::FLAG_flush_bytecode = true;
1337 4 : i::FLAG_allow_natives_syntax = true;
1338 :
1339 4 : CcTest::InitializeVM();
1340 : Isolate* isolate = CcTest::i_isolate();
1341 : Factory* factory = isolate->factory();
1342 4 : v8::HandleScope scope(CcTest::isolate());
1343 : const char* source =
1344 : "function foo() {"
1345 : " var x = 42;"
1346 : " var y = 42;"
1347 : " var z = x + y;"
1348 : "};"
1349 : "foo()";
1350 4 : Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1351 :
1352 : // This compile will add the code to the compilation cache.
1353 : {
1354 4 : v8::HandleScope scope(CcTest::isolate());
1355 4 : CompileRun(source);
1356 : }
1357 :
1358 : // Check function is compiled.
1359 : Handle<Object> func_value =
1360 8 : Object::GetProperty(isolate, isolate->global_object(), foo_name)
1361 8 : .ToHandleChecked();
1362 8 : CHECK(func_value->IsJSFunction());
1363 4 : Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1364 4 : CHECK(function->shared()->is_compiled());
1365 :
1366 : // The code will survive at least two GCs.
1367 4 : CcTest::CollectAllGarbage();
1368 4 : CcTest::CollectAllGarbage();
1369 4 : CHECK(function->shared()->is_compiled());
1370 :
1371 : // Simulate several GCs that use incremental marking.
1372 : const int kAgingThreshold = 6;
1373 24 : for (int i = 0; i < kAgingThreshold; i++) {
1374 24 : heap::SimulateIncrementalMarking(CcTest::heap());
1375 24 : CcTest::CollectAllGarbage();
1376 : }
1377 4 : CHECK(!function->shared()->is_compiled());
1378 4 : CHECK(!function->is_compiled());
1379 :
1380 : // This compile will compile the function again.
1381 : {
1382 4 : v8::HandleScope scope(CcTest::isolate());
1383 4 : CompileRun("foo();");
1384 : }
1385 :
1386 : // Simulate several GCs that use incremental marking but make sure
1387 : // the loop breaks once the function is enqueued as a candidate.
1388 12 : for (int i = 0; i < kAgingThreshold; i++) {
1389 12 : heap::SimulateIncrementalMarking(CcTest::heap());
1390 12 : if (function->shared()->GetBytecodeArray()->IsOld()) break;
1391 8 : CcTest::CollectAllGarbage();
1392 : }
1393 :
1394 : // Force optimization while incremental marking is active and while
1395 : // the function is enqueued as a candidate.
1396 : {
1397 4 : v8::HandleScope scope(CcTest::isolate());
1398 4 : CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1399 : }
1400 :
1401 : // Simulate one final GC and make sure the candidate wasn't flushed.
1402 4 : CcTest::CollectAllGarbage();
1403 4 : CHECK(function->shared()->is_compiled());
1404 4 : CHECK(function->is_compiled());
1405 4 : }
1406 :
1407 : #endif // V8_LITE_MODE
1408 :
1409 25880 : TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
1410 5 : if (!FLAG_incremental_marking) return;
1411 : // Turn off always_opt because it interferes with running the built-in for
1412 : // the last call to g().
1413 5 : FLAG_always_opt = false;
1414 5 : FLAG_allow_natives_syntax = true;
1415 5 : CcTest::InitializeVM();
1416 : Isolate* isolate = CcTest::i_isolate();
1417 : Factory* factory = isolate->factory();
1418 5 : Heap* heap = isolate->heap();
1419 5 : v8::HandleScope scope(CcTest::isolate());
1420 :
1421 : CompileRun(
1422 : "function make_closure(x) {"
1423 : " return function() { return x + 3 };"
1424 : "}"
1425 : "var f = make_closure(5); f();"
1426 : "var g = make_closure(5);");
1427 :
1428 : // Check f is compiled.
1429 5 : Handle<String> f_name = factory->InternalizeUtf8String("f");
1430 : Handle<Object> f_value =
1431 10 : Object::GetProperty(isolate, isolate->global_object(), f_name)
1432 10 : .ToHandleChecked();
1433 5 : Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
1434 5 : CHECK(f_function->is_compiled());
1435 :
1436 : // Check g is not compiled.
1437 5 : Handle<String> g_name = factory->InternalizeUtf8String("g");
1438 : Handle<Object> g_value =
1439 10 : Object::GetProperty(isolate, isolate->global_object(), g_name)
1440 10 : .ToHandleChecked();
1441 5 : Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
1442 5 : CHECK(!g_function->is_compiled());
1443 :
1444 5 : heap::SimulateIncrementalMarking(heap);
1445 : CompileRun("%OptimizeFunctionOnNextCall(f); f();");
1446 :
1447 : // g should now have an optimized function available, unmarked by gc. The
1448 : // CompileLazy built-in will discover it and install it in the closure, and
1449 : // the incremental write barrier should be used.
1450 : CompileRun("g();");
1451 5 : CHECK(g_function->is_compiled());
1452 : }
1453 :
1454 25880 : TEST(CompilationCacheCachingBehavior) {
1455 : // If the compilation cache is turned off, this test is invalid.
1456 5 : if (!FLAG_compilation_cache) {
1457 0 : return;
1458 : }
1459 5 : CcTest::InitializeVM();
1460 5 : Isolate* isolate = CcTest::i_isolate();
1461 : Factory* factory = isolate->factory();
1462 : CompilationCache* compilation_cache = isolate->compilation_cache();
1463 5 : LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1464 :
1465 5 : v8::HandleScope scope(CcTest::isolate());
1466 : const char* raw_source =
1467 : "function foo() {"
1468 : " var x = 42;"
1469 : " var y = 42;"
1470 : " var z = x + y;"
1471 : "};"
1472 : "foo();";
1473 5 : Handle<String> source = factory->InternalizeUtf8String(raw_source);
1474 5 : Handle<Context> native_context = isolate->native_context();
1475 :
1476 : {
1477 5 : v8::HandleScope scope(CcTest::isolate());
1478 5 : CompileRun(raw_source);
1479 : }
1480 :
1481 : // The script should be in the cache now.
1482 : {
1483 5 : v8::HandleScope scope(CcTest::isolate());
1484 : MaybeHandle<SharedFunctionInfo> cached_script =
1485 : compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
1486 : v8::ScriptOriginOptions(true, false),
1487 5 : native_context, language_mode);
1488 5 : CHECK(!cached_script.is_null());
1489 : }
1490 :
1491 : // Check that the compilation cache entry survives at least one GC.
1492 : {
1493 5 : CcTest::CollectAllGarbage();
1494 5 : v8::HandleScope scope(CcTest::isolate());
1495 : MaybeHandle<SharedFunctionInfo> cached_script =
1496 : compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
1497 : v8::ScriptOriginOptions(true, false),
1498 5 : native_context, language_mode);
1499 5 : CHECK(!cached_script.is_null());
1500 :
1501 : // Advance the code age until it is old and ready to be flushed by GC.
1502 : Handle<SharedFunctionInfo> shared = cached_script.ToHandleChecked();
1503 5 : CHECK(shared->HasBytecodeArray());
1504 : const int kAgingThreshold = 6;
1505 30 : for (int i = 0; i < kAgingThreshold; i++) {
1506 30 : shared->GetBytecodeArray()->MakeOlder();
1507 5 : }
1508 : }
1509 :
1510 5 : CcTest::CollectAllGarbage();
1511 :
1512 : {
1513 5 : v8::HandleScope scope(CcTest::isolate());
1514 : // Ensure code aging cleared the entry from the cache.
1515 : MaybeHandle<SharedFunctionInfo> cached_script =
1516 : compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
1517 : v8::ScriptOriginOptions(true, false),
1518 5 : native_context, language_mode);
1519 5 : CHECK(cached_script.is_null());
1520 5 : }
1521 : }
1522 :
1523 :
1524 150 : static void OptimizeEmptyFunction(const char* name) {
1525 : HandleScope scope(CcTest::i_isolate());
1526 : EmbeddedVector<char, 256> source;
1527 : SNPrintF(source,
1528 : "function %s() { return 0; }"
1529 : "%s(); %s();"
1530 : "%%OptimizeFunctionOnNextCall(%s);"
1531 : "%s();",
1532 150 : name, name, name, name, name);
1533 150 : CompileRun(source.start());
1534 150 : }
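// Illustrative note (not part of the original test): for a name such as "f1",
// the script assembled above expands to
//   function f1() { return 0; }f1(); f1();%OptimizeFunctionOnNextCall(f1);f1();
// A minimal standalone sketch of the same formatting step, using the C library
// instead of V8's EmbeddedVector/SNPrintF:
#include <cstddef>
#include <cstdio>

static void FormatOptimizeScript(char* buffer, std::size_t size,
                                 const char* name) {
  // Same format string as OptimizeEmptyFunction(); "%%" renders a literal '%'.
  std::snprintf(buffer, size,
                "function %s() { return 0; }"
                "%s(); %s();"
                "%%OptimizeFunctionOnNextCall(%s);"
                "%s();",
                name, name, name, name, name);
}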
1535 :
1536 :
1537 : // Count the number of native contexts in the weak list of native contexts.
1538 366 : int CountNativeContexts() {
1539 : int count = 0;
1540 366 : Object object = CcTest::heap()->native_contexts_list();
1541 2682 : while (!object->IsUndefined(CcTest::i_isolate())) {
1542 1950 : count++;
1543 1950 : object = Context::cast(object)->next_context_link();
1544 : }
1545 366 : return count;
1546 : }
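// Side note (illustration only): the loop above walks an intrusive singly
// linked list that is threaded through the native contexts themselves and
// terminated by the undefined sentinel. A self-contained sketch of the same
// traversal shape, with a hypothetical node type standing in for Context:
struct WeakListNode {
  WeakListNode* next;  // plays the role of Context::next_context_link()
};

static int CountWeakListNodes(const WeakListNode* head) {
  int count = 0;
  // nullptr stands in for the undefined sentinel that terminates the heap list.
  for (const WeakListNode* node = head; node != nullptr; node = node->next) {
    count++;
  }
  return count;
}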
1547 :
1548 25880 : TEST(TestInternalWeakLists) {
1549 5 : FLAG_always_opt = false;
1550 5 : FLAG_allow_natives_syntax = true;
1551 5 : v8::V8::Initialize();
1552 :
1553 : // Some flags turn Scavenge collections into Mark-sweep collections
1554 : // and hence are incompatible with this test case.
1555 5 : if (FLAG_gc_global || FLAG_stress_compaction ||
1556 : FLAG_stress_incremental_marking)
1557 2 : return;
1558 4 : FLAG_retain_maps_for_n_gc = 0;
1559 :
1560 : static const int kNumTestContexts = 10;
1561 :
1562 60 : Isolate* isolate = CcTest::i_isolate();
1563 : HandleScope scope(isolate);
1564 44 : v8::Local<v8::Context> ctx[kNumTestContexts];
1565 4 : if (!isolate->use_optimizer()) return;
1566 :
1567 3 : CHECK_EQ(0, CountNativeContexts());
1568 :
1569 : // Create a number of global contests which gets linked together.
1570 33 : for (int i = 0; i < kNumTestContexts; i++) {
1571 30 : ctx[i] = v8::Context::New(CcTest::isolate());
1572 :
1573 : // Collect garbage that might have been created by one of the
1574 : // installed extensions.
1575 30 : isolate->compilation_cache()->Clear();
1576 30 : CcTest::CollectAllGarbage();
1577 :
1578 30 : CHECK_EQ(i + 1, CountNativeContexts());
1579 :
1580 60 : ctx[i]->Enter();
1581 :
1582 : // Create a handle scope so no function objects get stuck in the outer
1583 : // handle scope.
1584 : HandleScope scope(isolate);
1585 30 : OptimizeEmptyFunction("f1");
1586 30 : OptimizeEmptyFunction("f2");
1587 30 : OptimizeEmptyFunction("f3");
1588 30 : OptimizeEmptyFunction("f4");
1589 30 : OptimizeEmptyFunction("f5");
1590 :
1591 : // Remove function f1 so that it can be reclaimed below.
1592 : CompileRun("f1=null");
1593 :
1594 : // Scavenge treats these references as strong.
1595 330 : for (int j = 0; j < 10; j++) {
1596 300 : CcTest::CollectGarbage(NEW_SPACE);
1597 : }
1598 :
1599 : // Mark compact handles the weak references.
1600 30 : isolate->compilation_cache()->Clear();
1601 30 : CcTest::CollectAllGarbage();
1602 :
1603 : // Get rid of f3 and f5 in the same way.
1604 : CompileRun("f3=null");
1605 330 : for (int j = 0; j < 10; j++) {
1606 300 : CcTest::CollectGarbage(NEW_SPACE);
1607 : }
1608 30 : CcTest::CollectAllGarbage();
1609 : CompileRun("f5=null");
1610 330 : for (int j = 0; j < 10; j++) {
1611 300 : CcTest::CollectGarbage(NEW_SPACE);
1612 : }
1613 30 : CcTest::CollectAllGarbage();
1614 :
1615 30 : ctx[i]->Exit();
1616 : }
1617 :
1618 : // Force compilation cache cleanup.
1619 3 : CcTest::heap()->NotifyContextDisposed(true);
1620 3 : CcTest::CollectAllGarbage();
1621 :
1622 : // Dispose the native contexts one by one.
1623 33 : for (int i = 0; i < kNumTestContexts; i++) {
1624 : // TODO(dcarney): is there a better way to do this?
1625 30 : i::Address* unsafe = reinterpret_cast<i::Address*>(*ctx[i]);
1626 60 : *unsafe = ReadOnlyRoots(CcTest::heap()).undefined_value()->ptr();
1627 : ctx[i].Clear();
1628 :
1629 : // Scavenge treats these references as strong.
1630 330 : for (int j = 0; j < 10; j++) {
1631 300 : CcTest::CollectGarbage(i::NEW_SPACE);
1632 300 : CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1633 : }
1634 :
1635 : // Mark compact handles the weak references.
1636 30 : CcTest::CollectAllGarbage();
1637 30 : CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1638 : }
1639 :
1640 3 : CHECK_EQ(0, CountNativeContexts());
1641 : }
1642 :
1643 :
1644 25880 : TEST(TestSizeOfRegExpCode) {
1645 5 : if (!FLAG_regexp_optimization) return;
1646 :
1647 5 : v8::V8::Initialize();
1648 :
1649 : Isolate* isolate = CcTest::i_isolate();
1650 : HandleScope scope(isolate);
1651 :
1652 10 : LocalContext context;
1653 :
1654 : // Adjust source below and this check to match
1655 : // RegExpImpl::kRegExpTooLargeToOptimize.
1656 : CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);
1657 :
1658 : // Compile a regexp that is much larger if we are using regexp optimizations.
1659 : CompileRun(
1660 : "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1661 : "var half_size_reg_exp;"
1662 : "while (reg_exp_source.length < 20 * 1024) {"
1663 : " half_size_reg_exp = reg_exp_source;"
1664 : " reg_exp_source = reg_exp_source + reg_exp_source;"
1665 : "}"
1666 : // Flatten string.
1667 : "reg_exp_source.match(/f/);");
1668 :
1669 : // Get initial heap size after several full GCs, which will stabilize
1670 : // the heap size and return with sweeping finished completely.
1671 5 : CcTest::CollectAllAvailableGarbage();
1672 10 : MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1673 5 : if (collector->sweeping_in_progress()) {
1674 5 : collector->EnsureSweepingCompleted();
1675 : }
1676 5 : int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1677 :
1678 : CompileRun("'foo'.match(reg_exp_source);");
1679 5 : CcTest::CollectAllAvailableGarbage();
1680 5 : int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1681 :
1682 : CompileRun("'foo'.match(half_size_reg_exp);");
1683 5 : CcTest::CollectAllAvailableGarbage();
1684 : int size_with_optimized_regexp =
1685 5 : static_cast<int>(CcTest::heap()->SizeOfObjects());
1686 :
1687 5 : int size_of_regexp_code = size_with_regexp - initial_size;
1688 :
1689 : // On some platforms the debug-code flag causes huge amounts of regexp code
1690 : // to be emitted, breaking this test.
1691 5 : if (!FLAG_debug_code) {
1692 5 : CHECK_LE(size_of_regexp_code, 1 * MB);
1693 : }
1694 :
1695 : // Small regexp is half the size, but compiles to more than twice the code
1696 : // due to the optimization steps.
1697 5 : CHECK_GE(size_with_optimized_regexp,
1698 : size_with_regexp + size_of_regexp_code * 2);
1699 : }
1700 :
1701 :
1702 25880 : HEAP_TEST(TestSizeOfObjects) {
1703 5 : v8::V8::Initialize();
1704 : Isolate* isolate = CcTest::i_isolate();
1705 5 : Heap* heap = CcTest::heap();
1706 10 : MarkCompactCollector* collector = heap->mark_compact_collector();
1707 :
1708 : // Get initial heap size after several full GCs, which will stabilize
1709 : // the heap size and return with sweeping finished completely.
1710 5 : CcTest::CollectAllAvailableGarbage();
1711 5 : if (collector->sweeping_in_progress()) {
1712 5 : collector->EnsureSweepingCompleted();
1713 : }
1714 5 : int initial_size = static_cast<int>(heap->SizeOfObjects());
1715 :
1716 : {
1717 : HandleScope scope(isolate);
1718 : // Allocate objects on several different old-space pages so that
1719 : // concurrent sweeper threads will be busy sweeping the old space on
1720 : // subsequent GC runs.
1721 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
1722 : int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1723 505 : for (int i = 1; i <= 100; i++) {
1724 500 : isolate->factory()->NewFixedArray(8192, TENURED);
1725 500 : CHECK_EQ(initial_size + i * filler_size,
1726 : static_cast<int>(heap->SizeOfObjects()));
1727 : }
1728 : }
1729 :
1730 : // The heap size should go back to initial size after a full GC, even
1731 : // though sweeping didn't finish yet.
1732 5 : CcTest::CollectAllGarbage();
1733 : // Normally sweeping would not be complete here, but there are no guarantees.
1734 5 : CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
1735 : // Waiting for sweeper threads should not change heap size.
1736 5 : if (collector->sweeping_in_progress()) {
1737 5 : collector->EnsureSweepingCompleted();
1738 : }
1739 5 : CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
1740 5 : }
1741 :
1742 :
1743 25880 : TEST(TestAlignmentCalculations) {
1744 : // Maximum fill amounts are consistent.
1745 : int maximum_double_misalignment = kDoubleSize - kTaggedSize;
1746 5 : int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1747 5 : CHECK_EQ(0, max_word_fill);
1748 5 : int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1749 5 : CHECK_EQ(maximum_double_misalignment, max_double_fill);
1750 5 : int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1751 5 : CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1752 :
1753 : Address base = kNullAddress;
1754 : int fill = 0;
1755 :
1756 : // Word alignment never requires fill.
1757 5 : fill = Heap::GetFillToAlign(base, kWordAligned);
1758 5 : CHECK_EQ(0, fill);
1759 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kWordAligned);
1760 5 : CHECK_EQ(0, fill);
1761 :
1762 : // No fill is required when address is double aligned.
1763 5 : fill = Heap::GetFillToAlign(base, kDoubleAligned);
1764 5 : CHECK_EQ(0, fill);
1765 : // Fill is required if address is not double aligned.
1766 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleAligned);
1767 5 : CHECK_EQ(maximum_double_misalignment, fill);
1768 : // kDoubleUnaligned has the opposite fill amounts.
1769 5 : fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
1770 5 : CHECK_EQ(maximum_double_misalignment, fill);
1771 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleUnaligned);
1772 5 : CHECK_EQ(0, fill);
1773 5 : }
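// Sketch (assumptions noted inline): the checks above boil down to simple
// padding arithmetic for power-of-two alignments. The fill placed in front of
// an address is the distance to the next aligned boundary, which for the
// double-alignment cases is at most kDoubleSize - kTaggedSize.
#include <cstdint>

static std::uintptr_t FillToAlign(std::uintptr_t address,
                                  std::uintptr_t alignment) {
  // Assumes 'alignment' is a power of two; returns (-address) mod alignment.
  return (alignment - (address & (alignment - 1))) & (alignment - 1);
}
// Example: with 8-byte doubles and 4-byte tagged words, FillToAlign(base + 4, 8)
// is 4 and FillToAlign(base, 8) is 0, matching the kDoubleAligned checks above;
// the kDoubleUnaligned case simply mirrors these two amounts.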
1774 :
1775 0 : static HeapObject NewSpaceAllocateAligned(int size,
1776 : AllocationAlignment alignment) {
1777 0 : Heap* heap = CcTest::heap();
1778 : AllocationResult allocation =
1779 0 : heap->new_space()->AllocateRawAligned(size, alignment);
1780 0 : HeapObject obj;
1781 0 : allocation.To(&obj);
1782 0 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1783 0 : return obj;
1784 : }
1785 :
1786 : // Get new space allocation into the desired alignment.
1787 10 : static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
1788 10 : Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1789 10 : int fill = Heap::GetFillToAlign(*top_addr, alignment);
1790 10 : int allocation = fill + offset;
1791 10 : if (allocation) {
1792 0 : NewSpaceAllocateAligned(allocation, kWordAligned);
1793 : }
1794 10 : return *top_addr;
1795 : }
1796 :
1797 :
1798 25880 : TEST(TestAlignedAllocation) {
1799 : // Double misalignment is 4 on 32-bit platforms or when pointer compression
1800 : // is enabled, 0 on 64-bit ones when pointer compression is disabled.
1801 : const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
1802 5 : Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1803 : Address start;
1804 : HeapObject obj;
1805 : HeapObject filler;
1806 : if (double_misalignment) {
1807 : // Allocate a pointer sized object that must be double aligned at an
1808 : // aligned address.
1809 : start = AlignNewSpace(kDoubleAligned, 0);
1810 : obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1811 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1812 : // There is no filler.
1813 : CHECK_EQ(kTaggedSize, *top_addr - start);
1814 :
1815 : // Allocate a second pointer sized object that must be double aligned at an
1816 : // unaligned address.
1817 : start = AlignNewSpace(kDoubleAligned, kTaggedSize);
1818 : obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1819 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1820 : // There is a filler object before the object.
1821 : filler = HeapObject::FromAddress(start);
1822 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1823 : CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
1824 :
1825 : // Similarly for kDoubleUnaligned.
1826 : start = AlignNewSpace(kDoubleUnaligned, 0);
1827 : obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1828 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1829 : CHECK_EQ(kTaggedSize, *top_addr - start);
1830 : start = AlignNewSpace(kDoubleUnaligned, kTaggedSize);
1831 : obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1832 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1833 : // There is a filler object before the object.
1834 : filler = HeapObject::FromAddress(start);
1835 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1836 : CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
1837 : }
1838 5 : }
1839 :
1840 0 : static HeapObject OldSpaceAllocateAligned(int size,
1841 : AllocationAlignment alignment) {
1842 0 : Heap* heap = CcTest::heap();
1843 : AllocationResult allocation =
1844 0 : heap->old_space()->AllocateRawAligned(size, alignment);
1845 0 : HeapObject obj;
1846 0 : allocation.To(&obj);
1847 0 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1848 0 : return obj;
1849 : }
1850 :
1851 : // Get old space allocation into the desired alignment.
1852 10 : static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
1853 10 : Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
1854 10 : int fill = Heap::GetFillToAlign(*top_addr, alignment);
1855 10 : int allocation = fill + offset;
1856 10 : if (allocation) {
1857 0 : OldSpaceAllocateAligned(allocation, kWordAligned);
1858 : }
1859 10 : Address top = *top_addr;
1860 : // Now force the remaining allocation onto the free list.
1861 10 : CcTest::heap()->old_space()->FreeLinearAllocationArea();
1862 10 : return top;
1863 : }
1864 :
1865 :
1866 : // Test the case where allocation must be done from the free list, so filler
1867 : // may precede or follow the object.
1868 25880 : TEST(TestAlignedOverAllocation) {
1869 10 : Heap* heap = CcTest::heap();
1870 : // The test checks for fillers before and after objects, and requires a
1871 : // fresh page and an empty free list.
1872 5 : heap::AbandonCurrentlyFreeMemory(heap->old_space());
1873 : // Allocate a dummy object to properly set up the linear allocation info.
1874 5 : AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
1875 5 : CHECK(!dummy.IsRetry());
1876 : heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kTaggedSize,
1877 10 : ClearRecordedSlots::kNo);
1878 :
1879 : // Double misalignment is 4 on 32-bit platforms or when pointer compression
1880 : // is enabled, 0 on 64-bit ones when pointer compression is disabled.
1881 : const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
1882 : Address start;
1883 : HeapObject obj;
1884 : HeapObject filler;
1885 : if (double_misalignment) {
1886 : start = AlignOldSpace(kDoubleAligned, 0);
1887 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1888 : // The object is aligned.
1889 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1890 : // Try the opposite alignment case.
1891 : start = AlignOldSpace(kDoubleAligned, kTaggedSize);
1892 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1893 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1894 : filler = HeapObject::FromAddress(start);
1895 : CHECK(obj != filler);
1896 : CHECK(filler->IsFiller());
1897 : CHECK_EQ(kTaggedSize, filler->Size());
1898 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1899 :
1900 : // Similarly for kDoubleUnaligned.
1901 : start = AlignOldSpace(kDoubleUnaligned, 0);
1902 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1903 : // The object is aligned.
1904 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1905 : // Try the opposite alignment case.
1906 : start = AlignOldSpace(kDoubleUnaligned, kTaggedSize);
1907 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1908 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1909 : filler = HeapObject::FromAddress(start);
1910 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1911 : }
1912 5 : }
1913 :
1914 25880 : TEST(HeapNumberAlignment) {
1915 5 : CcTest::InitializeVM();
1916 : Isolate* isolate = CcTest::i_isolate();
1917 : Factory* factory = isolate->factory();
1918 5 : Heap* heap = isolate->heap();
1919 : HandleScope sc(isolate);
1920 :
1921 : const auto required_alignment =
1922 : HeapObject::RequiredAlignment(*factory->heap_number_map());
1923 : const int maximum_misalignment =
1924 5 : Heap::GetMaximumFillToAlign(required_alignment);
1925 :
1926 10 : for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
1927 5 : AlignNewSpace(required_alignment, offset);
1928 5 : Handle<Object> number_new = factory->NewNumber(1.000123);
1929 10 : CHECK(number_new->IsHeapNumber());
1930 5 : CHECK(Heap::InYoungGeneration(*number_new));
1931 5 : CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
1932 : required_alignment));
1933 :
1934 5 : AlignOldSpace(required_alignment, offset);
1935 5 : Handle<Object> number_old = factory->NewNumber(1.000321, TENURED);
1936 10 : CHECK(number_old->IsHeapNumber());
1937 5 : CHECK(heap->InOldSpace(*number_old));
1938 5 : CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
1939 : required_alignment));
1940 : }
1941 5 : }
1942 :
1943 25880 : TEST(MutableHeapNumberAlignment) {
1944 5 : CcTest::InitializeVM();
1945 : Isolate* isolate = CcTest::i_isolate();
1946 : Factory* factory = isolate->factory();
1947 5 : Heap* heap = isolate->heap();
1948 : HandleScope sc(isolate);
1949 :
1950 : const auto required_alignment =
1951 : HeapObject::RequiredAlignment(*factory->mutable_heap_number_map());
1952 : const int maximum_misalignment =
1953 5 : Heap::GetMaximumFillToAlign(required_alignment);
1954 :
1955 10 : for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
1956 5 : AlignNewSpace(required_alignment, offset);
1957 5 : Handle<Object> number_new = factory->NewMutableHeapNumber(1.000123);
1958 10 : CHECK(number_new->IsMutableHeapNumber());
1959 5 : CHECK(Heap::InYoungGeneration(*number_new));
1960 5 : CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
1961 : required_alignment));
1962 :
1963 5 : AlignOldSpace(required_alignment, offset);
1964 : Handle<Object> number_old =
1965 5 : factory->NewMutableHeapNumber(1.000321, TENURED);
1966 10 : CHECK(number_old->IsMutableHeapNumber());
1967 5 : CHECK(heap->InOldSpace(*number_old));
1968 5 : CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
1969 : required_alignment));
1970 : }
1971 5 : }
1972 :
1973 25880 : TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1974 5 : CcTest::InitializeVM();
1975 5 : HeapIterator iterator(CcTest::heap());
1976 5 : intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1977 : intptr_t size_of_objects_2 = 0;
1978 68744 : for (HeapObject obj = iterator.next(); !obj.is_null();
1979 : obj = iterator.next()) {
1980 34367 : if (!obj->IsFreeSpace()) {
1981 34367 : size_of_objects_2 += obj->Size();
1982 : }
1983 : }
1984 : // Delta must be within 5% of the larger result.
1985 : // TODO(gc): Tighten this up by distinguishing between byte
1986 : // arrays that are real and those that merely mark free space
1987 : // on the heap.
1988 5 : if (size_of_objects_1 > size_of_objects_2) {
1989 4 : intptr_t delta = size_of_objects_1 - size_of_objects_2;
1990 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1991 : ", "
1992 : "Iterator: %" V8PRIdPTR
1993 : ", "
1994 : "delta: %" V8PRIdPTR "\n",
1995 4 : size_of_objects_1, size_of_objects_2, delta);
1996 4 : CHECK_GT(size_of_objects_1 / 20, delta);
1997 : } else {
1998 1 : intptr_t delta = size_of_objects_2 - size_of_objects_1;
1999 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
2000 : ", "
2001 : "Iterator: %" V8PRIdPTR
2002 : ", "
2003 : "delta: %" V8PRIdPTR "\n",
2004 1 : size_of_objects_1, size_of_objects_2, delta);
2005 1 : CHECK_GT(size_of_objects_2 / 20, delta);
2006 5 : }
2007 5 : }
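// Side note (illustration only): the tolerance used above, CHECK_GT(size / 20,
// delta), is just "delta is below 5% of the larger measurement". The same
// predicate as a standalone helper:
#include <cstdint>

static bool WithinFivePercent(std::intptr_t a, std::intptr_t b) {
  std::intptr_t larger = a > b ? a : b;
  std::intptr_t delta = a > b ? a - b : b - a;
  return delta < larger / 20;  // 1/20 of the larger value == 5%
}
// E.g. WithinFivePercent(1000000, 960000) is true (40000 < 50000), while
// WithinFivePercent(1000000, 940000) is false (60000 is not < 50000).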
2008 :
2009 25880 : TEST(GrowAndShrinkNewSpace) {
2010 : // Avoid shrinking new space in the GC epilogue. This can happen if
2011 : // allocation throughput samples have been taken while executing the test.
2012 5 : FLAG_predictable = true;
2013 :
2014 5 : CcTest::InitializeVM();
2015 5 : Heap* heap = CcTest::heap();
2016 : NewSpace* new_space = heap->new_space();
2017 :
2018 5 : if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2019 5 : return;
2020 : }
2021 :
2022 : // Make sure we're in a consistent state to start out.
2023 5 : CcTest::CollectAllGarbage();
2024 5 : CcTest::CollectAllGarbage();
2025 5 : new_space->Shrink();
2026 :
2027 : // Explicitly growing should double the space capacity.
2028 : size_t old_capacity, new_capacity;
2029 : old_capacity = new_space->TotalCapacity();
2030 5 : new_space->Grow();
2031 : new_capacity = new_space->TotalCapacity();
2032 5 : CHECK_EQ(2 * old_capacity, new_capacity);
2033 :
2034 : old_capacity = new_space->TotalCapacity();
2035 : {
2036 5 : v8::HandleScope temporary_scope(CcTest::isolate());
2037 5 : heap::SimulateFullSpace(new_space);
2038 : }
2039 : new_capacity = new_space->TotalCapacity();
2040 5 : CHECK_EQ(old_capacity, new_capacity);
2041 :
2042 : // Explicitly shrinking should not affect space capacity.
2043 : old_capacity = new_space->TotalCapacity();
2044 5 : new_space->Shrink();
2045 : new_capacity = new_space->TotalCapacity();
2046 5 : CHECK_EQ(old_capacity, new_capacity);
2047 :
2048 : // Let the scavenger empty the new space.
2049 5 : CcTest::CollectGarbage(NEW_SPACE);
2050 5 : CHECK_LE(new_space->Size(), old_capacity);
2051 :
2052 : // Explicitly shrinking should halve the space capacity.
2053 : old_capacity = new_space->TotalCapacity();
2054 5 : new_space->Shrink();
2055 : new_capacity = new_space->TotalCapacity();
2056 5 : CHECK_EQ(old_capacity, 2 * new_capacity);
2057 :
2058 : // Consecutive shrinking should not affect space capacity.
2059 : old_capacity = new_space->TotalCapacity();
2060 5 : new_space->Shrink();
2061 5 : new_space->Shrink();
2062 5 : new_space->Shrink();
2063 : new_capacity = new_space->TotalCapacity();
2064 5 : CHECK_EQ(old_capacity, new_capacity);
2065 : }
2066 :
2067 25880 : TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2068 5 : CcTest::InitializeVM();
2069 10 : Heap* heap = CcTest::heap();
2070 5 : if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2071 0 : return;
2072 : }
2073 :
2074 5 : v8::HandleScope scope(CcTest::isolate());
2075 : NewSpace* new_space = heap->new_space();
2076 : size_t old_capacity, new_capacity;
2077 : old_capacity = new_space->TotalCapacity();
2078 5 : new_space->Grow();
2079 : new_capacity = new_space->TotalCapacity();
2080 5 : CHECK_EQ(2 * old_capacity, new_capacity);
2081 : {
2082 5 : v8::HandleScope temporary_scope(CcTest::isolate());
2083 5 : heap::SimulateFullSpace(new_space);
2084 : }
2085 5 : CcTest::CollectAllAvailableGarbage();
2086 : new_capacity = new_space->TotalCapacity();
2087 5 : CHECK_EQ(old_capacity, new_capacity);
2088 : }
2089 :
2090 60 : static int NumberOfGlobalObjects() {
2091 : int count = 0;
2092 60 : HeapIterator iterator(CcTest::heap());
2093 795940 : for (HeapObject obj = iterator.next(); !obj.is_null();
2094 : obj = iterator.next()) {
2095 397910 : if (obj->IsJSGlobalObject()) count++;
2096 : }
2097 60 : return count;
2098 : }
2099 :
2100 :
2101 : // Test that we don't embed maps from foreign contexts into
2102 : // optimized code.
2103 25880 : TEST(LeakNativeContextViaMap) {
2104 5 : FLAG_allow_natives_syntax = true;
2105 5 : v8::Isolate* isolate = CcTest::isolate();
2106 5 : v8::HandleScope outer_scope(isolate);
2107 : v8::Persistent<v8::Context> ctx1p;
2108 : v8::Persistent<v8::Context> ctx2p;
2109 : {
2110 5 : v8::HandleScope scope(isolate);
2111 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
2112 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
2113 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2114 : }
2115 :
2116 5 : CcTest::CollectAllAvailableGarbage();
2117 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2118 :
2119 : {
2120 5 : v8::HandleScope inner_scope(isolate);
2121 : CompileRun("var v = {x: 42}");
2122 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2123 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2124 : v8::Local<v8::Value> v =
2125 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2126 5 : ctx2->Enter();
2127 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2128 : v8::Local<v8::Value> res = CompileRun(
2129 : "function f() { return o.x; }"
2130 : "for (var i = 0; i < 10; ++i) f();"
2131 : "%OptimizeFunctionOnNextCall(f);"
2132 : "f();");
2133 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2134 25 : CHECK(ctx2->Global()
2135 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2136 : .FromJust());
2137 5 : ctx2->Exit();
2138 5 : v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2139 : ctx1p.Reset();
2140 5 : isolate->ContextDisposedNotification();
2141 : }
2142 5 : CcTest::CollectAllAvailableGarbage();
2143 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2144 : ctx2p.Reset();
2145 5 : CcTest::CollectAllAvailableGarbage();
2146 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2147 5 : }
2148 :
2149 :
2150 : // Test that we don't embed functions from foreign contexts into
2151 : // optimized code.
2152 25880 : TEST(LeakNativeContextViaFunction) {
2153 5 : FLAG_allow_natives_syntax = true;
2154 5 : v8::Isolate* isolate = CcTest::isolate();
2155 5 : v8::HandleScope outer_scope(isolate);
2156 : v8::Persistent<v8::Context> ctx1p;
2157 : v8::Persistent<v8::Context> ctx2p;
2158 : {
2159 5 : v8::HandleScope scope(isolate);
2160 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
2161 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
2162 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2163 : }
2164 :
2165 5 : CcTest::CollectAllAvailableGarbage();
2166 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2167 :
2168 : {
2169 5 : v8::HandleScope inner_scope(isolate);
2170 : CompileRun("var v = function() { return 42; }");
2171 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2172 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2173 : v8::Local<v8::Value> v =
2174 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2175 5 : ctx2->Enter();
2176 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2177 : v8::Local<v8::Value> res = CompileRun(
2178 : "function f(x) { return x(); }"
2179 : "for (var i = 0; i < 10; ++i) f(o);"
2180 : "%OptimizeFunctionOnNextCall(f);"
2181 : "f(o);");
2182 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2183 25 : CHECK(ctx2->Global()
2184 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2185 : .FromJust());
2186 5 : ctx2->Exit();
2187 5 : ctx1->Exit();
2188 : ctx1p.Reset();
2189 5 : isolate->ContextDisposedNotification();
2190 : }
2191 5 : CcTest::CollectAllAvailableGarbage();
2192 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2193 : ctx2p.Reset();
2194 5 : CcTest::CollectAllAvailableGarbage();
2195 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2196 5 : }
2197 :
2198 :
2199 25880 : TEST(LeakNativeContextViaMapKeyed) {
2200 5 : FLAG_allow_natives_syntax = true;
2201 5 : v8::Isolate* isolate = CcTest::isolate();
2202 5 : v8::HandleScope outer_scope(isolate);
2203 : v8::Persistent<v8::Context> ctx1p;
2204 : v8::Persistent<v8::Context> ctx2p;
2205 : {
2206 5 : v8::HandleScope scope(isolate);
2207 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
2208 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
2209 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2210 : }
2211 :
2212 5 : CcTest::CollectAllAvailableGarbage();
2213 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2214 :
2215 : {
2216 5 : v8::HandleScope inner_scope(isolate);
2217 : CompileRun("var v = [42, 43]");
2218 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2219 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2220 : v8::Local<v8::Value> v =
2221 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2222 5 : ctx2->Enter();
2223 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2224 : v8::Local<v8::Value> res = CompileRun(
2225 : "function f() { return o[0]; }"
2226 : "for (var i = 0; i < 10; ++i) f();"
2227 : "%OptimizeFunctionOnNextCall(f);"
2228 : "f();");
2229 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2230 25 : CHECK(ctx2->Global()
2231 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2232 : .FromJust());
2233 5 : ctx2->Exit();
2234 5 : ctx1->Exit();
2235 : ctx1p.Reset();
2236 5 : isolate->ContextDisposedNotification();
2237 : }
2238 5 : CcTest::CollectAllAvailableGarbage();
2239 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2240 : ctx2p.Reset();
2241 5 : CcTest::CollectAllAvailableGarbage();
2242 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2243 5 : }
2244 :
2245 :
2246 25880 : TEST(LeakNativeContextViaMapProto) {
2247 5 : FLAG_allow_natives_syntax = true;
2248 5 : v8::Isolate* isolate = CcTest::isolate();
2249 5 : v8::HandleScope outer_scope(isolate);
2250 : v8::Persistent<v8::Context> ctx1p;
2251 : v8::Persistent<v8::Context> ctx2p;
2252 : {
2253 5 : v8::HandleScope scope(isolate);
2254 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
2255 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
2256 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2257 : }
2258 :
2259 5 : CcTest::CollectAllAvailableGarbage();
2260 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2261 :
2262 : {
2263 5 : v8::HandleScope inner_scope(isolate);
2264 : CompileRun("var v = { y: 42}");
2265 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2266 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2267 : v8::Local<v8::Value> v =
2268 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2269 5 : ctx2->Enter();
2270 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2271 : v8::Local<v8::Value> res = CompileRun(
2272 : "function f() {"
2273 : " var p = {x: 42};"
2274 : " p.__proto__ = o;"
2275 : " return p.x;"
2276 : "}"
2277 : "for (var i = 0; i < 10; ++i) f();"
2278 : "%OptimizeFunctionOnNextCall(f);"
2279 : "f();");
2280 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2281 25 : CHECK(ctx2->Global()
2282 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2283 : .FromJust());
2284 5 : ctx2->Exit();
2285 5 : ctx1->Exit();
2286 : ctx1p.Reset();
2287 5 : isolate->ContextDisposedNotification();
2288 : }
2289 5 : CcTest::CollectAllAvailableGarbage();
2290 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2291 : ctx2p.Reset();
2292 5 : CcTest::CollectAllAvailableGarbage();
2293 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2294 5 : }
2295 :
2296 :
2297 25880 : TEST(InstanceOfStubWriteBarrier) {
2298 6 : if (!FLAG_incremental_marking) return;
2299 : ManualGCScope manual_gc_scope;
2300 5 : FLAG_allow_natives_syntax = true;
2301 : #ifdef VERIFY_HEAP
2302 : FLAG_verify_heap = true;
2303 : #endif
2304 :
2305 5 : CcTest::InitializeVM();
2306 5 : if (!CcTest::i_isolate()->use_optimizer()) return;
2307 4 : if (FLAG_force_marking_deque_overflows) return;
2308 8 : v8::HandleScope outer_scope(CcTest::isolate());
2309 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2310 :
2311 : {
2312 4 : v8::HandleScope scope(CcTest::isolate());
2313 : CompileRun(
2314 : "function foo () { }"
2315 : "function mkbar () { return new (new Function(\"\")) (); }"
2316 : "function f (x) { return (x instanceof foo); }"
2317 : "function g () { f(mkbar()); }"
2318 : "f(new foo()); f(new foo());"
2319 : "%OptimizeFunctionOnNextCall(f);"
2320 4 : "f(new foo()); g();");
2321 : }
2322 :
2323 4 : IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2324 4 : marking->Stop();
2325 : CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2326 4 : i::GarbageCollectionReason::kTesting);
2327 :
2328 : i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2329 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2330 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2331 :
2332 4 : CHECK(f->IsOptimized());
2333 :
2334 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
2335 :
2336 : const double kStepSizeInMs = 100;
2337 80 : while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
2338 : // Discard any pending GC requests otherwise we will get GC when we enter
2339 : // code below.
2340 : marking->V8Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2341 24 : StepOrigin::kV8);
2342 : }
2343 :
2344 4 : CHECK(marking->IsMarking());
2345 :
2346 : {
2347 4 : v8::HandleScope scope(CcTest::isolate());
2348 4 : v8::Local<v8::Object> global = CcTest::global();
2349 : v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
2350 12 : global->Get(ctx, v8_str("g")).ToLocalChecked());
2351 8 : g->Call(ctx, global, 0, nullptr).ToLocalChecked();
2352 : }
2353 :
2354 4 : CcTest::heap()->incremental_marking()->set_should_hurry(true);
2355 4 : CcTest::CollectGarbage(OLD_SPACE);
2356 : }
2357 :
2358 25880 : HEAP_TEST(GCFlags) {
2359 10 : if (!FLAG_incremental_marking) return;
2360 5 : CcTest::InitializeVM();
2361 5 : Heap* heap = CcTest::heap();
2362 :
2363 : heap->set_current_gc_flags(Heap::kNoGCFlags);
2364 5 : CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2365 :
2366 : // Check whether we appropriately reset flags after GC.
2367 : CcTest::heap()->CollectAllGarbage(Heap::kReduceMemoryFootprintMask,
2368 5 : GarbageCollectionReason::kTesting);
2369 5 : CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2370 :
2371 5 : MarkCompactCollector* collector = heap->mark_compact_collector();
2372 5 : if (collector->sweeping_in_progress()) {
2373 5 : collector->EnsureSweepingCompleted();
2374 : }
2375 :
2376 : IncrementalMarking* marking = heap->incremental_marking();
2377 5 : marking->Stop();
2378 : heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
2379 5 : i::GarbageCollectionReason::kTesting);
2380 5 : CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2381 :
2382 5 : CcTest::CollectGarbage(NEW_SPACE);
2383 : // NewSpace scavenges should not overwrite the flags.
2384 5 : CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2385 :
2386 5 : CcTest::CollectAllGarbage();
2387 5 : CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2388 : }
2389 :
2390 25880 : HEAP_TEST(Regress845060) {
2391 : // Regression test for crbug.com/845060, where a raw pointer to a string's
2392 : // data was kept across an allocation. If the allocation causes GC and
2393 : // moves the string, such raw pointers become invalid.
2394 5 : FLAG_allow_natives_syntax = true;
2395 5 : FLAG_stress_incremental_marking = false;
2396 5 : FLAG_stress_compaction = false;
2397 5 : CcTest::InitializeVM();
2398 5 : LocalContext context;
2399 10 : v8::HandleScope scope(CcTest::isolate());
2400 10 : Heap* heap = CcTest::heap();
2401 :
2402 : // Preparation: create a string in new space.
2403 : Local<Value> str = CompileRun("var str = (new Array(10000)).join('x'); str");
2404 5 : CHECK(Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
2405 :
2406 : // Idle incremental marking sets the "kReduceMemoryFootprint" flag, which
2407 : // causes from_space to be unmapped after scavenging.
2408 5 : heap->StartIdleIncrementalMarking(GarbageCollectionReason::kTesting);
2409 5 : CHECK(heap->ShouldReduceMemory());
2410 :
2411 : // Run the test (which allocates results) until the original string has been
2412 : // promoted to old space. Unmapping of from_space causes accesses to any
2413 : // stale raw pointers to crash.
2414 : CompileRun("while (%InNewSpace(str)) { str.split(''); }");
2415 10 : CHECK(!Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
2416 5 : }
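// Side note (illustration only, standard C++ rather than V8 internals): the
// bug class exercised above is a raw interior pointer kept across an operation
// that may move the backing store. The same hazard with std::vector:
#include <vector>

static int StaleInteriorPointerExample() {
  std::vector<int> data(4, 7);
  int* raw = data.data();            // raw pointer into the current storage
  data.resize(data.capacity() + 1);  // may reallocate and move the elements
  // Dereferencing 'raw' here would be undefined behavior, just as using a
  // cached string-data pointer after GC moved the string crashes above.
  (void)raw;
  return data[0];                    // always re-read through the owner
}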
2417 :
2418 25880 : TEST(IdleNotificationFinishMarking) {
2419 10 : if (!FLAG_incremental_marking) return;
2420 : ManualGCScope manual_gc_scope;
2421 5 : FLAG_allow_natives_syntax = true;
2422 5 : CcTest::InitializeVM();
2423 5 : const int initial_gc_count = CcTest::heap()->gc_count();
2424 5 : heap::SimulateFullSpace(CcTest::heap()->old_space());
2425 5 : IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2426 5 : marking->Stop();
2427 : CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2428 5 : i::GarbageCollectionReason::kTesting);
2429 :
2430 10 : CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
2431 :
2432 : const double kStepSizeInMs = 100;
2433 30 : do {
2434 : marking->V8Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2435 30 : StepOrigin::kV8);
2436 : } while (
2437 60 : !CcTest::heap()->mark_compact_collector()->marking_worklist()->IsEmpty());
2438 :
2439 : marking->SetWeakClosureWasOverApproximatedForTesting(true);
2440 :
2441 : // The next idle notification has to finish incremental marking.
2442 : const double kLongIdleTime = 1000.0;
2443 : CcTest::isolate()->IdleNotificationDeadline(
2444 15 : (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2445 : static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2446 5 : kLongIdleTime);
2447 10 : CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
2448 : }
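// Side note (illustration only): the deadline passed to
// IdleNotificationDeadline above is "current monotonic time in seconds plus a
// generous idle budget". The same arithmetic with std::chrono instead of
// v8::base::TimeTicks:
#include <chrono>

static double IdleDeadlineInSeconds(double idle_budget_in_seconds) {
  using std::chrono::duration_cast;
  using std::chrono::microseconds;
  using std::chrono::steady_clock;
  const double now_in_seconds =
      duration_cast<microseconds>(steady_clock::now().time_since_epoch())
          .count() /
      1e6;  // microseconds -> seconds, mirroring kMicrosecondsPerSecond
  return now_in_seconds + idle_budget_in_seconds;
}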
2449 :
2450 :
2451 : // Test that HAllocateObject will always return an object in new-space.
2452 25880 : TEST(OptimizedAllocationAlwaysInNewSpace) {
2453 5 : FLAG_allow_natives_syntax = true;
2454 5 : CcTest::InitializeVM();
2455 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2456 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2457 : FLAG_stress_incremental_marking)
2458 : return;
2459 2 : v8::HandleScope scope(CcTest::isolate());
2460 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2461 2 : heap::SimulateFullSpace(CcTest::heap()->new_space());
2462 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2463 : v8::Local<v8::Value> res = CompileRun(
2464 : "function c(x) {"
2465 : " this.x = x;"
2466 : " for (var i = 0; i < 32; i++) {"
2467 : " this['x' + i] = x;"
2468 : " }"
2469 : "}"
2470 : "function f(x) { return new c(x); };"
2471 : "f(1); f(2); f(3);"
2472 : "%OptimizeFunctionOnNextCall(f);"
2473 : "f(4);");
2474 :
2475 8 : CHECK_EQ(4, res.As<v8::Object>()
2476 : ->GetRealNamedProperty(ctx, v8_str("x"))
2477 : .ToLocalChecked()
2478 : ->Int32Value(ctx)
2479 : .FromJust());
2480 :
2481 : i::Handle<JSReceiver> o =
2482 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2483 :
2484 4 : CHECK(Heap::InYoungGeneration(*o));
2485 : }
2486 :
2487 :
2488 25880 : TEST(OptimizedPretenuringAllocationFolding) {
2489 5 : FLAG_allow_natives_syntax = true;
2490 5 : FLAG_expose_gc = true;
2491 5 : CcTest::InitializeVM();
2492 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2493 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2494 : FLAG_stress_incremental_marking)
2495 : return;
2496 2 : v8::HandleScope scope(CcTest::isolate());
2497 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2498 : // Grow new space until maximum capacity reached.
2499 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2500 8 : CcTest::heap()->new_space()->Grow();
2501 : }
2502 :
2503 : i::ScopedVector<char> source(1024);
2504 : i::SNPrintF(source,
2505 : "var number_elements = %d;"
2506 : "var elements = new Array();"
2507 : "function f() {"
2508 : " for (var i = 0; i < number_elements; i++) {"
2509 : " elements[i] = [[{}], [1.1]];"
2510 : " }"
2511 : " return elements[number_elements-1]"
2512 : "};"
2513 : "f(); gc();"
2514 : "f(); f();"
2515 : "%%OptimizeFunctionOnNextCall(f);"
2516 : "f();",
2517 2 : kPretenureCreationCount);
2518 :
2519 : v8::Local<v8::Value> res = CompileRun(source.start());
2520 :
2521 : v8::Local<v8::Value> int_array =
2522 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2523 : i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2524 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2525 : v8::Local<v8::Value> double_array =
2526 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2527 : i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2528 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2529 :
2530 : i::Handle<JSReceiver> o =
2531 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2532 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2533 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2534 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2535 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2536 6 : CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2537 : }
2538 :
2539 :
2540 25880 : TEST(OptimizedPretenuringObjectArrayLiterals) {
2541 5 : FLAG_allow_natives_syntax = true;
2542 5 : FLAG_expose_gc = true;
2543 5 : CcTest::InitializeVM();
2544 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2545 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2546 : FLAG_stress_incremental_marking) {
2547 : return;
2548 : }
2549 2 : v8::HandleScope scope(CcTest::isolate());
2550 :
2551 : // Grow new space until maximum capacity reached.
2552 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2553 8 : CcTest::heap()->new_space()->Grow();
2554 : }
2555 :
2556 : i::ScopedVector<char> source(1024);
2557 : i::SNPrintF(source,
2558 : "var number_elements = %d;"
2559 : "var elements = new Array(number_elements);"
2560 : "function f() {"
2561 : " for (var i = 0; i < number_elements; i++) {"
2562 : " elements[i] = [{}, {}, {}];"
2563 : " }"
2564 : " return elements[number_elements - 1];"
2565 : "};"
2566 : "f(); gc();"
2567 : "f(); f();"
2568 : "%%OptimizeFunctionOnNextCall(f);"
2569 : "f();",
2570 2 : kPretenureCreationCount);
2571 :
2572 : v8::Local<v8::Value> res = CompileRun(source.start());
2573 :
2574 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2575 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2576 :
2577 4 : CHECK(CcTest::heap()->InOldSpace(o->elements()));
2578 6 : CHECK(CcTest::heap()->InOldSpace(*o));
2579 : }
2580 :
2581 25880 : TEST(OptimizedPretenuringNestedInObjectProperties) {
2582 5 : FLAG_allow_natives_syntax = true;
2583 5 : FLAG_expose_gc = true;
2584 5 : CcTest::InitializeVM();
2585 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2586 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2587 : FLAG_stress_incremental_marking) {
2588 : return;
2589 : }
2590 2 : v8::HandleScope scope(CcTest::isolate());
2591 :
2592 : // Grow new space until maximum capacity reached.
2593 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2594 8 : CcTest::heap()->new_space()->Grow();
2595 : }
2596 :
2597 : // Keep the nested literal alive while its root is freed
2598 : i::ScopedVector<char> source(1024);
2599 : i::SNPrintF(source,
2600 : "let number_elements = %d;"
2601 : "let elements = new Array(number_elements);"
2602 : "function f() {"
2603 : " for (let i = 0; i < number_elements; i++) {"
2604 : " let l = {a: {c: 2.2, d: {e: 3.3}}, b: 1.1}; "
2605 : " elements[i] = l.a;"
2606 : " }"
2607 : " return elements[number_elements-1];"
2608 : "};"
2609 : "f(); gc(); gc();"
2610 : "f(); f();"
2611 : "%%OptimizeFunctionOnNextCall(f);"
2612 : "f();",
2613 2 : kPretenureCreationCount);
2614 :
2615 : v8::Local<v8::Value> res = CompileRun(source.start());
2616 :
2617 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2618 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2619 :
2620 : // Nested literal sites are only pretenured if the top-level
2621 : // literal is pretenured.
2622 4 : CHECK(Heap::InYoungGeneration(*o));
2623 : }
2624 :
2625 25880 : TEST(OptimizedPretenuringMixedInObjectProperties) {
2626 5 : FLAG_allow_natives_syntax = true;
2627 5 : FLAG_expose_gc = true;
2628 5 : CcTest::InitializeVM();
2629 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2630 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2631 : FLAG_stress_incremental_marking)
2632 : return;
2633 2 : v8::HandleScope scope(CcTest::isolate());
2634 :
2635 : // Grow new space until maximum capacity reached.
2636 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2637 8 : CcTest::heap()->new_space()->Grow();
2638 : }
2639 :
2640 :
2641 : i::ScopedVector<char> source(1024);
2642 : i::SNPrintF(source,
2643 : "var number_elements = %d;"
2644 : "var elements = new Array(number_elements);"
2645 : "function f() {"
2646 : " for (var i = 0; i < number_elements; i++) {"
2647 : " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2648 : " }"
2649 : " return elements[number_elements - 1];"
2650 : "};"
2651 : "f(); gc();"
2652 : "f(); f();"
2653 : "%%OptimizeFunctionOnNextCall(f);"
2654 : "f();",
2655 2 : kPretenureCreationCount);
2656 :
2657 : v8::Local<v8::Value> res = CompileRun(source.start());
2658 :
2659 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2660 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2661 :
2662 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2663 2 : FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2664 2 : FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2665 4 : CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
2666 2 : if (!o->IsUnboxedDoubleField(idx2)) {
2667 0 : CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
2668 : } else {
2669 2 : CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
2670 : }
2671 :
2672 4 : JSObject inner_object = JSObject::cast(o->RawFastPropertyAt(idx1));
2673 4 : CHECK(CcTest::heap()->InOldSpace(inner_object));
2674 2 : if (!inner_object->IsUnboxedDoubleField(idx1)) {
2675 0 : CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
2676 : } else {
2677 2 : CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
2678 : }
2679 6 : CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
2680 : }
2681 :
2682 :
2683 25880 : TEST(OptimizedPretenuringDoubleArrayProperties) {
2684 5 : FLAG_allow_natives_syntax = true;
2685 5 : FLAG_expose_gc = true;
2686 5 : CcTest::InitializeVM();
2687 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2688 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2689 : FLAG_stress_incremental_marking)
2690 : return;
2691 2 : v8::HandleScope scope(CcTest::isolate());
2692 :
2693 : // Grow new space until maximum capacity reached.
2694 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2695 8 : CcTest::heap()->new_space()->Grow();
2696 : }
2697 :
2698 : i::ScopedVector<char> source(1024);
2699 : i::SNPrintF(source,
2700 : "var number_elements = %d;"
2701 : "var elements = new Array(number_elements);"
2702 : "function f() {"
2703 : " for (var i = 0; i < number_elements; i++) {"
2704 : " elements[i] = {a: 1.1, b: 2.2};"
2705 : " }"
2706 : " return elements[i - 1];"
2707 : "};"
2708 : "f(); gc();"
2709 : "f(); f();"
2710 : "%%OptimizeFunctionOnNextCall(f);"
2711 : "f();",
2712 2 : kPretenureCreationCount);
2713 :
2714 : v8::Local<v8::Value> res = CompileRun(source.start());
2715 :
2716 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2717 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2718 :
2719 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2720 8 : CHECK_EQ(o->property_array(),
2721 2 : ReadOnlyRoots(CcTest::heap()).empty_property_array());
2722 : }
2723 :
2724 :
2725 25880 : TEST(OptimizedPretenuringdoubleArrayLiterals) {
2726 5 : FLAG_allow_natives_syntax = true;
2727 5 : FLAG_expose_gc = true;
2728 5 : CcTest::InitializeVM();
2729 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2730 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2731 : FLAG_stress_incremental_marking)
2732 : return;
2733 2 : v8::HandleScope scope(CcTest::isolate());
2734 :
2735 : // Grow new space until maximum capacity reached.
2736 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2737 8 : CcTest::heap()->new_space()->Grow();
2738 : }
2739 :
2740 : i::ScopedVector<char> source(1024);
2741 : i::SNPrintF(source,
2742 : "var number_elements = %d;"
2743 : "var elements = new Array(number_elements);"
2744 : "function f() {"
2745 : " for (var i = 0; i < number_elements; i++) {"
2746 : " elements[i] = [1.1, 2.2, 3.3];"
2747 : " }"
2748 : " return elements[number_elements - 1];"
2749 : "};"
2750 : "f(); gc();"
2751 : "f(); f();"
2752 : "%%OptimizeFunctionOnNextCall(f);"
2753 : "f();",
2754 2 : kPretenureCreationCount);
2755 :
2756 : v8::Local<v8::Value> res = CompileRun(source.start());
2757 :
2758 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2759 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2760 :
2761 4 : CHECK(CcTest::heap()->InOldSpace(o->elements()));
2762 6 : CHECK(CcTest::heap()->InOldSpace(*o));
2763 : }
2764 :
2765 :
2766 25880 : TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2767 5 : FLAG_allow_natives_syntax = true;
2768 5 : FLAG_expose_gc = true;
2769 5 : CcTest::InitializeVM();
2770 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2771 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2772 : FLAG_stress_incremental_marking)
2773 : return;
2774 2 : v8::HandleScope scope(CcTest::isolate());
2775 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2776 : // Grow new space until maximum capacity reached.
2777 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2778 8 : CcTest::heap()->new_space()->Grow();
2779 : }
2780 :
2781 : i::ScopedVector<char> source(1024);
2782 : i::SNPrintF(source,
2783 : "var number_elements = %d;"
2784 : "var elements = new Array(number_elements);"
2785 : "function f() {"
2786 : " for (var i = 0; i < number_elements; i++) {"
2787 : " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2788 : " }"
2789 : " return elements[number_elements - 1];"
2790 : "};"
2791 : "f(); gc();"
2792 : "f(); f();"
2793 : "%%OptimizeFunctionOnNextCall(f);"
2794 : "f();",
2795 2 : kPretenureCreationCount);
2796 :
2797 : v8::Local<v8::Value> res = CompileRun(source.start());
2798 :
2799 : v8::Local<v8::Value> int_array =
2800 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2801 : i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2802 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2803 : v8::Local<v8::Value> double_array =
2804 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2805 : i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2806 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2807 :
2808 : Handle<JSObject> o = Handle<JSObject>::cast(
2809 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2810 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2811 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2812 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2813 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2814 6 : CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2815 : }
2816 :
2817 :
2818 25880 : TEST(OptimizedPretenuringNestedObjectLiterals) {
2819 5 : FLAG_allow_natives_syntax = true;
2820 5 : FLAG_expose_gc = true;
2821 5 : CcTest::InitializeVM();
2822 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2823 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2824 : FLAG_stress_incremental_marking)
2825 : return;
2826 2 : v8::HandleScope scope(CcTest::isolate());
2827 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2828 : // Grow new space until maximum capacity reached.
2829 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2830 8 : CcTest::heap()->new_space()->Grow();
2831 : }
2832 :
2833 : i::ScopedVector<char> source(1024);
2834 : i::SNPrintF(source,
2835 : "var number_elements = %d;"
2836 : "var elements = new Array(number_elements);"
2837 : "function f() {"
2838 : " for (var i = 0; i < number_elements; i++) {"
2839 : " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2840 : " }"
2841 : " return elements[number_elements - 1];"
2842 : "};"
2843 : "f(); gc();"
2844 : "f(); f();"
2845 : "%%OptimizeFunctionOnNextCall(f);"
2846 : "f();",
2847 2 : kPretenureCreationCount);
2848 :
2849 : v8::Local<v8::Value> res = CompileRun(source.start());
2850 :
2851 : v8::Local<v8::Value> int_array_1 =
2852 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2853 : Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
2854 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
2855 : v8::Local<v8::Value> int_array_2 =
2856 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2857 : Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
2858 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));
2859 :
2860 : Handle<JSObject> o = Handle<JSObject>::cast(
2861 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2862 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2863 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
2864 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
2865 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
2866 6 : CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
2867 : }
2868 :
2869 :
2870 25880 : TEST(OptimizedPretenuringNestedDoubleLiterals) {
2871 5 : FLAG_allow_natives_syntax = true;
2872 5 : FLAG_expose_gc = true;
2873 5 : CcTest::InitializeVM();
2874 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2875 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2876 : FLAG_stress_incremental_marking)
2877 : return;
2878 2 : v8::HandleScope scope(CcTest::isolate());
2879 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2880 : // Grow new space until maximum capacity reached.
2881 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2882 8 : CcTest::heap()->new_space()->Grow();
2883 : }
2884 :
2885 : i::ScopedVector<char> source(1024);
2886 : i::SNPrintF(source,
2887 : "var number_elements = %d;"
2888 : "var elements = new Array(number_elements);"
2889 : "function f() {"
2890 : " for (var i = 0; i < number_elements; i++) {"
2891 : " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2892 : " }"
2893 : " return elements[number_elements - 1];"
2894 : "};"
2895 : "f(); gc();"
2896 : "f(); f();"
2897 : "%%OptimizeFunctionOnNextCall(f);"
2898 : "f();",
2899 2 : kPretenureCreationCount);
2900 :
2901 : v8::Local<v8::Value> res = CompileRun(source.start());
2902 :
2903 : v8::Local<v8::Value> double_array_1 =
2904 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2905 : i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
2906 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
2907 : v8::Local<v8::Value> double_array_2 =
2908 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2909 : i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
2910 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));
2911 :
2912 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2913 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2914 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2915 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
2916 4 : CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
2917 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
2918 6 : CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
2919 : }
2920 :
2921 :
2922 : // Test regular array literals allocation.
2923 25880 : TEST(OptimizedAllocationArrayLiterals) {
2924 5 : FLAG_allow_natives_syntax = true;
2925 5 : CcTest::InitializeVM();
2926 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2927 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2928 : FLAG_stress_incremental_marking)
2929 : return;
2930 2 : v8::HandleScope scope(CcTest::isolate());
2931 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2932 : v8::Local<v8::Value> res = CompileRun(
2933 : "function f() {"
2934 : " var numbers = new Array(1, 2, 3);"
2935 : " numbers[0] = 3.14;"
2936 : " return numbers;"
2937 : "};"
2938 : "f(); f(); f();"
2939 : "%OptimizeFunctionOnNextCall(f);"
2940 : "f();");
2941 8 : CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
2942 : ->Get(ctx, v8_str("0"))
2943 : .ToLocalChecked()
2944 : ->Int32Value(ctx)
2945 : .FromJust());
2946 :
2947 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2948 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2949 :
2950 4 : CHECK(Heap::InYoungGeneration(o->elements()));
2951 : }
2952 :
2953 10 : static int CountMapTransitions(i::Isolate* isolate, Map map) {
2954 : DisallowHeapAllocation no_gc;
2955 10 : return TransitionsAccessor(isolate, map, &no_gc).NumberOfTransitions();
2956 : }
2957 :
2958 :
2959 : // Test that map transitions are cleared and maps are collected with
2960 : // incremental marking as well.
2961 25880 : TEST(Regress1465) {
2962 5 : if (!FLAG_incremental_marking) return;
2963 5 : FLAG_stress_compaction = false;
2964 5 : FLAG_stress_incremental_marking = false;
2965 5 : FLAG_allow_natives_syntax = true;
2966 5 : FLAG_trace_incremental_marking = true;
2967 5 : FLAG_retain_maps_for_n_gc = 0;
2968 5 : CcTest::InitializeVM();
2969 5 : v8::Isolate* isolate = CcTest::isolate();
2970 : i::Isolate* i_isolate = CcTest::i_isolate();
2971 5 : v8::HandleScope scope(isolate);
2972 5 : v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
2973 : static const int transitions_count = 256;
2974 :
2975 : CompileRun("function F() {}");
2976 : {
2977 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2978 1285 : for (int i = 0; i < transitions_count; i++) {
2979 : EmbeddedVector<char, 64> buffer;
2980 1280 : SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2981 1280 : CompileRun(buffer.start());
2982 : }
2983 : CompileRun("var root = new F;");
2984 : }
2985 :
2986 : i::Handle<JSReceiver> root =
2987 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2988 20 : CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
2989 :
2990 : // Count number of live transitions before marking.
2991 5 : int transitions_before = CountMapTransitions(i_isolate, root->map());
2992 : CompileRun("%DebugPrint(root);");
2993 5 : CHECK_EQ(transitions_count, transitions_before);
2994 :
2995 5 : heap::SimulateIncrementalMarking(CcTest::heap());
2996 5 : CcTest::CollectAllGarbage();
2997 :
2998 : // Count number of live transitions after marking. Note that one transition
2999 : // is left, because 'o' still holds an instance of one transition target.
3000 5 : int transitions_after = CountMapTransitions(i_isolate, root->map());
3001 : CompileRun("%DebugPrint(root);");
3002 5 : CHECK_EQ(1, transitions_after);
3003 : }
3004 :
3005 5 : static i::Handle<JSObject> GetByName(const char* name) {
3006 : return i::Handle<JSObject>::cast(
3007 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3008 : CcTest::global()
3009 15 : ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
3010 15 : .ToLocalChecked())));
3011 : }
3012 :
3013 : #ifdef DEBUG
3014 : static void AddTransitions(int transitions_count) {
3015 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3016 : for (int i = 0; i < transitions_count; i++) {
3017 : EmbeddedVector<char, 64> buffer;
3018 : SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3019 : CompileRun(buffer.start());
3020 : }
3021 : }
3022 :
3023 :
3024 : static void AddPropertyTo(
3025 : int gc_count, Handle<JSObject> object, const char* property_name) {
3026 : Isolate* isolate = CcTest::i_isolate();
3027 : Factory* factory = isolate->factory();
3028 : Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3029 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3030 : FLAG_gc_interval = gc_count;
3031 : FLAG_gc_global = true;
3032 : FLAG_retain_maps_for_n_gc = 0;
3033 : CcTest::heap()->set_allocation_timeout(gc_count);
3034 : Object::SetProperty(isolate, object, prop_name, twenty_three).Check();
3035 : }
3036 :
3037 :
3038 : TEST(TransitionArrayShrinksDuringAllocToZero) {
3039 : FLAG_stress_compaction = false;
3040 : FLAG_stress_incremental_marking = false;
3041 : FLAG_allow_natives_syntax = true;
3042 : CcTest::InitializeVM();
3043 : i::Isolate* i_isolate = CcTest::i_isolate();
3044 : v8::HandleScope scope(CcTest::isolate());
3045 : static const int transitions_count = 10;
3046 : CompileRun("function F() { }");
3047 : AddTransitions(transitions_count);
3048 : CompileRun("var root = new F;");
3049 : Handle<JSObject> root = GetByName("root");
3050 :
3051 : // Count number of live transitions before marking.
3052 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3053 : CHECK_EQ(transitions_count, transitions_before);
3054 :
3055 : // Get rid of o
3056 : CompileRun("o = new F;"
3057 : "root = new F");
3058 : root = GetByName("root");
3059 : AddPropertyTo(2, root, "funny");
3060 : CcTest::CollectGarbage(NEW_SPACE);
3061 :
3062 : // Count number of live transitions after marking. Note that one transition
3063 : // is left, because 'o' still holds an instance of one transition target.
3064 : int transitions_after =
3065 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3066 : CHECK_EQ(1, transitions_after);
3067 : }
3068 :
3069 :
3070 : TEST(TransitionArrayShrinksDuringAllocToOne) {
3071 : FLAG_stress_compaction = false;
3072 : FLAG_stress_incremental_marking = false;
3073 : FLAG_allow_natives_syntax = true;
3074 : CcTest::InitializeVM();
3075 : i::Isolate* i_isolate = CcTest::i_isolate();
3076 : v8::HandleScope scope(CcTest::isolate());
3077 : static const int transitions_count = 10;
3078 : CompileRun("function F() {}");
3079 : AddTransitions(transitions_count);
3080 : CompileRun("var root = new F;");
3081 : Handle<JSObject> root = GetByName("root");
3082 :
3083 : // Count number of live transitions before marking.
3084 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3085 : CHECK_EQ(transitions_count, transitions_before);
3086 :
3087 : root = GetByName("root");
3088 : AddPropertyTo(2, root, "funny");
3089 : CcTest::CollectGarbage(NEW_SPACE);
3090 :
3091 : // Count number of live transitions after marking. Note that one transition
3092 : // is left, because 'o' still holds an instance of one transition target.
3093 : int transitions_after =
3094 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3095 : CHECK_EQ(2, transitions_after);
3096 : }
3097 :
3098 :
3099 : TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3100 : FLAG_stress_compaction = false;
3101 : FLAG_stress_incremental_marking = false;
3102 : FLAG_allow_natives_syntax = true;
3103 : CcTest::InitializeVM();
3104 : i::Isolate* i_isolate = CcTest::i_isolate();
3105 : v8::HandleScope scope(CcTest::isolate());
3106 : static const int transitions_count = 10;
3107 : CompileRun("function F() {}");
3108 : AddTransitions(transitions_count);
3109 : CompileRun("var root = new F;");
3110 : Handle<JSObject> root = GetByName("root");
3111 :
3112 : // Count number of live transitions before marking.
3113 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3114 : CHECK_EQ(transitions_count, transitions_before);
3115 :
3116 : root = GetByName("root");
3117 : AddPropertyTo(0, root, "prop9");
3118 : CcTest::CollectGarbage(OLD_SPACE);
3119 :
3120 : // Count number of live transitions after marking. Note that one transition
3121 : // is left, because 'o' still holds an instance of one transition target.
3122 : int transitions_after =
3123 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3124 : CHECK_EQ(1, transitions_after);
3125 : }
3126 : #endif // DEBUG
3127 :
3128 :
3129 25880 : TEST(ReleaseOverReservedPages) {
3130 5 : if (FLAG_never_compact) return;
3131 5 : FLAG_trace_gc = true;
3132 : // The optimizer can allocate stuff, messing up the test.
3133 : #ifndef V8_LITE_MODE
3134 5 : FLAG_opt = false;
3135 5 : FLAG_always_opt = false;
3136 : #endif // V8_LITE_MODE
3137 : // - Parallel compaction increases fragmentation, depending on how existing
3138 : // memory is distributed. Since this is non-deterministic because of
3139 : // concurrent sweeping, we disable it for this test.
3140 : // - Concurrent sweeping adds non determinism, depending on when memory is
3141 : // available for further reuse.
3142 : // - Fast evacuation of pages may result in a different page count in old
3143 : // space.
3144 : ManualGCScope manual_gc_scope;
3145 5 : FLAG_page_promotion = false;
3146 5 : FLAG_parallel_compaction = false;
3147 5 : CcTest::InitializeVM();
3148 5 : Isolate* isolate = CcTest::i_isolate();
3149 : // If there's snapshot available, we don't know whether 20 small arrays will
3150 : // fit on the initial pages.
3151 5 : if (!isolate->snapshot_available()) return;
3152 : Factory* factory = isolate->factory();
3153 5 : Heap* heap = isolate->heap();
3154 10 : v8::HandleScope scope(CcTest::isolate());
3155 : // Ensure that the young generation is empty.
3156 5 : CcTest::CollectGarbage(NEW_SPACE);
3157 5 : CcTest::CollectGarbage(NEW_SPACE);
3158 : static const int number_of_test_pages = 20;
3159 :
3160 : // Prepare many pages with low live-bytes count.
3161 : PagedSpace* old_space = heap->old_space();
3162 5 : const int initial_page_count = old_space->CountTotalPages();
3163 5 : const int overall_page_count = number_of_test_pages + initial_page_count;
3164 105 : for (int i = 0; i < number_of_test_pages; i++) {
3165 : AlwaysAllocateScope always_allocate(isolate);
3166 100 : heap::SimulateFullSpace(old_space);
3167 100 : factory->NewFixedArray(1, TENURED);
3168 : }
3169 5 : CHECK_EQ(overall_page_count, old_space->CountTotalPages());
3170 :
3171 : // Triggering one GC will cause a lot of garbage to be discovered but
3172 : // even spread across all allocated pages.
3173 5 : CcTest::CollectAllGarbage();
3174 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages());
3175 :
3176 : // Triggering subsequent GCs should cause at least half of the pages
3177 : // to be released to the OS after at most two cycles.
3178 5 : CcTest::CollectAllGarbage();
3179 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages());
3180 5 : CcTest::CollectAllGarbage();
3181 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
3182 :
3183 : // Triggering a last-resort GC should cause all pages to be released to the
3184 : // OS so that other processes can seize the memory. If we get a failure here
3185 : // where there are 2 pages left instead of 1, then we should increase the
3186 : // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3187 : // first page should be small in order to reduce memory used when the VM
3188 : // boots, but if the 20 small arrays don't fit on the first page then that's
3189 : // an indication that it is too small.
3190 5 : CcTest::CollectAllAvailableGarbage();
3191 5 : CHECK_GE(initial_page_count, old_space->CountTotalPages());
3192 : }
3193 :
3194 : static int forced_gc_counter = 0;
3195 :
3196 6 : void MockUseCounterCallback(v8::Isolate* isolate,
3197 : v8::Isolate::UseCounterFeature feature) {
3198 6 : isolate->GetCurrentContext();
3199 6 : if (feature == v8::Isolate::kForcedGC) {
3200 5 : forced_gc_counter++;
3201 : }
3202 6 : }
3203 :
3204 :
3205 25880 : TEST(CountForcedGC) {
3206 5 : FLAG_expose_gc = true;
3207 5 : CcTest::InitializeVM();
3208 : Isolate* isolate = CcTest::i_isolate();
3209 5 : v8::HandleScope scope(CcTest::isolate());
3210 :
3211 5 : isolate->SetUseCounterCallback(MockUseCounterCallback);
3212 :
3213 5 : forced_gc_counter = 0;
3214 : const char* source = "gc();";
3215 : CompileRun(source);
3216 5 : CHECK_GT(forced_gc_counter, 0);
3217 5 : }
3218 :
3219 :
3220 : #ifdef OBJECT_PRINT
3221 : TEST(PrintSharedFunctionInfo) {
3222 : CcTest::InitializeVM();
3223 : v8::HandleScope scope(CcTest::isolate());
3224 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3225 : const char* source = "f = function() { return 987654321; }\n"
3226 : "g = function() { return 123456789; }\n";
3227 : CompileRun(source);
3228 : i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
3229 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3230 : CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
3231 :
3232 : StdoutStream os;
3233 : g->shared()->Print(os);
3234 : os << std::endl;
3235 : }
3236 : #endif // OBJECT_PRINT
3237 :
3238 :
3239 25880 : TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3240 6 : if (!FLAG_use_ic) return;
3241 5 : if (!FLAG_incremental_marking) return;
3242 5 : if (FLAG_always_opt) return;
3243 4 : CcTest::InitializeVM();
3244 4 : v8::HandleScope scope(CcTest::isolate());
3245 : v8::Local<v8::Value> fun1, fun2;
3246 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3247 : {
3248 : CompileRun("function fun() {};");
3249 16 : fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3250 : }
3251 :
3252 : {
3253 : CompileRun("function fun() {};");
3254 16 : fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3255 : }
3256 :
3257 : // Prepare function f that contains type feedback for the two closures.
3258 16 : CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3259 16 : CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
3260 : CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3261 :
3262 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3263 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3264 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3265 :
3266 8 : Handle<FeedbackVector> feedback_vector(f->feedback_vector(), f->GetIsolate());
3267 4 : FeedbackVectorHelper feedback_helper(feedback_vector);
3268 :
3269 : int expected_slots = 2;
3270 4 : CHECK_EQ(expected_slots, feedback_helper.slot_count());
3271 : int slot1 = 0;
3272 : int slot2 = 1;
3273 8 : CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3274 8 : CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3275 :
3276 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3277 4 : CcTest::CollectAllGarbage();
3278 :
3279 8 : CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3280 12 : CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3281 : }
3282 :
3283 :
3284 24 : static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3285 : InlineCacheState desired_state) {
3286 : Handle<FeedbackVector> vector =
3287 48 : Handle<FeedbackVector>(f->feedback_vector(), f->GetIsolate());
3288 24 : FeedbackVectorHelper helper(vector);
3289 24 : FeedbackSlot slot = helper.slot(slot_index);
3290 24 : FeedbackNexus nexus(vector, slot);
3291 24 : CHECK(nexus.ic_state() == desired_state);
3292 24 : }
3293 :
3294 25880 : TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3295 1 : if (FLAG_lite_mode) return;
3296 5 : if (!FLAG_incremental_marking) return;
3297 5 : if (FLAG_always_opt) return;
3298 4 : CcTest::InitializeVM();
3299 4 : v8::HandleScope scope(CcTest::isolate());
3300 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3301 : // Prepare function f that contains a monomorphic IC for object
3302 : // originating from the same native context.
3303 : CompileRun(
3304 : "function fun() { this.x = 1; };"
3305 : "function f(o) { return new o(); } f(fun); f(fun);");
3306 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3307 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3308 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3309 :
3310 8 : Handle<FeedbackVector> vector(f->feedback_vector(), f->GetIsolate());
3311 8 : CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());
3312 :
3313 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3314 4 : CcTest::CollectAllGarbage();
3315 :
3316 8 : CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());
3317 : }
3318 :
3319 25880 : TEST(IncrementalMarkingPreservesMonomorphicIC) {
3320 6 : if (!FLAG_use_ic) return;
3321 5 : if (!FLAG_incremental_marking) return;
3322 5 : if (FLAG_always_opt) return;
3323 4 : CcTest::InitializeVM();
3324 4 : v8::HandleScope scope(CcTest::isolate());
3325 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3326 : // Prepare function f that contains a monomorphic IC for object
3327 : // originating from the same native context.
3328 : CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3329 : "function f(o) { return o.x; } f(obj); f(obj);");
3330 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3331 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3332 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3333 :
3334 4 : CheckVectorIC(f, 0, MONOMORPHIC);
3335 :
3336 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3337 4 : CcTest::CollectAllGarbage();
3338 :
3339 4 : CheckVectorIC(f, 0, MONOMORPHIC);
3340 : }
3341 :
3342 25880 : TEST(IncrementalMarkingPreservesPolymorphicIC) {
3343 6 : if (!FLAG_use_ic) return;
3344 5 : if (!FLAG_incremental_marking) return;
3345 5 : if (FLAG_always_opt) return;
3346 4 : CcTest::InitializeVM();
3347 4 : v8::HandleScope scope(CcTest::isolate());
3348 : v8::Local<v8::Value> obj1, obj2;
3349 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3350 :
3351 : {
3352 4 : LocalContext env;
3353 : CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3354 20 : obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3355 : }
3356 :
3357 : {
3358 4 : LocalContext env;
3359 : CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3360 20 : obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3361 : }
3362 :
3363 : // Prepare function f that contains a polymorphic IC for objects
3364 : // originating from two different native contexts.
3365 16 : CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3366 16 : CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3367 : CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3368 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3369 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3370 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3371 :
3372 4 : CheckVectorIC(f, 0, POLYMORPHIC);
3373 :
3374 : // Fire context dispose notification.
3375 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3376 4 : CcTest::CollectAllGarbage();
3377 :
3378 4 : CheckVectorIC(f, 0, POLYMORPHIC);
3379 : }
3380 :
3381 25880 : TEST(ContextDisposeDoesntClearPolymorphicIC) {
3382 6 : if (!FLAG_use_ic) return;
3383 5 : if (!FLAG_incremental_marking) return;
3384 5 : if (FLAG_always_opt) return;
3385 4 : CcTest::InitializeVM();
3386 4 : v8::HandleScope scope(CcTest::isolate());
3387 : v8::Local<v8::Value> obj1, obj2;
3388 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3389 :
3390 : {
3391 4 : LocalContext env;
3392 : CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3393 20 : obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3394 : }
3395 :
3396 : {
3397 4 : LocalContext env;
3398 : CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3399 20 : obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3400 : }
3401 :
3402 : // Prepare function f that contains a polymorphic IC for objects
3403 : // originating from two different native contexts.
3404 16 : CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3405 16 : CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3406 : CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3407 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3408 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3409 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3410 :
3411 4 : CheckVectorIC(f, 0, POLYMORPHIC);
3412 :
3413 : // Fire context dispose notification.
3414 4 : CcTest::isolate()->ContextDisposedNotification();
3415 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3416 4 : CcTest::CollectAllGarbage();
3417 :
3418 4 : CheckVectorIC(f, 0, POLYMORPHIC);
3419 : }
3420 :
3421 :
3422 48 : class SourceResource : public v8::String::ExternalOneByteStringResource {
3423 : public:
3424 : explicit SourceResource(const char* data)
3425 24 : : data_(data), length_(strlen(data)) { }
3426 :
3427 24 : void Dispose() override {
3428 24 : i::DeleteArray(data_);
3429 24 : data_ = nullptr;
3430 24 : }
3431 :
3432 288 : const char* data() const override { return data_; }
3433 :
3434 96 : size_t length() const override { return length_; }
3435 :
3436 48 : bool IsDisposed() { return data_ == nullptr; }
3437 :
3438 : private:
3439 : const char* data_;
3440 : size_t length_;
3441 : };
3442 :
3443 :
3444 24 : void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3445 : const char* accessor) {
3446 : // Test that the data retained by the Error.stack accessor is released
3447 : // after the first time the accessor is fired. We use external string
3448 : // to check whether the data is being released since the external string
3449 : // resource's callback is fired when the external string is GC'ed.
3450 : i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3451 24 : v8::HandleScope scope(isolate);
3452 96 : SourceResource* resource = new SourceResource(i::StrDup(source));
3453 : {
3454 24 : v8::HandleScope scope(isolate);
3455 24 : v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
3456 : v8::Local<v8::String> source_string =
3457 48 : v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
3458 : i_isolate->heap()->CollectAllAvailableGarbage(
3459 24 : i::GarbageCollectionReason::kTesting);
3460 : v8::Script::Compile(ctx, source_string)
3461 24 : .ToLocalChecked()
3462 : ->Run(ctx)
3463 24 : .ToLocalChecked();
3464 24 : CHECK(!resource->IsDisposed());
3465 : }
3466 : // i_isolate->heap()->CollectAllAvailableGarbage();
3467 24 : CHECK(!resource->IsDisposed());
3468 :
3469 : CompileRun(accessor);
3470 : i_isolate->heap()->CollectAllAvailableGarbage(
3471 24 : i::GarbageCollectionReason::kTesting);
3472 :
3473 : // External source has been released.
3474 24 : CHECK(resource->IsDisposed());
3475 24 : delete resource;
3476 24 : }
3477 :
3478 :
3479 25880 : UNINITIALIZED_TEST(ReleaseStackTraceData) {
3480 5 : if (FLAG_always_opt) {
3481 : // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
3482 : // See: https://codereview.chromium.org/181833004/
3483 1 : return;
3484 : }
3485 : #ifndef V8_LITE_MODE
3486 : // ICs retain objects.
3487 4 : FLAG_use_ic = false;
3488 : #endif // V8_LITE_MODE
3489 4 : FLAG_concurrent_recompilation = false;
3490 : v8::Isolate::CreateParams create_params;
3491 4 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
3492 4 : v8::Isolate* isolate = v8::Isolate::New(create_params);
3493 : {
3494 : v8::Isolate::Scope isolate_scope(isolate);
3495 8 : v8::HandleScope handle_scope(isolate);
3496 8 : v8::Context::New(isolate)->Enter();
3497 : static const char* source1 = "var error = null; "
3498 : /* Normal Error */ "try { "
3499 : " throw new Error(); "
3500 : "} catch (e) { "
3501 : " error = e; "
3502 : "} ";
3503 : static const char* source2 = "var error = null; "
3504 : /* Stack overflow */ "try { "
3505 : " (function f() { f(); })(); "
3506 : "} catch (e) { "
3507 : " error = e; "
3508 : "} ";
3509 : static const char* source3 = "var error = null; "
3510 : /* Normal Error */ "try { "
3511 : /* as prototype */ " throw new Error(); "
3512 : "} catch (e) { "
3513 : " error = {}; "
3514 : " error.__proto__ = e; "
3515 : "} ";
3516 : static const char* source4 = "var error = null; "
3517 : /* Stack overflow */ "try { "
3518 : /* as prototype */ " (function f() { f(); })(); "
3519 : "} catch (e) { "
3520 : " error = {}; "
3521 : " error.__proto__ = e; "
3522 : "} ";
3523 : static const char* getter = "error.stack";
3524 : static const char* setter = "error.stack = 0";
3525 :
3526 4 : ReleaseStackTraceDataTest(isolate, source1, setter);
3527 4 : ReleaseStackTraceDataTest(isolate, source2, setter);
3528 : // We do not test source3 and source4 with setter, since the setter is
3529 : // supposed to (untypically) write to the receiver, not the holder. This is
3530 : // to emulate the behavior of a data property.
3531 :
3532 4 : ReleaseStackTraceDataTest(isolate, source1, getter);
3533 4 : ReleaseStackTraceDataTest(isolate, source2, getter);
3534 4 : ReleaseStackTraceDataTest(isolate, source3, getter);
3535 4 : ReleaseStackTraceDataTest(isolate, source4, getter);
3536 : }
3537 4 : isolate->Dispose();
3538 : }
3539 :
3540 : // TODO(mmarchini) also write tests for async/await and Promise.all
3541 15 : void DetailedErrorStackTraceTest(const char* src,
3542 : std::function<void(Handle<FrameArray>)> test) {
3543 15 : FLAG_detailed_error_stack_trace = true;
3544 15 : CcTest::InitializeVM();
3545 15 : v8::HandleScope scope(CcTest::isolate());
3546 :
3547 30 : v8::TryCatch try_catch(CcTest::isolate());
3548 : CompileRun(src);
3549 :
3550 15 : CHECK(try_catch.HasCaught());
3551 30 : Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
3552 :
3553 : Isolate* isolate = CcTest::i_isolate();
3554 : Handle<Name> key = isolate->factory()->stack_trace_symbol();
3555 :
3556 : Handle<FrameArray> stack_trace(
3557 : FrameArray::cast(
3558 : Handle<JSArray>::cast(
3559 30 : Object::GetProperty(isolate, exception, key).ToHandleChecked())
3560 45 : ->elements()),
3561 30 : isolate);
3562 :
3563 30 : test(stack_trace);
3564 15 : }
3565 :
3566 : // * Test interpreted function error
3567 25880 : TEST(DetailedErrorStackTrace) {
3568 : static const char* source =
3569 : "function func1(arg1) { "
3570 : " let err = new Error(); "
3571 : " throw err; "
3572 : "} "
3573 : "function func2(arg1, arg2) { "
3574 : " func1(42); "
3575 : "} "
3576 : "class Foo {}; "
3577 : "function main(arg1, arg2) { "
3578 : " func2(arg1, false); "
3579 : "} "
3580 : "var foo = new Foo(); "
3581 : "main(foo); ";
3582 :
3583 5 : DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
3584 5 : FixedArray foo_parameters = stack_trace->Parameters(0);
3585 5 : CHECK_EQ(foo_parameters->length(), 1);
3586 10 : CHECK(foo_parameters->get(0)->IsSmi());
3587 5 : CHECK_EQ(Smi::ToInt(foo_parameters->get(0)), 42);
3588 :
3589 5 : FixedArray bar_parameters = stack_trace->Parameters(1);
3590 5 : CHECK_EQ(bar_parameters->length(), 2);
3591 10 : CHECK(bar_parameters->get(0)->IsJSObject());
3592 10 : CHECK(bar_parameters->get(1)->IsBoolean());
3593 5 : Handle<Object> foo = Handle<Object>::cast(GetByName("foo"));
3594 15 : CHECK_EQ(bar_parameters->get(0), *foo);
3595 5 : CHECK(!bar_parameters->get(1)->BooleanValue(CcTest::i_isolate()));
3596 :
3597 5 : FixedArray main_parameters = stack_trace->Parameters(2);
3598 5 : CHECK_EQ(main_parameters->length(), 2);
3599 10 : CHECK(main_parameters->get(0)->IsJSObject());
3600 10 : CHECK(main_parameters->get(1)->IsUndefined());
3601 15 : CHECK_EQ(main_parameters->get(0), *foo);
3602 15 : });
3603 5 : }
3604 :
3605 : // * Test optimized function with inline frame error
3606 25880 : TEST(DetailedErrorStackTraceInline) {
3607 5 : FLAG_allow_natives_syntax = true;
3608 : static const char* source =
3609 : "function add(x) { "
3610 : " if (x == 42) "
3611 : " throw new Error(); "
3612 : " return x + x; "
3613 : "} "
3614 : "add(0); "
3615 : "add(1); "
3616 : "function foo(x) { "
3617 : " return add(x + 1) "
3618 : "} "
3619 : "foo(40); "
3620 : "%OptimizeFunctionOnNextCall(foo); "
3621 : "foo(41); ";
3622 :
3623 5 : DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
3624 5 : FixedArray parameters_add = stack_trace->Parameters(0);
3625 5 : CHECK_EQ(parameters_add->length(), 1);
3626 10 : CHECK(parameters_add->get(0)->IsSmi());
3627 5 : CHECK_EQ(Smi::ToInt(parameters_add->get(0)), 42);
3628 :
3629 5 : FixedArray parameters_foo = stack_trace->Parameters(1);
3630 5 : CHECK_EQ(parameters_foo->length(), 1);
3631 10 : CHECK(parameters_foo->get(0)->IsSmi());
3632 5 : CHECK_EQ(Smi::ToInt(parameters_foo->get(0)), 41);
3633 15 : });
3634 5 : }
3635 :
3636 : // * Test builtin exit error
3637 25880 : TEST(DetailedErrorStackTraceBuiltinExit) {
3638 : static const char* source =
3639 : "function test(arg1) { "
3640 : " (new Number()).toFixed(arg1); "
3641 : "} "
3642 : "test(9999); ";
3643 :
3644 5 : DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
3645 5 : FixedArray parameters = stack_trace->Parameters(0);
3646 :
3647 5 : CHECK_EQ(parameters->length(), 2);
3648 10 : CHECK(parameters->get(0)->IsSmi());
3649 5 : CHECK_EQ(Smi::ToInt(parameters->get(0)), 9999);
3650 15 : });
3651 5 : }
3652 :
3653 25880 : TEST(Regress169928) {
3654 5 : FLAG_allow_natives_syntax = true;
3655 : #ifndef V8_LITE_MODE
3656 5 : FLAG_opt = false;
3657 : #endif // V8_LITE_MODE
3658 5 : CcTest::InitializeVM();
3659 : Isolate* isolate = CcTest::i_isolate();
3660 5 : LocalContext env;
3661 : Factory* factory = isolate->factory();
3662 9 : v8::HandleScope scope(CcTest::isolate());
3663 :
3664 : // Some flags turn Scavenge collections into Mark-sweep collections
3665 : // and hence are incompatible with this test case.
3666 5 : if (FLAG_gc_global || FLAG_stress_compaction ||
3667 : FLAG_stress_incremental_marking)
3668 1 : return;
3669 :
3670 : // Prepare the environment
3671 : CompileRun("function fastliteralcase(literal, value) {"
3672 : " literal[0] = value;"
3673 : " return literal;"
3674 : "}"
3675 : "function get_standard_literal() {"
3676 : " var literal = [1, 2, 3];"
3677 : " return literal;"
3678 : "}"
3679 : "obj = fastliteralcase(get_standard_literal(), 1);"
3680 : "obj = fastliteralcase(get_standard_literal(), 1.5);"
3681 : "obj = fastliteralcase(get_standard_literal(), 2);");
3682 :
3683 : // prepare the heap
3684 : v8::Local<v8::String> mote_code_string =
3685 4 : v8_str("fastliteralcase(mote, 2.5);");
3686 :
3687 4 : v8::Local<v8::String> array_name = v8_str("mote");
3688 16 : CHECK(CcTest::global()
3689 : ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
3690 : .FromJust());
3691 :
3692 : // First make sure we flip spaces
3693 4 : CcTest::CollectGarbage(NEW_SPACE);
3694 :
3695 : // Allocate the object.
3696 4 : Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
3697 : array_data->set(0, Smi::FromInt(1));
3698 : array_data->set(1, Smi::FromInt(2));
3699 :
3700 : heap::AllocateAllButNBytes(
3701 : CcTest::heap()->new_space(),
3702 4 : JSArray::kSize + AllocationMemento::kSize + kTaggedSize);
3703 :
3704 : Handle<JSArray> array =
3705 4 : factory->NewJSArrayWithElements(array_data, PACKED_SMI_ELEMENTS);
3706 :
3707 8 : CHECK_EQ(Smi::FromInt(2), array->length());
3708 8 : CHECK(array->HasSmiOrObjectElements());
3709 :
3710 : // We need filler the size of AllocationMemento object, plus an extra
3711 : // fill pointer value.
3712 4 : HeapObject obj;
3713 : AllocationResult allocation =
3714 : CcTest::heap()->new_space()->AllocateRawUnaligned(
3715 8 : AllocationMemento::kSize + kTaggedSize);
3716 4 : CHECK(allocation.To(&obj));
3717 : Address addr_obj = obj->address();
3718 : CcTest::heap()->CreateFillerObjectAt(addr_obj,
3719 : AllocationMemento::kSize + kTaggedSize,
3720 4 : ClearRecordedSlots::kNo);
3721 :
3722 : // Give the array a name, making sure not to allocate strings.
3723 : v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
3724 12 : CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());
3725 :
3726 : // This should crash with a protection violation if we are running a build
3727 : // with the bug.
3728 : AlwaysAllocateScope aa_scope(isolate);
3729 4 : v8::Script::Compile(env.local(), mote_code_string)
3730 4 : .ToLocalChecked()
3731 4 : ->Run(env.local())
3732 8 : .ToLocalChecked();
3733 : }
3734 :
3735 25880 : TEST(LargeObjectSlotRecording) {
3736 5 : if (!FLAG_incremental_marking) return;
3737 5 : if (FLAG_never_compact) return;
3738 : ManualGCScope manual_gc_scope;
3739 5 : FLAG_manual_evacuation_candidates_selection = true;
3740 5 : CcTest::InitializeVM();
3741 : Isolate* isolate = CcTest::i_isolate();
3742 15 : Heap* heap = isolate->heap();
3743 : HandleScope scope(isolate);
3744 :
3745 : // Create an object on an evacuation candidate.
3746 5 : heap::SimulateFullSpace(heap->old_space());
3747 5 : Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
3748 : Page* evac_page = Page::FromHeapObject(*lit);
3749 5 : heap::ForceEvacuationCandidate(evac_page);
3750 : FixedArray old_location = *lit;
3751 :
3752 : // Allocate a large object.
3753 : int size = Max(1000000, kMaxRegularHeapObjectSize + KB);
3754 : CHECK_LT(kMaxRegularHeapObjectSize, size);
3755 5 : Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
3756 10 : CHECK(heap->lo_space()->Contains(*lo));
3757 :
3758 : // Start incremental marking to active write barrier.
3759 5 : heap::SimulateIncrementalMarking(heap, false);
3760 :
3761 : // Create references from the large object to the object on the evacuation
3762 : // candidate.
3763 : const int kStep = size / 10;
3764 55 : for (int i = 0; i < size; i += kStep) {
3765 100 : lo->set(i, *lit);
3766 50 : CHECK(lo->get(i) == old_location);
3767 : }
3768 :
3769 5 : heap::SimulateIncrementalMarking(heap, true);
3770 :
3771 : // Move the evaucation candidate object.
3772 5 : CcTest::CollectAllGarbage();
3773 :
3774 : // Verify that the pointers in the large object got updated.
3775 55 : for (int i = 0; i < size; i += kStep) {
3776 150 : CHECK_EQ(lo->get(i), *lit);
3777 50 : CHECK(lo->get(i) != old_location);
3778 : }
3779 : }
3780 :
3781 5 : class DummyVisitor : public RootVisitor {
3782 : public:
3783 20 : void VisitRootPointers(Root root, const char* description,
3784 20 : FullObjectSlot start, FullObjectSlot end) override {}
3785 : };
3786 :
3787 :
3788 25880 : TEST(DeferredHandles) {
3789 5 : CcTest::InitializeVM();
3790 5 : Isolate* isolate = CcTest::i_isolate();
3791 5 : Heap* heap = isolate->heap();
3792 5 : v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
3793 : HandleScopeData* data = isolate->handle_scope_data();
3794 : Handle<Object> init(ReadOnlyRoots(heap).empty_string(), isolate);
3795 5115 : while (data->next < data->limit) {
3796 : Handle<Object> obj(ReadOnlyRoots(heap).empty_string(), isolate);
3797 : }
3798 : // An entire block of handles has been filled.
3799 : // Next handle would require a new block.
3800 5 : CHECK(data->next == data->limit);
3801 :
3802 10 : DeferredHandleScope deferred(isolate);
3803 5 : DummyVisitor visitor;
3804 5 : isolate->handle_scope_implementer()->Iterate(&visitor);
3805 10 : delete deferred.Detach();
3806 5 : }
3807 :
3808 :
3809 25879 : TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
3810 4 : if (!FLAG_incremental_marking) return;
3811 : ManualGCScope manual_gc_scope;
3812 4 : CcTest::InitializeVM();
3813 8 : v8::HandleScope scope(CcTest::isolate());
3814 : CompileRun("function f(n) {"
3815 : " var a = new Array(n);"
3816 : " for (var i = 0; i < n; i += 100) a[i] = i;"
3817 : "};"
3818 : "f(10 * 1024 * 1024);");
3819 4 : IncrementalMarking* marking = CcTest::heap()->incremental_marking();
3820 4 : if (marking->IsStopped()) {
3821 : CcTest::heap()->StartIncrementalMarking(
3822 4 : i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
3823 : }
3824 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3825 4 : CHECK(marking->IsComplete() ||
3826 : marking->IsReadyToOverApproximateWeakClosure());
3827 : }
3828 :
3829 :
3830 25880 : TEST(DisableInlineAllocation) {
3831 5 : FLAG_allow_natives_syntax = true;
3832 5 : CcTest::InitializeVM();
3833 5 : v8::HandleScope scope(CcTest::isolate());
3834 : CompileRun("function test() {"
3835 : " var x = [];"
3836 : " for (var i = 0; i < 10; i++) {"
3837 : " x[i] = [ {}, [1,2,3], [1,x,3] ];"
3838 : " }"
3839 : "}"
3840 : "function run() {"
3841 : " %OptimizeFunctionOnNextCall(test);"
3842 : " test();"
3843 : " %DeoptimizeFunction(test);"
3844 : "}");
3845 :
3846 : // Warm-up with inline allocation enabled.
3847 : CompileRun("test(); test(); run();");
3848 :
3849 : // Run test with inline allocation disabled.
3850 5 : CcTest::heap()->DisableInlineAllocation();
3851 : CompileRun("run()");
3852 :
3853 : // Run test with inline allocation re-enabled.
3854 5 : CcTest::heap()->EnableInlineAllocation();
3855 5 : CompileRun("run()");
3856 5 : }
3857 :
3858 :
3859 266 : static int AllocationSitesCount(Heap* heap) {
3860 : int count = 0;
3861 1825 : for (Object site = heap->allocation_sites_list(); site->IsAllocationSite();) {
3862 1293 : AllocationSite cur = AllocationSite::cast(site);
3863 1293 : CHECK(cur->HasWeakNext());
3864 1293 : site = cur->weak_next();
3865 1293 : count++;
3866 : }
3867 266 : return count;
3868 : }
3869 :
3870 260 : static int SlimAllocationSiteCount(Heap* heap) {
3871 : int count = 0;
3872 1810 : for (Object weak_list = heap->allocation_sites_list();
3873 : weak_list->IsAllocationSite();) {
3874 : AllocationSite weak_cur = AllocationSite::cast(weak_list);
3875 3645 : for (Object site = weak_cur->nested_site(); site->IsAllocationSite();) {
3876 1065 : AllocationSite cur = AllocationSite::cast(site);
3877 1065 : CHECK(!cur->HasWeakNext());
3878 1065 : site = cur->nested_site();
3879 1065 : count++;
3880 : }
3881 1290 : weak_list = weak_cur->weak_next();
3882 : }
3883 260 : return count;
3884 : }
3885 :
3886 25880 : TEST(EnsureAllocationSiteDependentCodesProcessed) {
3887 5 : if (FLAG_always_opt || !FLAG_opt) return;
3888 3 : FLAG_allow_natives_syntax = true;
3889 3 : CcTest::InitializeVM();
3890 3 : Isolate* isolate = CcTest::i_isolate();
3891 3 : v8::internal::Heap* heap = CcTest::heap();
3892 : GlobalHandles* global_handles = isolate->global_handles();
3893 :
3894 3 : if (!isolate->use_optimizer()) return;
3895 :
3896 : // The allocation site at the head of the list is ours.
3897 : Handle<AllocationSite> site;
3898 : {
3899 3 : LocalContext context;
3900 6 : v8::HandleScope scope(context->GetIsolate());
3901 :
3902 3 : int count = AllocationSitesCount(heap);
3903 : CompileRun("var bar = function() { return (new Array()); };"
3904 : "var a = bar();"
3905 : "bar();"
3906 : "bar();");
3907 :
3908 : // One allocation site should have been created.
3909 3 : int new_count = AllocationSitesCount(heap);
3910 3 : CHECK_EQ(new_count, (count + 1));
3911 : site = Handle<AllocationSite>::cast(
3912 : global_handles->Create(
3913 3 : AllocationSite::cast(heap->allocation_sites_list())));
3914 :
3915 : CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
3916 :
3917 : Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
3918 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3919 : CcTest::global()
3920 9 : ->Get(context.local(), v8_str("bar"))
3921 6 : .ToLocalChecked())));
3922 :
3923 : int dependency_group_count = 0;
3924 3 : DependentCode dependency = site->dependent_code();
3925 12 : while (dependency != ReadOnlyRoots(heap).empty_weak_fixed_array()) {
3926 9 : CHECK(dependency->group() ==
3927 : DependentCode::kAllocationSiteTransitionChangedGroup ||
3928 : dependency->group() ==
3929 : DependentCode::kAllocationSiteTenuringChangedGroup);
3930 6 : CHECK_EQ(1, dependency->count());
3931 6 : CHECK(dependency->object_at(0)->IsWeak());
3932 : Code function_bar =
3933 12 : Code::cast(dependency->object_at(0)->GetHeapObjectAssumeWeak());
3934 12 : CHECK_EQ(bar_handle->code(), function_bar);
3935 6 : dependency = dependency->next_link();
3936 6 : dependency_group_count++;
3937 : }
3938 : // Expect a dependent code object for transitioning and pretenuring.
3939 6 : CHECK_EQ(2, dependency_group_count);
3940 : }
3941 :
3942 : // Now make sure that a gc should get rid of the function, even though we
3943 : // still have the allocation site alive.
3944 15 : for (int i = 0; i < 4; i++) {
3945 12 : CcTest::CollectAllGarbage();
3946 : }
3947 :
3948 : // The site still exists because of our global handle, but the code is no
3949 : // longer referred to by dependent_code().
3950 6 : CHECK(site->dependent_code()->object_at(0)->IsCleared());
3951 : }
3952 :
3953 130 : void CheckNumberOfAllocations(Heap* heap, const char* source,
3954 : int expected_full_alloc,
3955 : int expected_slim_alloc) {
3956 130 : int prev_fat_alloc_count = AllocationSitesCount(heap);
3957 130 : int prev_slim_alloc_count = SlimAllocationSiteCount(heap);
3958 :
3959 : CompileRun(source);
3960 :
3961 130 : int fat_alloc_sites = AllocationSitesCount(heap) - prev_fat_alloc_count;
3962 130 : int slim_alloc_sites = SlimAllocationSiteCount(heap) - prev_slim_alloc_count;
3963 :
3964 130 : CHECK_EQ(expected_full_alloc, fat_alloc_sites);
3965 130 : CHECK_EQ(expected_slim_alloc, slim_alloc_sites);
3966 130 : }
3967 :
3968 25880 : TEST(AllocationSiteCreation) {
3969 : // No feedback vectors and hence no allocation sites.
3970 5 : if (FLAG_lite_mode) return;
3971 5 : FLAG_always_opt = false;
3972 5 : CcTest::InitializeVM();
3973 : Isolate* isolate = CcTest::i_isolate();
3974 5 : Heap* heap = isolate->heap();
3975 : HandleScope scope(isolate);
3976 5 : i::FLAG_enable_one_shot_optimization = true;
3977 :
3978 : // Array literals.
3979 5 : CheckNumberOfAllocations(heap, "function f1() { return []; }; f1()", 1, 0);
3980 : CheckNumberOfAllocations(heap, "function f2() { return [1, 2]; }; f2()", 1,
3981 5 : 0);
3982 : CheckNumberOfAllocations(heap, "function f3() { return [[1], [2]]; }; f3()",
3983 5 : 1, 2);
3984 :
3985 : CheckNumberOfAllocations(heap,
3986 : "function f4() { "
3987 : "return [0, [1, 1.1, 1.2, "
3988 : "], 1.5, [2.1, 2.2], 3];"
3989 : "}; f4();",
3990 5 : 1, 2);
3991 :
3992 : // No allocation sites within IIFE/top-level
3993 : CheckNumberOfAllocations(heap,
3994 : R"(
3995 : (function f4() {
3996 : return [ 0, [ 1, 1.1, 1.2,], 1.5, [2.1, 2.2], 3 ];
3997 : })();
3998 : )",
3999 5 : 0, 0);
4000 :
4001 : CheckNumberOfAllocations(heap,
4002 : R"(
4003 : l = [ 1, 2, 3, 4];
4004 : )",
4005 5 : 0, 0);
4006 :
4007 : CheckNumberOfAllocations(heap,
4008 : R"(
4009 : a = [];
4010 : )",
4011 5 : 0, 0);
4012 :
4013 : CheckNumberOfAllocations(heap,
4014 : R"(
4015 : (function f4() {
4016 : return [];
4017 : })();
4018 : )",
4019 5 : 0, 0);
4020 :
4021 : // Object literals have lazy AllocationSites
4022 5 : CheckNumberOfAllocations(heap, "function f5() { return {}; }; f5(); ", 0, 0);
4023 :
4024 : // No AllocationSites are created for the empty object literal.
4025 30 : for (int i = 0; i < 5; i++) {
4026 25 : CheckNumberOfAllocations(heap, "f5(); ", 0, 0);
4027 : }
4028 :
4029 : CheckNumberOfAllocations(heap, "function f6() { return {a:1}; }; f6(); ", 0,
4030 5 : 0);
4031 :
4032 5 : CheckNumberOfAllocations(heap, "f6(); ", 1, 0);
4033 :
4034 : CheckNumberOfAllocations(heap, "function f7() { return {a:1, b:2}; }; f7(); ",
4035 5 : 0, 0);
4036 5 : CheckNumberOfAllocations(heap, "f7(); ", 1, 0);
4037 :
4038 : // No Allocation sites are created for object subliterals
4039 : CheckNumberOfAllocations(heap,
4040 : "function f8() {"
4041 : "return {a:{}, b:{ a:2, c:{ d:{f:{}}} } }; "
4042 : "}; f8(); ",
4043 5 : 0, 0);
4044 5 : CheckNumberOfAllocations(heap, "f8(); ", 1, 0);
4045 :
4046 : // We currently eagerly create allocation sites if there are sub-arrays.
4047 : // Allocation sites are created only for array subliterals
4048 : CheckNumberOfAllocations(heap,
4049 : "function f9() {"
4050 : "return {a:[1, 2, 3], b:{ a:2, c:{ d:{f:[]} } }}; "
4051 : "}; f9(); ",
4052 5 : 1, 2);
4053 :
4054 : // No new AllocationSites created on the second invocation.
4055 5 : CheckNumberOfAllocations(heap, "f9(); ", 0, 0);
4056 :
4057 : // No allocation sites for literals in an iife/top level code even if it has
4058 : // array subliterals
4059 : CheckNumberOfAllocations(heap,
4060 : R"(
4061 : (function f10() {
4062 : return {a: [1], b: [2]};
4063 : })();
4064 : )",
4065 5 : 0, 0);
4066 :
4067 : CheckNumberOfAllocations(heap,
4068 : R"(
4069 : l = {
4070 : a: 1,
4071 : b: {
4072 : c: [5],
4073 : }
4074 : };
4075 : )",
4076 5 : 0, 0);
4077 :
4078 : // Eagerly create allocation sites for literals within a loop of iife or
4079 : // top-level code
4080 : CheckNumberOfAllocations(heap,
4081 : R"(
4082 : (function f11() {
4083 : while(true) {
4084 : return {a: [1], b: [2]};
4085 : }
4086 : })();
4087 : )",
4088 5 : 1, 2);
4089 :
4090 : CheckNumberOfAllocations(heap,
4091 : R"(
4092 : for (i = 0; i < 1; ++i) {
4093 : l = {
4094 : a: 1,
4095 : b: {
4096 : c: [5],
4097 : }
4098 : };
4099 : }
4100 : )",
4101 5 : 1, 1);
4102 : }
4103 :
4104 25880 : TEST(CellsInOptimizedCodeAreWeak) {
4105 5 : if (FLAG_always_opt || !FLAG_opt) return;
4106 3 : FLAG_allow_natives_syntax = true;
4107 3 : CcTest::InitializeVM();
4108 : Isolate* isolate = CcTest::i_isolate();
4109 3 : v8::internal::Heap* heap = CcTest::heap();
4110 :
4111 3 : if (!isolate->use_optimizer()) return;
4112 : HandleScope outer_scope(heap->isolate());
4113 : Handle<Code> code;
4114 : {
4115 3 : LocalContext context;
4116 : HandleScope scope(heap->isolate());
4117 :
4118 : CompileRun(
4119 : "bar = (function() {"
4120 : " function bar() {"
4121 : " return foo(1);"
4122 : " };"
4123 : " var foo = function(x) { with (x) { return 1 + x; } };"
4124 : " %NeverOptimizeFunction(foo);"
4125 : " bar(foo);"
4126 : " bar(foo);"
4127 : " bar(foo);"
4128 : " %OptimizeFunctionOnNextCall(bar);"
4129 : " bar(foo);"
4130 : " return bar;})();");
4131 :
4132 : Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4133 : *v8::Local<v8::Function>::Cast(CcTest::global()
4134 9 : ->Get(context.local(), v8_str("bar"))
4135 6 : .ToLocalChecked())));
4136 9 : code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
4137 : }
4138 :
4139 : // Now make sure that a gc should get rid of the function
4140 15 : for (int i = 0; i < 4; i++) {
4141 12 : CcTest::CollectAllGarbage();
4142 : }
4143 :
4144 3 : CHECK(code->marked_for_deoptimization());
4145 3 : CHECK(code->embedded_objects_cleared());
4146 : }
4147 :
4148 :
4149 25880 : TEST(ObjectsInOptimizedCodeAreWeak) {
4150 5 : if (FLAG_always_opt || !FLAG_opt) return;
4151 3 : FLAG_allow_natives_syntax = true;
4152 3 : CcTest::InitializeVM();
4153 : Isolate* isolate = CcTest::i_isolate();
4154 3 : v8::internal::Heap* heap = CcTest::heap();
4155 :
4156 3 : if (!isolate->use_optimizer()) return;
4157 : HandleScope outer_scope(heap->isolate());
4158 : Handle<Code> code;
4159 : {
4160 3 : LocalContext context;
4161 : HandleScope scope(heap->isolate());
4162 :
4163 : CompileRun(
4164 : "function bar() {"
4165 : " return foo(1);"
4166 : "};"
4167 : "function foo(x) { with (x) { return 1 + x; } };"
4168 : "%NeverOptimizeFunction(foo);"
4169 : "bar();"
4170 : "bar();"
4171 : "bar();"
4172 : "%OptimizeFunctionOnNextCall(bar);"
4173 : "bar();");
4174 :
4175 : Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4176 : *v8::Local<v8::Function>::Cast(CcTest::global()
4177 9 : ->Get(context.local(), v8_str("bar"))
4178 6 : .ToLocalChecked())));
4179 9 : code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
4180 : }
4181 :
4182 : // Now make sure that a gc should get rid of the function
4183 15 : for (int i = 0; i < 4; i++) {
4184 12 : CcTest::CollectAllGarbage();
4185 : }
4186 :
4187 3 : CHECK(code->marked_for_deoptimization());
4188 3 : CHECK(code->embedded_objects_cleared());
4189 : }
4190 :
4191 25880 : TEST(NewSpaceObjectsInOptimizedCode) {
4192 5 : if (FLAG_always_opt || !FLAG_opt) return;
4193 3 : FLAG_allow_natives_syntax = true;
4194 3 : CcTest::InitializeVM();
4195 : Isolate* isolate = CcTest::i_isolate();
4196 :
4197 3 : if (!isolate->use_optimizer()) return;
4198 : HandleScope outer_scope(isolate);
4199 : Handle<Code> code;
4200 : {
4201 3 : LocalContext context;
4202 : HandleScope scope(isolate);
4203 :
4204 : CompileRun(
4205 : "var foo;"
4206 : "var bar;"
4207 : "(function() {"
4208 : " function foo_func(x) { with (x) { return 1 + x; } };"
4209 : " %NeverOptimizeFunction(foo_func);"
4210 : " function bar_func() {"
4211 : " return foo(1);"
4212 : " };"
4213 : " bar = bar_func;"
4214 : " foo = foo_func;"
4215 : " bar_func();"
4216 : " bar_func();"
4217 : " bar_func();"
4218 : " %OptimizeFunctionOnNextCall(bar_func);"
4219 : " bar_func();"
4220 : "})();");
4221 :
4222 : Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4223 : *v8::Local<v8::Function>::Cast(CcTest::global()
4224 9 : ->Get(context.local(), v8_str("bar"))
4225 6 : .ToLocalChecked())));
4226 :
4227 : Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4228 : *v8::Local<v8::Function>::Cast(CcTest::global()
4229 9 : ->Get(context.local(), v8_str("foo"))
4230 6 : .ToLocalChecked())));
4231 :
4232 3 : CHECK(Heap::InYoungGeneration(*foo));
4233 3 : CcTest::CollectGarbage(NEW_SPACE);
4234 3 : CcTest::CollectGarbage(NEW_SPACE);
4235 3 : CHECK(!Heap::InYoungGeneration(*foo));
4236 : #ifdef VERIFY_HEAP
4237 : CcTest::heap()->Verify();
4238 : #endif
4239 3 : CHECK(!bar->code()->marked_for_deoptimization());
4240 9 : code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
4241 : }
4242 :
4243 : // Now make sure that a gc should get rid of the function
4244 15 : for (int i = 0; i < 4; i++) {
4245 12 : CcTest::CollectAllGarbage();
4246 : }
4247 :
4248 3 : CHECK(code->marked_for_deoptimization());
4249 3 : CHECK(code->embedded_objects_cleared());
4250 : }
4251 :
4252 25880 : TEST(ObjectsInEagerlyDeoptimizedCodeAreWeak) {
4253 5 : if (FLAG_always_opt || !FLAG_opt) return;
4254 3 : FLAG_allow_natives_syntax = true;
4255 3 : CcTest::InitializeVM();
4256 : Isolate* isolate = CcTest::i_isolate();
4257 3 : v8::internal::Heap* heap = CcTest::heap();
4258 :
4259 3 : if (!isolate->use_optimizer()) return;
4260 : HandleScope outer_scope(heap->isolate());
4261 : Handle<Code> code;
4262 : {
4263 3 : LocalContext context;
4264 : HandleScope scope(heap->isolate());
4265 :
4266 : CompileRun(
4267 : "function bar() {"
4268 : " return foo(1);"
4269 : "};"
4270 : "function foo(x) { with (x) { return 1 + x; } };"
4271 : "%NeverOptimizeFunction(foo);"
4272 : "bar();"
4273 : "bar();"
4274 : "bar();"
4275 : "%OptimizeFunctionOnNextCall(bar);"
4276 : "bar();"
4277 : "%DeoptimizeFunction(bar);");
4278 :
4279 : Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4280 : *v8::Local<v8::Function>::Cast(CcTest::global()
4281 9 : ->Get(context.local(), v8_str("bar"))
4282 6 : .ToLocalChecked())));
4283 9 : code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
4284 : }
4285 :
4286 3 : CHECK(code->marked_for_deoptimization());
4287 :
4288 : // Now make sure that a gc should get rid of the function
4289 12 : for (int i = 0; i < 4; i++) {
4290 12 : CcTest::CollectAllGarbage();
4291 : }
4292 :
4293 3 : CHECK(code->marked_for_deoptimization());
4294 3 : CHECK(code->embedded_objects_cleared());
4295 : }
4296 :
4297 16 : static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
4298 : const char* name) {
4299 : EmbeddedVector<char, 256> source;
4300 : SNPrintF(source,
4301 : "function %s() { return 0; }"
4302 : "%s(); %s();"
4303 : "%%OptimizeFunctionOnNextCall(%s);"
4304 16 : "%s();", name, name, name, name, name);
4305 16 : CompileRun(source.start());
4306 : i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
4307 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4308 : CcTest::global()
4309 48 : ->Get(isolate->GetCurrentContext(), v8_str(name))
4310 32 : .ToLocalChecked())));
4311 16 : return fun;
4312 : }
4313 :
4314 8 : static int GetCodeChainLength(Code code) {
4315 : int result = 0;
4316 32 : while (code->next_code_link()->IsCode()) {
4317 4 : result++;
4318 4 : code = Code::cast(code->next_code_link());
4319 : }
4320 8 : return result;
4321 : }
4322 :
4323 :
4324 25880 : TEST(NextCodeLinkIsWeak) {
4325 5 : FLAG_always_opt = false;
4326 5 : FLAG_allow_natives_syntax = true;
4327 5 : CcTest::InitializeVM();
4328 : Isolate* isolate = CcTest::i_isolate();
4329 5 : v8::internal::Heap* heap = CcTest::heap();
4330 :
4331 10 : if (!isolate->use_optimizer()) return;
4332 : HandleScope outer_scope(heap->isolate());
4333 : Handle<Code> code;
4334 4 : CcTest::CollectAllAvailableGarbage();
4335 : int code_chain_length_before, code_chain_length_after;
4336 : {
4337 : HandleScope scope(heap->isolate());
4338 : Handle<JSFunction> mortal =
4339 4 : OptimizeDummyFunction(CcTest::isolate(), "mortal");
4340 : Handle<JSFunction> immortal =
4341 4 : OptimizeDummyFunction(CcTest::isolate(), "immortal");
4342 16 : CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
4343 4 : code_chain_length_before = GetCodeChainLength(immortal->code());
4344 : // Keep the immortal code and let the mortal code die.
4345 8 : code = scope.CloseAndEscape(Handle<Code>(immortal->code(), isolate));
4346 : CompileRun("mortal = null; immortal = null;");
4347 : }
4348 4 : CcTest::CollectAllAvailableGarbage();
4349 : // Now mortal code should be dead.
4350 4 : code_chain_length_after = GetCodeChainLength(*code);
4351 4 : CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
4352 : }
4353 :
4354 25880 : TEST(NextCodeLinkInCodeDataContainerIsCleared) {
4355 5 : FLAG_always_opt = false;
4356 5 : FLAG_allow_natives_syntax = true;
4357 5 : CcTest::InitializeVM();
4358 : Isolate* isolate = CcTest::i_isolate();
4359 5 : v8::internal::Heap* heap = CcTest::heap();
4360 :
4361 10 : if (!isolate->use_optimizer()) return;
4362 : HandleScope outer_scope(heap->isolate());
4363 : Handle<CodeDataContainer> code_data_container;
4364 : {
4365 : HandleScope scope(heap->isolate());
4366 : Handle<JSFunction> mortal1 =
4367 4 : OptimizeDummyFunction(CcTest::isolate(), "mortal1");
4368 : Handle<JSFunction> mortal2 =
4369 4 : OptimizeDummyFunction(CcTest::isolate(), "mortal2");
4370 16 : CHECK_EQ(mortal2->code()->next_code_link(), mortal1->code());
4371 : code_data_container = scope.CloseAndEscape(Handle<CodeDataContainer>(
4372 8 : mortal2->code()->code_data_container(), isolate));
4373 : CompileRun("mortal1 = null; mortal2 = null;");
4374 : }
4375 4 : CcTest::CollectAllAvailableGarbage();
4376 8 : CHECK(code_data_container->next_code_link()->IsUndefined(isolate));
4377 : }
4378 :
4379 8 : static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4380 : i::byte buffer[i::Assembler::kMinimalBufferSize];
4381 : MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
4382 24 : ExternalAssemblerBuffer(buffer, sizeof(buffer)));
4383 8 : CodeDesc desc;
4384 16 : masm.Push(isolate->factory()->undefined_value());
4385 16 : masm.Push(isolate->factory()->undefined_value());
4386 8 : masm.Drop(2);
4387 8 : masm.GetCode(isolate, &desc);
4388 : Handle<Code> code = isolate->factory()->NewCode(
4389 16 : desc, Code::OPTIMIZED_FUNCTION, masm.CodeObject());
4390 16 : CHECK(code->IsCode());
4391 16 : return code;
4392 : }
4393 :
4394 :
4395 25880 : TEST(NextCodeLinkIsWeak2) {
4396 5 : FLAG_allow_natives_syntax = true;
4397 5 : CcTest::InitializeVM();
4398 : Isolate* isolate = CcTest::i_isolate();
4399 5 : v8::internal::Heap* heap = CcTest::heap();
4400 :
4401 10 : if (!isolate->use_optimizer()) return;
4402 : HandleScope outer_scope(heap->isolate());
4403 4 : CcTest::CollectAllAvailableGarbage();
4404 : Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
4405 : Handle<Code> new_head;
4406 8 : Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
4407 : {
4408 : HandleScope scope(heap->isolate());
4409 4 : Handle<Code> immortal = DummyOptimizedCode(isolate);
4410 4 : Handle<Code> mortal = DummyOptimizedCode(isolate);
4411 4 : mortal->set_next_code_link(*old_head);
4412 8 : immortal->set_next_code_link(*mortal);
4413 8 : context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
4414 4 : new_head = scope.CloseAndEscape(immortal);
4415 : }
4416 4 : CcTest::CollectAllAvailableGarbage();
4417 : // Now mortal code should be dead.
4418 16 : CHECK_EQ(*old_head, new_head->next_code_link());
4419 : }
4420 :
4421 :
4422 : static bool weak_ic_cleared = false;
4423 :
4424 49 : static void ClearWeakIC(
4425 49 : const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
4426 : printf("clear weak is called\n");
4427 49 : weak_ic_cleared = true;
4428 : data.GetParameter()->Reset();
4429 49 : }
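          : // Note: ClearWeakIC is registered via Persistent::SetWeak(...,
          : // kParameter) in the tests below, so it only runs if the GC actually
          : // reclaims the wrapped object. It records that fact in weak_ic_cleared
          : // (which the callers then CHECK) and resets the persistent handle.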
4430 :
4431 :
4432 25880 : TEST(WeakFunctionInConstructor) {
4433 1 : if (FLAG_lite_mode) return;
4434 5 : if (FLAG_always_opt) return;
4435 4 : FLAG_stress_compaction = false;
4436 4 : FLAG_stress_incremental_marking = false;
4437 4 : CcTest::InitializeVM();
4438 4 : v8::Isolate* isolate = CcTest::isolate();
4439 4 : LocalContext env;
4440 8 : v8::HandleScope scope(isolate);
4441 : CompileRun(
4442 : "function createObj(obj) {"
4443 : " return new obj();"
4444 : "}");
4445 : i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
4446 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4447 : CcTest::global()
4448 12 : ->Get(env.local(), v8_str("createObj"))
4449 8 : .ToLocalChecked())));
4450 :
4451 : v8::Persistent<v8::Object> garbage;
4452 : {
4453 4 : v8::HandleScope scope(isolate);
4454 : const char* source =
4455 : " (function() {"
4456 : " function hat() { this.x = 5; }"
4457 : " createObj(hat);"
4458 : " createObj(hat);"
4459 : " return hat;"
4460 : " })();";
4461 4 : garbage.Reset(isolate, CompileRun(env.local(), source)
4462 4 : .ToLocalChecked()
4463 4 : ->ToObject(env.local())
4464 8 : .ToLocalChecked());
4465 : }
4466 4 : weak_ic_cleared = false;
4467 : garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
4468 4 : CcTest::CollectAllGarbage();
4469 4 : CHECK(weak_ic_cleared);
4470 :
4471 : // We've determined that the constructor in createObj has had its weak cell
4472 : // cleared. Now, verify that one additional call with a new function
4473 : // allows monomorphicity.
4474 : Handle<FeedbackVector> feedback_vector =
4475 8 : Handle<FeedbackVector>(createObj->feedback_vector(), CcTest::i_isolate());
4476 4 : for (int i = 0; i < 20; i++) {
4477 8 : MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
4478 4 : CHECK(slot_value->IsWeakOrCleared());
4479 4 : if (slot_value->IsCleared()) break;
4480 0 : CcTest::CollectAllGarbage();
4481 : }
4482 :
4483 8 : MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
4484 4 : CHECK(slot_value->IsCleared());
4485 : CompileRun(
4486 : "function coat() { this.x = 6; }"
4487 : "createObj(coat);");
4488 8 : slot_value = feedback_vector->Get(FeedbackSlot(0));
4489 8 : CHECK(slot_value->IsWeak());
4490 : }
4491 :
4492 :
4493 : // Checks that the value returned by executing the source is only weakly
4493 : // reachable, i.e. it dies on GC.
4494 45 : void CheckWeakness(const char* source) {
4495 45 : FLAG_stress_compaction = false;
4496 45 : FLAG_stress_incremental_marking = false;
4497 45 : CcTest::InitializeVM();
4498 45 : v8::Isolate* isolate = CcTest::isolate();
4499 45 : LocalContext env;
4500 90 : v8::HandleScope scope(isolate);
4501 : v8::Persistent<v8::Object> garbage;
4502 : {
4503 45 : v8::HandleScope scope(isolate);
4504 45 : garbage.Reset(isolate, CompileRun(env.local(), source)
4505 45 : .ToLocalChecked()
4506 45 : ->ToObject(env.local())
4507 90 : .ToLocalChecked());
4508 : }
4509 45 : weak_ic_cleared = false;
4510 : garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
4511 45 : CcTest::CollectAllGarbage();
4512 90 : CHECK(weak_ic_cleared);
4513 45 : }
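          : // Rough usage sketch (the concrete tests follow below): the source
          : // string must return the object whose liveness is under test, e.g.
          : //   CheckWeakness("(function() { var proto = {}; /* ... */ return proto; })();");
          : // If an IC (or anything else) kept that object strongly reachable, the
          : // weak callback would not fire and CHECK(weak_ic_cleared) would fail.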
4514 :
4515 :
4516 : // Each of the following "weak IC" tests creates an IC that embeds a map with
4517 : // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
4518 25880 : TEST(WeakMapInMonomorphicLoadIC) {
4519 : CheckWeakness("function loadIC(obj) {"
4520 : " return obj.name;"
4521 : "}"
4522 : " (function() {"
4523 : " var proto = {'name' : 'weak'};"
4524 : " var obj = Object.create(proto);"
4525 : " loadIC(obj);"
4526 : " loadIC(obj);"
4527 : " loadIC(obj);"
4528 : " return proto;"
4529 5 : " })();");
4530 5 : }
4531 :
4532 :
4533 25880 : TEST(WeakMapInPolymorphicLoadIC) {
4534 : CheckWeakness(
4535 : "function loadIC(obj) {"
4536 : " return obj.name;"
4537 : "}"
4538 : " (function() {"
4539 : " var proto = {'name' : 'weak'};"
4540 : " var obj = Object.create(proto);"
4541 : " loadIC(obj);"
4542 : " loadIC(obj);"
4543 : " loadIC(obj);"
4544 : " var poly = Object.create(proto);"
4545 : " poly.x = true;"
4546 : " loadIC(poly);"
4547 : " return proto;"
4548 5 : " })();");
4549 5 : }
4550 :
4551 :
4552 25880 : TEST(WeakMapInMonomorphicKeyedLoadIC) {
4553 : CheckWeakness("function keyedLoadIC(obj, field) {"
4554 : " return obj[field];"
4555 : "}"
4556 : " (function() {"
4557 : " var proto = {'name' : 'weak'};"
4558 : " var obj = Object.create(proto);"
4559 : " keyedLoadIC(obj, 'name');"
4560 : " keyedLoadIC(obj, 'name');"
4561 : " keyedLoadIC(obj, 'name');"
4562 : " return proto;"
4563 5 : " })();");
4564 5 : }
4565 :
4566 :
4567 25880 : TEST(WeakMapInPolymorphicKeyedLoadIC) {
4568 : CheckWeakness(
4569 : "function keyedLoadIC(obj, field) {"
4570 : " return obj[field];"
4571 : "}"
4572 : " (function() {"
4573 : " var proto = {'name' : 'weak'};"
4574 : " var obj = Object.create(proto);"
4575 : " keyedLoadIC(obj, 'name');"
4576 : " keyedLoadIC(obj, 'name');"
4577 : " keyedLoadIC(obj, 'name');"
4578 : " var poly = Object.create(proto);"
4579 : " poly.x = true;"
4580 : " keyedLoadIC(poly, 'name');"
4581 : " return proto;"
4582 5 : " })();");
4583 5 : }
4584 :
4585 :
4586 25880 : TEST(WeakMapInMonomorphicStoreIC) {
4587 : CheckWeakness("function storeIC(obj, value) {"
4588 : " obj.name = value;"
4589 : "}"
4590 : " (function() {"
4591 : " var proto = {'name' : 'weak'};"
4592 : " var obj = Object.create(proto);"
4593 : " storeIC(obj, 'x');"
4594 : " storeIC(obj, 'x');"
4595 : " storeIC(obj, 'x');"
4596 : " return proto;"
4597 5 : " })();");
4598 5 : }
4599 :
4600 :
4601 25880 : TEST(WeakMapInPolymorphicStoreIC) {
4602 : CheckWeakness(
4603 : "function storeIC(obj, value) {"
4604 : " obj.name = value;"
4605 : "}"
4606 : " (function() {"
4607 : " var proto = {'name' : 'weak'};"
4608 : " var obj = Object.create(proto);"
4609 : " storeIC(obj, 'x');"
4610 : " storeIC(obj, 'x');"
4611 : " storeIC(obj, 'x');"
4612 : " var poly = Object.create(proto);"
4613 : " poly.x = true;"
4614 : " storeIC(poly, 'x');"
4615 : " return proto;"
4616 5 : " })();");
4617 5 : }
4618 :
4619 :
4620 25880 : TEST(WeakMapInMonomorphicKeyedStoreIC) {
4621 : CheckWeakness("function keyedStoreIC(obj, field, value) {"
4622 : " obj[field] = value;"
4623 : "}"
4624 : " (function() {"
4625 : " var proto = {'name' : 'weak'};"
4626 : " var obj = Object.create(proto);"
4627 : " keyedStoreIC(obj, 'x');"
4628 : " keyedStoreIC(obj, 'x');"
4629 : " keyedStoreIC(obj, 'x');"
4630 : " return proto;"
4631 5 : " })();");
4632 5 : }
4633 :
4634 :
4635 25880 : TEST(WeakMapInPolymorphicKeyedStoreIC) {
4636 : CheckWeakness(
4637 : "function keyedStoreIC(obj, field, value) {"
4638 : " obj[field] = value;"
4639 : "}"
4640 : " (function() {"
4641 : " var proto = {'name' : 'weak'};"
4642 : " var obj = Object.create(proto);"
4643 : " keyedStoreIC(obj, 'x');"
4644 : " keyedStoreIC(obj, 'x');"
4645 : " keyedStoreIC(obj, 'x');"
4646 : " var poly = Object.create(proto);"
4647 : " poly.x = true;"
4648 : " keyedStoreIC(poly, 'x');"
4649 : " return proto;"
4650 5 : " })();");
4651 5 : }
4652 :
4653 :
4654 25880 : TEST(WeakMapInMonomorphicCompareNilIC) {
4655 : CheckWeakness("function compareNilIC(obj) {"
4656 : " return obj == null;"
4657 : "}"
4658 : " (function() {"
4659 : " var proto = {'name' : 'weak'};"
4660 : " var obj = Object.create(proto);"
4661 : " compareNilIC(obj);"
4662 : " compareNilIC(obj);"
4663 : " compareNilIC(obj);"
4664 : " return proto;"
4665 5 : " })();");
4666 5 : }
4667 :
4668 :
4669 8 : Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4670 8 : Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4671 : Handle<Object> obj =
4672 16 : Object::GetProperty(isolate, isolate->global_object(), str)
4673 16 : .ToHandleChecked();
4674 8 : return Handle<JSFunction>::cast(obj);
4675 : }
4676 :
4677 16 : void CheckIC(Handle<JSFunction> function, int slot_index,
4678 : InlineCacheState state) {
4679 16 : FeedbackVector vector = function->feedback_vector();
4680 : FeedbackSlot slot(slot_index);
4681 : FeedbackNexus nexus(vector, slot);
4682 16 : CHECK_EQ(nexus.ic_state(), state);
4683 16 : }
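          : // CheckIC reads the function's feedback vector at the given slot and
          : // compares the inline-cache state reported by FeedbackNexus, e.g.
          : //   CheckIC(loadIC, 0, MONOMORPHIC);
          : // asserts that feedback slot 0 of loadIC is still monomorphic.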
4684 :
4685 25880 : TEST(MonomorphicStaysMonomorphicAfterGC) {
4686 6 : if (!FLAG_use_ic) return;
4687 5 : if (FLAG_always_opt) return;
4688 : ManualGCScope manual_gc_scope;
4689 4 : CcTest::InitializeVM();
4690 : Isolate* isolate = CcTest::i_isolate();
4691 8 : v8::HandleScope scope(CcTest::isolate());
4692 : CompileRun(
4693 : "function loadIC(obj) {"
4694 : " return obj.name;"
4695 : "}"
4696 : "function testIC() {"
4697 : " var proto = {'name' : 'weak'};"
4698 : " var obj = Object.create(proto);"
4699 : " loadIC(obj);"
4700 : " loadIC(obj);"
4701 : " loadIC(obj);"
4702 : " return proto;"
4703 : "};");
4704 4 : Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
4705 : {
4706 4 : v8::HandleScope scope(CcTest::isolate());
4707 4 : CompileRun("(testIC())");
4708 : }
4709 4 : CcTest::CollectAllGarbage();
4710 4 : CheckIC(loadIC, 0, MONOMORPHIC);
4711 : {
4712 4 : v8::HandleScope scope(CcTest::isolate());
4713 4 : CompileRun("(testIC())");
4714 : }
4715 4 : CheckIC(loadIC, 0, MONOMORPHIC);
4716 : }
4717 :
4718 :
4719 25880 : TEST(PolymorphicStaysPolymorphicAfterGC) {
4720 6 : if (!FLAG_use_ic) return;
4721 5 : if (FLAG_always_opt) return;
4722 : ManualGCScope manual_gc_scope;
4723 4 : CcTest::InitializeVM();
4724 : Isolate* isolate = CcTest::i_isolate();
4725 8 : v8::HandleScope scope(CcTest::isolate());
4726 : CompileRun(
4727 : "function loadIC(obj) {"
4728 : " return obj.name;"
4729 : "}"
4730 : "function testIC() {"
4731 : " var proto = {'name' : 'weak'};"
4732 : " var obj = Object.create(proto);"
4733 : " loadIC(obj);"
4734 : " loadIC(obj);"
4735 : " loadIC(obj);"
4736 : " var poly = Object.create(proto);"
4737 : " poly.x = true;"
4738 : " loadIC(poly);"
4739 : " return proto;"
4740 : "};");
4741 4 : Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
4742 : {
4743 4 : v8::HandleScope scope(CcTest::isolate());
4744 4 : CompileRun("(testIC())");
4745 : }
4746 4 : CcTest::CollectAllGarbage();
4747 4 : CheckIC(loadIC, 0, POLYMORPHIC);
4748 : {
4749 4 : v8::HandleScope scope(CcTest::isolate());
4750 4 : CompileRun("(testIC())");
4751 : }
4752 4 : CheckIC(loadIC, 0, POLYMORPHIC);
4753 : }
4754 :
4755 : #ifdef DEBUG
4756 : TEST(AddInstructionChangesNewSpacePromotion) {
4757 : FLAG_allow_natives_syntax = true;
4758 : FLAG_expose_gc = true;
4759 : FLAG_stress_compaction = true;
4760 : FLAG_gc_interval = 1000;
4761 : CcTest::InitializeVM();
4762 : if (!FLAG_allocation_site_pretenuring) return;
4763 : v8::HandleScope scope(CcTest::isolate());
4764 : Isolate* isolate = CcTest::i_isolate();
4765 : Heap* heap = isolate->heap();
4766 : LocalContext env;
4767 : CompileRun(
4768 : "function add(a, b) {"
4769 : " return a + b;"
4770 : "}"
4771 : "add(1, 2);"
4772 : "add(\"a\", \"b\");"
4773 : "var oldSpaceObject;"
4774 : "gc();"
4775 : "function crash(x) {"
4776 : " var object = {a: null, b: null};"
4777 : " var result = add(1.5, x | 0);"
4778 : " object.a = result;"
4779 : " oldSpaceObject = object;"
4780 : " return object;"
4781 : "}"
4782 : "crash(1);"
4783 : "crash(1);"
4784 : "%OptimizeFunctionOnNextCall(crash);"
4785 : "crash(1);");
4786 :
4787 : v8::Local<v8::Object> global = CcTest::global();
4788 : v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
4789 : global->Get(env.local(), v8_str("crash")).ToLocalChecked());
4790 : v8::Local<v8::Value> args1[] = {v8_num(1)};
4791 : heap->DisableInlineAllocation();
4792 : heap->set_allocation_timeout(1);
4793 : g->Call(env.local(), global, 1, args1).ToLocalChecked();
4794 : CcTest::CollectAllGarbage();
4795 : }
4796 :
4797 :
4798 : void OnFatalErrorExpectOOM(const char* location, const char* message) {
4799 : // Exit with 0 if the location matches our expectation.
4800 : exit(strcmp(location, "CALL_AND_RETRY_LAST"));
4801 : }
4802 :
4803 :
4804 : TEST(CEntryStubOOM) {
4805 : FLAG_allow_natives_syntax = true;
4806 : CcTest::InitializeVM();
4807 : v8::HandleScope scope(CcTest::isolate());
4808 : CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);
4809 :
4810 : v8::Local<v8::Value> result = CompileRun(
4811 : "%SetAllocationTimeout(1, 1);"
4812 : "var a = [];"
4813 : "a.__proto__ = [];"
4814 : "a.unshift(1)");
4815 :
4816 : CHECK(result->IsNumber());
4817 : }
4818 :
4819 : #endif // DEBUG
4820 :
4821 :
4822 5 : static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
4823 :
4824 :
4825 5 : static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
4826 5 : CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, nullptr);
4827 5 : }
4828 :
4829 25880 : HEAP_TEST(Regress538257) {
4830 : ManualGCScope manual_gc_scope;
4831 5 : FLAG_manual_evacuation_candidates_selection = true;
4832 : v8::Isolate::CreateParams create_params;
4833 : // Set heap limits.
4834 : create_params.constraints.set_max_semi_space_size_in_kb(1024);
4835 : #ifdef DEBUG
4836 : create_params.constraints.set_max_old_space_size(20);
4837 : #else
4838 : create_params.constraints.set_max_old_space_size(6);
4839 : #endif
4840 5 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
4841 5 : v8::Isolate* isolate = v8::Isolate::New(create_params);
4842 5 : isolate->Enter();
4843 : {
4844 : i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
4845 10 : Heap* heap = i_isolate->heap();
4846 : HandleScope handle_scope(i_isolate);
4847 5725 : PagedSpace* old_space = heap->old_space();
4848 : const int kMaxObjects = 10000;
4849 : const int kFixedArrayLen = 512;
4850 50005 : Handle<FixedArray> objects[kMaxObjects];
4851 11445 : for (int i = 0; (i < kMaxObjects) &&
4852 5725 : heap->CanExpandOldGeneration(old_space->AreaSize());
4853 : i++) {
4854 5720 : objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
4855 11440 : heap::ForceEvacuationCandidate(Page::FromHeapObject(*objects[i]));
4856 : }
4857 5 : heap::SimulateFullSpace(old_space);
4858 5 : CcTest::CollectAllGarbage();
4859 : // If we get this far, we've successfully aborted compaction. Any further
4860 : // allocations might trigger OOM.
4861 : }
4862 5 : isolate->Exit();
4863 5 : isolate->Dispose();
4864 5 : }
4865 :
4866 :
4867 25880 : TEST(Regress357137) {
4868 5 : CcTest::InitializeVM();
4869 5 : v8::Isolate* isolate = CcTest::isolate();
4870 5 : v8::HandleScope hscope(isolate);
4871 5 : v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
4872 : global->Set(
4873 : v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
4874 : .ToLocalChecked(),
4875 15 : v8::FunctionTemplate::New(isolate, RequestInterrupt));
4876 5 : v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
4877 5 : CHECK(!context.IsEmpty());
4878 : v8::Context::Scope cscope(context);
4879 :
4880 : v8::Local<v8::Value> result = CompileRun(
4881 : "var locals = '';"
4882 : "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
4883 : "eval('function f() {' + locals + 'return function() { return v0; }; }');"
4884 : "interrupt();" // This triggers a fake stack overflow in f.
4885 : "f()()");
4886 15 : CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
4887 5 : }
4888 :
4889 :
4890 25880 : TEST(Regress507979) {
4891 : const int kFixedArrayLen = 10;
4892 5 : CcTest::InitializeVM();
4893 : Isolate* isolate = CcTest::i_isolate();
4894 : HandleScope handle_scope(isolate);
4895 :
4896 5 : Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
4897 5 : Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
4898 5 : CHECK(Heap::InYoungGeneration(*o1));
4899 5 : CHECK(Heap::InYoungGeneration(*o2));
4900 :
4901 10 : HeapIterator it(isolate->heap(), i::HeapIterator::kFilterUnreachable);
4902 :
4903 : // Replace parts of an object placed before a live object with a filler. This
4904 : // way the filler object shares the mark bits with the following live object.
4905 5 : o1->Shrink(isolate, kFixedArrayLen - 1);
4906 :
4907 32391 : for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
4908 : // Let's not optimize the loop away.
4909 32386 : CHECK_NE(obj->address(), kNullAddress);
4910 : }
4911 5 : }
4912 :
4913 25880 : TEST(Regress388880) {
4914 5 : if (!FLAG_incremental_marking) return;
4915 5 : FLAG_stress_incremental_marking = false;
4916 5 : FLAG_expose_gc = true;
4917 5 : CcTest::InitializeVM();
4918 5 : v8::HandleScope scope(CcTest::isolate());
4919 : Isolate* isolate = CcTest::i_isolate();
4920 : Factory* factory = isolate->factory();
4921 10 : Heap* heap = isolate->heap();
4922 :
4923 5 : Handle<Map> map1 = Map::Create(isolate, 1);
4924 5 : Handle<String> name = factory->NewStringFromStaticChars("foo");
4925 5 : name = factory->InternalizeString(name);
4926 : Handle<Map> map2 =
4927 : Map::CopyWithField(isolate, map1, name, FieldType::Any(isolate), NONE,
4928 : PropertyConstness::kMutable, Representation::Tagged(),
4929 10 : OMIT_TRANSITION)
4930 10 : .ToHandleChecked();
4931 :
4932 5 : size_t desired_offset = Page::kPageSize - map1->instance_size();
4933 :
4934 : // Allocate padding objects in the old pointer space so that the object
4935 : // allocated afterwards ends at the end of the page.
4936 5 : heap::SimulateFullSpace(heap->old_space());
4937 : size_t padding_size =
4938 5 : desired_offset - MemoryChunkLayout::ObjectStartOffsetInDataPage();
4939 10 : heap::CreatePadding(heap, static_cast<int>(padding_size), TENURED);
4940 :
4941 5 : Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
4942 10 : o->set_raw_properties_or_hash(*factory->empty_fixed_array());
4943 :
4944 : // Ensure that the object was allocated where we need it.
4945 : Page* page = Page::FromHeapObject(*o);
4946 5 : CHECK_EQ(desired_offset, page->Offset(o->address()));
4947 :
4948 : // Now we have an object right at the end of the page.
4949 :
4950 : // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
4951 : // that would cause a crash.
4952 5 : IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4953 5 : marking->Stop();
4954 : CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
4955 5 : i::GarbageCollectionReason::kTesting);
4956 5 : CHECK(marking->IsMarking());
4957 :
4958 : // Now everything is set up for crashing in JSObject::MigrateFastToFast()
4959 : // when it calls heap->AdjustLiveBytes(...).
4960 5 : JSObject::MigrateToMap(o, map2);
4961 : }
4962 :
4963 :
4964 25880 : TEST(Regress3631) {
4965 5 : if (!FLAG_incremental_marking) return;
4966 5 : FLAG_expose_gc = true;
4967 5 : CcTest::InitializeVM();
4968 5 : v8::HandleScope scope(CcTest::isolate());
4969 : Isolate* isolate = CcTest::i_isolate();
4970 5 : Heap* heap = isolate->heap();
4971 5 : IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4972 : v8::Local<v8::Value> result = CompileRun(
4973 : "var weak_map = new WeakMap();"
4974 : "var future_keys = [];"
4975 : "for (var i = 0; i < 50; i++) {"
4976 : " var key = {'k' : i + 0.1};"
4977 : " weak_map.set(key, 1);"
4978 : " future_keys.push({'x' : i + 0.2});"
4979 : "}"
4980 : "weak_map");
4981 5 : if (marking->IsStopped()) {
4982 : CcTest::heap()->StartIncrementalMarking(
4983 4 : i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
4984 : }
4985 : // Incrementally mark the backing store.
4986 : Handle<JSReceiver> obj =
4987 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
4988 : Handle<JSWeakCollection> weak_map(JSWeakCollection::cast(*obj), isolate);
4989 5 : SimulateIncrementalMarking(heap);
4990 : // Stash the backing store in a handle.
4991 : Handle<Object> save(weak_map->table(), isolate);
4992 : // The following line will update the backing store.
4993 : CompileRun(
4994 : "for (var i = 0; i < 50; i++) {"
4995 : " weak_map.set(future_keys[i], i);"
4996 : "}");
4997 : heap->incremental_marking()->set_should_hurry(true);
4998 5 : CcTest::CollectGarbage(OLD_SPACE);
4999 : }
5000 :
5001 :
5002 25880 : TEST(Regress442710) {
5003 5 : CcTest::InitializeVM();
5004 : Isolate* isolate = CcTest::i_isolate();
5005 : Factory* factory = isolate->factory();
5006 :
5007 : HandleScope sc(isolate);
5008 : Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
5009 10 : isolate);
5010 5 : Handle<JSArray> array = factory->NewJSArray(2);
5011 :
5012 5 : Handle<String> name = factory->InternalizeUtf8String("testArray");
5013 10 : Object::SetProperty(isolate, global, name, array).Check();
5014 : CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5015 5 : CcTest::CollectGarbage(OLD_SPACE);
5016 5 : }
5017 :
5018 :
5019 25880 : HEAP_TEST(NumberStringCacheSize) {
5020 : // Test that the number-string cache has not been resized in the snapshot.
5021 5 : CcTest::InitializeVM();
5022 5 : Isolate* isolate = CcTest::i_isolate();
5023 10 : if (!isolate->snapshot_available()) return;
5024 5 : Heap* heap = isolate->heap();
5025 10 : CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5026 : heap->number_string_cache()->length());
5027 : }
5028 :
5029 :
5030 25880 : TEST(Regress3877) {
5031 5 : CcTest::InitializeVM();
5032 : Isolate* isolate = CcTest::i_isolate();
5033 : Factory* factory = isolate->factory();
5034 : HandleScope scope(isolate);
5035 : CompileRun("function cls() { this.x = 10; }");
5036 5 : Handle<WeakFixedArray> weak_prototype_holder = factory->NewWeakFixedArray(1);
5037 : {
5038 : HandleScope inner_scope(isolate);
5039 : v8::Local<v8::Value> result = CompileRun("cls.prototype");
5040 : Handle<JSReceiver> proto =
5041 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5042 10 : weak_prototype_holder->Set(0, HeapObjectReference::Weak(*proto));
5043 : }
5044 10 : CHECK(!weak_prototype_holder->Get(0)->IsCleared());
5045 : CompileRun(
5046 : "var a = { };"
5047 : "a.x = new cls();"
5048 : "cls.prototype = null;");
5049 25 : for (int i = 0; i < 4; i++) {
5050 20 : CcTest::CollectAllGarbage();
5051 : }
5052 : // The map of a.x keeps prototype alive
5053 10 : CHECK(!weak_prototype_holder->Get(0)->IsCleared());
5054 : // Change the map of a.x and make the previous map garbage collectable.
5055 : CompileRun("a.x.__proto__ = {};");
5056 25 : for (int i = 0; i < 4; i++) {
5057 20 : CcTest::CollectAllGarbage();
5058 : }
5059 10 : CHECK(weak_prototype_holder->Get(0)->IsCleared());
5060 5 : }
5061 :
5062 20 : Handle<WeakFixedArray> AddRetainedMap(Isolate* isolate, Heap* heap) {
5063 : HandleScope inner_scope(isolate);
5064 20 : Handle<Map> map = Map::Create(isolate, 1);
5065 : v8::Local<v8::Value> result =
5066 : CompileRun("(function () { return {x : 10}; })();");
5067 : Handle<JSReceiver> proto =
5068 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5069 20 : Map::SetPrototype(isolate, map, proto);
5070 20 : heap->AddRetainedMap(map);
5071 20 : Handle<WeakFixedArray> array = isolate->factory()->NewWeakFixedArray(1);
5072 40 : array->Set(0, HeapObjectReference::Weak(*map));
5073 40 : return inner_scope.CloseAndEscape(array);
5074 : }
5075 :
5076 :
5077 20 : void CheckMapRetainingFor(int n) {
5078 20 : FLAG_retain_maps_for_n_gc = n;
5079 : Isolate* isolate = CcTest::i_isolate();
5080 20 : Heap* heap = isolate->heap();
5081 20 : Handle<WeakFixedArray> array_with_map = AddRetainedMap(isolate, heap);
5082 40 : CHECK(array_with_map->Get(0)->IsWeak());
5083 50 : for (int i = 0; i < n; i++) {
5084 50 : heap::SimulateIncrementalMarking(heap);
5085 50 : CcTest::CollectGarbage(OLD_SPACE);
5086 : }
5087 40 : CHECK(array_with_map->Get(0)->IsWeak());
5088 20 : heap::SimulateIncrementalMarking(heap);
5089 20 : CcTest::CollectGarbage(OLD_SPACE);
5090 40 : CHECK(array_with_map->Get(0)->IsCleared());
5091 20 : }
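          : // The expectation encoded above: a map added via Heap::AddRetainedMap()
          : // stays alive for FLAG_retain_maps_for_n_gc full GCs even though it is
          : // only weakly referenced from the test array, and the weak reference is
          : // cleared by the GC after that. CheckMapRetainingFor(n) verifies exactly
          : // that boundary.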
5092 :
5093 :
5094 25880 : TEST(MapRetaining) {
5095 5 : if (!FLAG_incremental_marking) return;
5096 : ManualGCScope manual_gc_scope;
5097 5 : CcTest::InitializeVM();
5098 10 : v8::HandleScope scope(CcTest::isolate());
5099 5 : CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5100 5 : CheckMapRetainingFor(0);
5101 5 : CheckMapRetainingFor(1);
5102 5 : CheckMapRetainingFor(7);
5103 : }
5104 :
5105 25875 : TEST(PreprocessStackTrace) {
5106 : // Do not automatically trigger early GC.
5107 0 : FLAG_gc_interval = -1;
5108 0 : CcTest::InitializeVM();
5109 0 : v8::HandleScope scope(CcTest::isolate());
5110 0 : v8::TryCatch try_catch(CcTest::isolate());
5111 : CompileRun("throw new Error();");
5112 0 : CHECK(try_catch.HasCaught());
5113 : Isolate* isolate = CcTest::i_isolate();
5114 0 : Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
5115 : Handle<Name> key = isolate->factory()->stack_trace_symbol();
5116 : Handle<Object> stack_trace =
5117 0 : Object::GetProperty(isolate, exception, key).ToHandleChecked();
5118 : Handle<Object> code =
5119 0 : Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5120 0 : CHECK(code->IsCode());
5121 :
5122 0 : CcTest::CollectAllAvailableGarbage();
5123 :
5124 : Handle<Object> pos =
5125 0 : Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5126 0 : CHECK(pos->IsSmi());
5127 :
5128 0 : Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
5129 0 : int array_length = Smi::ToInt(stack_trace_array->length());
5130 0 : for (int i = 0; i < array_length; i++) {
5131 : Handle<Object> element =
5132 0 : Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
5133 0 : CHECK(!element->IsCode());
5134 0 : }
5135 0 : }
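          : // The checks above rely on the stack-trace preprocessing performed during
          : // GC: before the collection, element 3 of the stashed stack-trace array
          : // still holds a Code object; afterwards the same slot holds a Smi
          : // (presumably the frame's position), and no Code object may remain
          : // anywhere in the array.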
5136 :
5137 :
5138 215 : void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
5139 215 : CHECK_LE(FixedArray::kHeaderSize, bytes);
5140 215 : CHECK(IsAligned(bytes, kTaggedSize));
5141 : Factory* factory = isolate->factory();
5142 : HandleScope scope(isolate);
5143 : AlwaysAllocateScope always_allocate(isolate);
5144 : int elements =
5145 215 : static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
5146 : Handle<FixedArray> array = factory->NewFixedArray(
5147 215 : elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
5148 430 : CHECK((space == NEW_SPACE) == Heap::InYoungGeneration(*array));
5149 215 : CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
5150 215 : }
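          : // AllocateInSpace sizes the FixedArray so that the whole allocation is
          : // exactly `bytes` long. For example, assuming kTaggedSize == 8 and a
          : // 16-byte FixedArray header (both depend on the build configuration),
          : // bytes == 1024 yields (1024 - 16) / 8 == 126 elements. The
          : // AlwaysAllocateScope lets the allocation succeed directly instead of
          : // retrying via GC, which would perturb the allocation counters measured
          : // by the tests below.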
5151 :
5152 :
5153 25880 : TEST(NewSpaceAllocationCounter) {
5154 5 : CcTest::InitializeVM();
5155 5 : v8::HandleScope scope(CcTest::isolate());
5156 : Isolate* isolate = CcTest::i_isolate();
5157 : Heap* heap = isolate->heap();
5158 : size_t counter1 = heap->NewSpaceAllocationCounter();
5159 5 : CcTest::CollectGarbage(NEW_SPACE);
5160 5 : CcTest::CollectGarbage(NEW_SPACE); // Ensure new space is empty.
5161 : const size_t kSize = 1024;
5162 5 : AllocateInSpace(isolate, kSize, NEW_SPACE);
5163 : size_t counter2 = heap->NewSpaceAllocationCounter();
5164 5 : CHECK_EQ(kSize, counter2 - counter1);
5165 5 : CcTest::CollectGarbage(NEW_SPACE);
5166 : size_t counter3 = heap->NewSpaceAllocationCounter();
5167 5 : CHECK_EQ(0U, counter3 - counter2);
5168 : // Test counter overflow.
5169 : size_t max_counter = static_cast<size_t>(-1);
5170 : heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
5171 : size_t start = heap->NewSpaceAllocationCounter();
5172 105 : for (int i = 0; i < 20; i++) {
5173 100 : AllocateInSpace(isolate, kSize, NEW_SPACE);
5174 : size_t counter = heap->NewSpaceAllocationCounter();
5175 100 : CHECK_EQ(kSize, counter - start);
5176 : start = counter;
5177 5 : }
5178 5 : }
5179 :
5180 :
5181 25880 : TEST(OldSpaceAllocationCounter) {
5182 5 : CcTest::InitializeVM();
5183 5 : v8::HandleScope scope(CcTest::isolate());
5184 : Isolate* isolate = CcTest::i_isolate();
5185 5 : Heap* heap = isolate->heap();
5186 : size_t counter1 = heap->OldGenerationAllocationCounter();
5187 5 : CcTest::CollectGarbage(NEW_SPACE);
5188 5 : CcTest::CollectGarbage(NEW_SPACE);
5189 : const size_t kSize = 1024;
5190 5 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5191 : size_t counter2 = heap->OldGenerationAllocationCounter();
5192 : // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
5193 5 : CHECK_LE(kSize, counter2 - counter1);
5194 5 : CcTest::CollectGarbage(NEW_SPACE);
5195 : size_t counter3 = heap->OldGenerationAllocationCounter();
5196 5 : CHECK_EQ(0u, counter3 - counter2);
5197 5 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5198 5 : CcTest::CollectGarbage(OLD_SPACE);
5199 : size_t counter4 = heap->OldGenerationAllocationCounter();
5200 5 : CHECK_LE(kSize, counter4 - counter3);
5201 : // Test counter overflow.
5202 : size_t max_counter = static_cast<size_t>(-1);
5203 : heap->set_old_generation_allocation_counter_at_last_gc(max_counter -
5204 : 10 * kSize);
5205 : size_t start = heap->OldGenerationAllocationCounter();
5206 105 : for (int i = 0; i < 20; i++) {
5207 100 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5208 : size_t counter = heap->OldGenerationAllocationCounter();
5209 100 : CHECK_LE(kSize, counter - start);
5210 : start = counter;
5211 5 : }
5212 5 : }
5213 :
5214 :
5215 20 : static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
5216 : Isolate* isolate = CcTest::i_isolate();
5217 : Object message(
5218 20 : *reinterpret_cast<Address*>(isolate->pending_message_obj_address()));
5219 20 : CHECK(message->IsTheHole(isolate));
5220 20 : }
5221 :
5222 :
5223 25880 : TEST(MessageObjectLeak) {
5224 5 : CcTest::InitializeVM();
5225 5 : v8::Isolate* isolate = CcTest::isolate();
5226 5 : v8::HandleScope scope(isolate);
5227 5 : v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5228 : global->Set(
5229 : v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
5230 : .ToLocalChecked(),
5231 15 : v8::FunctionTemplate::New(isolate, CheckLeak));
5232 5 : v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
5233 : v8::Context::Scope cscope(context);
5234 :
5235 : const char* test =
5236 : "try {"
5237 : " throw 'message 1';"
5238 : "} catch (e) {"
5239 : "}"
5240 : "check();"
5241 : "L: try {"
5242 : " throw 'message 2';"
5243 : "} finally {"
5244 : " break L;"
5245 : "}"
5246 : "check();";
5247 : CompileRun(test);
5248 :
5249 : const char* flag = "--turbo-filter=*";
5250 5 : FlagList::SetFlagsFromString(flag, StrLength(flag));
5251 5 : FLAG_always_opt = true;
5252 :
5253 5 : CompileRun(test);
5254 5 : }
5255 :
5256 :
5257 10 : static void CheckEqualSharedFunctionInfos(
5258 : const v8::FunctionCallbackInfo<v8::Value>& args) {
5259 10 : Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
5260 10 : Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
5261 10 : Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
5262 10 : Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
5263 30 : CHECK(fun1->shared() == fun2->shared());
5264 10 : }
5265 :
5266 :
5267 10 : static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
5268 : Isolate* isolate = CcTest::i_isolate();
5269 10 : Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
5270 10 : Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
5271 : // Bytecode is code too.
5272 20 : SharedFunctionInfo::DiscardCompiled(isolate, handle(fun->shared(), isolate));
5273 20 : fun->set_code(*BUILTIN_CODE(isolate, CompileLazy));
5274 10 : CcTest::CollectAllAvailableGarbage();
5275 10 : }
5276 :
5277 :
5278 25880 : TEST(CanonicalSharedFunctionInfo) {
5279 5 : CcTest::InitializeVM();
5280 5 : v8::Isolate* isolate = CcTest::isolate();
5281 5 : v8::HandleScope scope(isolate);
5282 5 : v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5283 : global->Set(isolate, "check", v8::FunctionTemplate::New(
5284 10 : isolate, CheckEqualSharedFunctionInfos));
5285 : global->Set(isolate, "remove",
5286 10 : v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
5287 5 : v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
5288 : v8::Context::Scope cscope(context);
5289 : CompileRun(
5290 : "function f() { return function g() {}; }"
5291 : "var g1 = f();"
5292 : "remove(f);"
5293 : "var g2 = f();"
5294 : "check(g1, g2);");
5295 :
5296 : CompileRun(
5297 : "function f() { return (function() { return function g() {}; })(); }"
5298 : "var g1 = f();"
5299 : "remove(f);"
5300 : "var g2 = f();"
5301 5 : "check(g1, g2);");
5302 5 : }
5303 :
5304 :
5305 25880 : TEST(ScriptIterator) {
5306 5 : CcTest::InitializeVM();
5307 5 : v8::HandleScope scope(CcTest::isolate());
5308 : Isolate* isolate = CcTest::i_isolate();
5309 5 : Heap* heap = CcTest::heap();
5310 10 : LocalContext context;
5311 :
5312 5 : CcTest::CollectAllGarbage();
5313 :
5314 : int script_count = 0;
5315 : {
5316 5 : HeapIterator it(heap);
5317 83434 : for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
5318 41712 : if (obj->IsScript()) script_count++;
5319 5 : }
5320 : }
5321 :
5322 : {
5323 5 : Script::Iterator iterator(isolate);
5324 40 : for (Script script = iterator.Next(); !script.is_null();
5325 : script = iterator.Next()) {
5326 15 : script_count--;
5327 : }
5328 : }
5329 :
5330 10 : CHECK_EQ(0, script_count);
5331 5 : }
5332 :
5333 :
5334 25880 : TEST(SharedFunctionInfoIterator) {
5335 5 : CcTest::InitializeVM();
5336 5 : v8::HandleScope scope(CcTest::isolate());
5337 : Isolate* isolate = CcTest::i_isolate();
5338 5 : Heap* heap = CcTest::heap();
5339 10 : LocalContext context;
5340 :
5341 5 : CcTest::CollectAllGarbage();
5342 5 : CcTest::CollectAllGarbage();
5343 :
5344 : int sfi_count = 0;
5345 : {
5346 5 : HeapIterator it(heap);
5347 82594 : for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
5348 41292 : if (!obj->IsSharedFunctionInfo()) continue;
5349 3351 : sfi_count++;
5350 5 : }
5351 : }
5352 :
5353 : {
5354 5 : SharedFunctionInfo::GlobalIterator iterator(isolate);
5355 3361 : while (!iterator.Next().is_null()) sfi_count--;
5356 : }
5357 :
5358 10 : CHECK_EQ(0, sfi_count);
5359 5 : }
5360 :
5361 : // This is the same as Factory::NewByteArray, except it doesn't retry on
5362 : // allocation failure.
5363 80650 : AllocationResult HeapTester::AllocateByteArrayForTest(Heap* heap, int length,
5364 : PretenureFlag pretenure) {
5365 : DCHECK(length >= 0 && length <= ByteArray::kMaxLength);
5366 : int size = ByteArray::SizeFor(length);
5367 80650 : AllocationSpace space = heap->SelectSpace(pretenure);
5368 80650 : HeapObject result;
5369 : {
5370 80650 : AllocationResult allocation = heap->AllocateRaw(size, space);
5371 80650 : if (!allocation.To(&result)) return allocation;
5372 : }
5373 :
5374 : result->set_map_after_allocation(ReadOnlyRoots(heap).byte_array_map(),
5375 80640 : SKIP_WRITE_BARRIER);
5376 : ByteArray::cast(result)->set_length(length);
5377 80640 : ByteArray::cast(result)->clear_padding();
5378 80640 : return result;
5379 : }
5380 :
5381 25880 : HEAP_TEST(Regress587004) {
5382 : ManualGCScope manual_gc_scope;
5383 : #ifdef VERIFY_HEAP
5384 : FLAG_verify_heap = false;
5385 : #endif
5386 5 : CcTest::InitializeVM();
5387 10 : v8::HandleScope scope(CcTest::isolate());
5388 15 : Heap* heap = CcTest::heap();
5389 : Isolate* isolate = CcTest::i_isolate();
5390 : Factory* factory = isolate->factory();
5391 : const int N =
5392 : (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kTaggedSize;
5393 5 : Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
5394 10 : CHECK(heap->old_space()->Contains(*array));
5395 5 : Handle<Object> number = factory->NewHeapNumber(1.0);
5396 5 : CHECK(Heap::InYoungGeneration(*number));
5397 316950 : for (int i = 0; i < N; i++) {
5398 316950 : array->set(i, *number);
5399 : }
5400 5 : CcTest::CollectGarbage(OLD_SPACE);
5401 5 : heap::SimulateFullSpace(heap->old_space());
5402 5 : heap->RightTrimFixedArray(*array, N - 1);
5403 5 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5404 5 : ByteArray byte_array;
5405 : const int M = 256;
5406 : // Don't allow old space expansion. The test works without this flag too,
5407 : // but becomes very slow.
5408 : heap->set_force_oom(true);
5409 5 : while (AllocateByteArrayForTest(heap, M, TENURED).To(&byte_array)) {
5410 0 : for (int j = 0; j < M; j++) {
5411 : byte_array->set(j, 0x31);
5412 : }
5413 : }
5414 : // Re-enable old space expansion to avoid OOM crash.
5415 : heap->set_force_oom(false);
5416 5 : CcTest::CollectGarbage(NEW_SPACE);
5417 5 : }
5418 :
5419 25880 : HEAP_TEST(Regress589413) {
5420 5 : if (!FLAG_incremental_marking) return;
5421 5 : FLAG_stress_compaction = true;
5422 5 : FLAG_manual_evacuation_candidates_selection = true;
5423 5 : FLAG_parallel_compaction = false;
5424 : ManualGCScope manual_gc_scope;
5425 5 : CcTest::InitializeVM();
5426 10 : v8::HandleScope scope(CcTest::isolate());
5427 5 : Heap* heap = CcTest::heap();
5428 : // Get the heap in clean state.
5429 5 : CcTest::CollectGarbage(OLD_SPACE);
5430 5 : CcTest::CollectGarbage(OLD_SPACE);
5431 : Isolate* isolate = CcTest::i_isolate();
5432 : Factory* factory = isolate->factory();
5433 : // Fill the new space with byte arrays with elements looking like pointers.
5434 : const int M = 256;
5435 5 : ByteArray byte_array;
5436 19270 : while (AllocateByteArrayForTest(heap, M, NOT_TENURED).To(&byte_array)) {
5437 4930560 : for (int j = 0; j < M; j++) {
5438 : byte_array->set(j, 0x31);
5439 : }
5440 : // Add the array in root set.
5441 : handle(byte_array, isolate);
5442 : }
5443 : // Make sure the byte arrays will be promoted on the next GC.
5444 5 : CcTest::CollectGarbage(NEW_SPACE);
5445 : // This number is close to large free list category threshold.
5446 : const int N = 0x3EEE;
5447 : {
5448 : std::vector<FixedArray> arrays;
5449 : std::set<Page*> pages;
5450 5 : FixedArray array;
5451 : // Fill all pages with fixed arrays.
5452 : heap->set_force_oom(true);
5453 35 : while (AllocateFixedArrayForTest(heap, N, TENURED).To(&array)) {
5454 30 : arrays.push_back(array);
5455 60 : pages.insert(Page::FromHeapObject(array));
5456 : // Add the array in root set.
5457 : handle(array, isolate);
5458 : }
5459 : // Expand and full one complete page with fixed arrays.
5460 : heap->set_force_oom(false);
5461 25 : while (AllocateFixedArrayForTest(heap, N, TENURED).To(&array)) {
5462 20 : arrays.push_back(array);
5463 40 : pages.insert(Page::FromHeapObject(array));
5464 : // Add the array in root set.
5465 : handle(array, isolate);
5466 : // Do not expand anymore.
5467 : heap->set_force_oom(true);
5468 : }
5469 : // Expand and mark the new page as evacuation candidate.
5470 : heap->set_force_oom(false);
5471 : {
5472 : AlwaysAllocateScope always_allocate(isolate);
5473 5 : Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
5474 : Page* ec_page = Page::FromHeapObject(*ec_obj);
5475 5 : heap::ForceEvacuationCandidate(ec_page);
5476 : // Make all arrays point to the evacuation candidate so that
5477 : // slots are recorded for them.
5478 110 : for (size_t j = 0; j < arrays.size(); j++) {
5479 50 : array = arrays[j];
5480 805550 : for (int i = 0; i < N; i++) {
5481 805500 : array->set(i, *ec_obj);
5482 : }
5483 : }
5484 : }
5485 5 : heap::SimulateIncrementalMarking(heap);
5486 110 : for (size_t j = 0; j < arrays.size(); j++) {
5487 50 : heap->RightTrimFixedArray(arrays[j], N - 1);
5488 : }
5489 : }
5490 : // Force allocation from the free list.
5491 : heap->set_force_oom(true);
5492 5 : CcTest::CollectGarbage(OLD_SPACE);
5493 : }
5494 :
5495 25880 : TEST(Regress598319) {
5496 5 : if (!FLAG_incremental_marking) return;
5497 : ManualGCScope manual_gc_scope;
5498 : // This test ensures that no white objects can cross the progress bar of large
5499 : // objects during incremental marking. It checks this by using Shift() during
5500 : // incremental marking.
5501 5 : CcTest::InitializeVM();
5502 10 : v8::HandleScope scope(CcTest::isolate());
5503 15 : Heap* heap = CcTest::heap();
5504 : Isolate* isolate = heap->isolate();
5505 :
5506 : // The size of the array should be larger than kProgressBarScanningChunk.
5507 : const int kNumberOfObjects = Max(FixedArray::kMaxRegularLength + 1, 128 * KB);
5508 :
5509 : struct Arr {
5510 5 : Arr(Isolate* isolate, int number_of_objects) {
5511 5 : root = isolate->factory()->NewFixedArray(1, TENURED);
5512 : {
5513 : // Temporary scope to avoid getting any other objects into the root set.
5514 5 : v8::HandleScope scope(CcTest::isolate());
5515 : Handle<FixedArray> tmp =
5516 5 : isolate->factory()->NewFixedArray(number_of_objects, TENURED);
5517 10 : root->set(0, *tmp);
5518 1310730 : for (int i = 0; i < get()->length(); i++) {
5519 655360 : tmp = isolate->factory()->NewFixedArray(100, TENURED);
5520 655360 : get()->set(i, *tmp);
5521 5 : }
5522 : }
5523 5 : }
5524 :
5525 10485870 : FixedArray get() { return FixedArray::cast(root->get(0)); }
5526 :
5527 : Handle<FixedArray> root;
5528 5 : } arr(isolate, kNumberOfObjects);
5529 :
5530 10 : CHECK_EQ(arr.get()->length(), kNumberOfObjects);
5531 10 : CHECK(heap->lo_space()->Contains(arr.get()));
5532 10 : LargePage* page = LargePage::FromHeapObject(arr.get());
5533 5 : CHECK_NOT_NULL(page);
5534 :
5535 : // GC to cleanup state
5536 5 : CcTest::CollectGarbage(OLD_SPACE);
5537 5 : MarkCompactCollector* collector = heap->mark_compact_collector();
5538 5 : if (collector->sweeping_in_progress()) {
5539 5 : collector->EnsureSweepingCompleted();
5540 : }
5541 :
5542 10 : CHECK(heap->lo_space()->Contains(arr.get()));
5543 8185 : IncrementalMarking* marking = heap->incremental_marking();
5544 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5545 10 : CHECK(marking_state->IsWhite(arr.get()));
5546 1310725 : for (int i = 0; i < arr.get()->length(); i++) {
5547 1310720 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5548 655360 : CHECK(marking_state->IsWhite(arr_value));
5549 : }
5550 :
5551 : // Start incremental marking.
5552 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5553 5 : if (marking->IsStopped()) {
5554 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5555 5 : i::GarbageCollectionReason::kTesting);
5556 : }
5557 5 : CHECK(marking->IsMarking());
5558 :
5559 : // Check that we have not marked the interesting array during root scanning.
5560 1310725 : for (int i = 0; i < arr.get()->length(); i++) {
5561 1310720 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5562 655360 : CHECK(marking_state->IsWhite(arr_value));
5563 : }
5564 :
5565 : // Now we search for a state where we are in incremental marking and have
5566 : // only partially marked the large object.
5567 : const double kSmallStepSizeInMs = 0.1;
5568 25 : while (!marking->IsComplete()) {
5569 : marking->V8Step(kSmallStepSizeInMs,
5570 : i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5571 25 : StepOrigin::kV8);
5572 50 : if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->progress_bar() > 0) {
5573 10 : CHECK_NE(page->progress_bar(), arr.get()->Size());
5574 : {
5575 : // Shift by 1, effectively moving one white object across the progress
5576 : // bar, meaning that we will miss marking it.
5577 5 : v8::HandleScope scope(CcTest::isolate());
5578 : Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(
5579 10 : Handle<FixedArray>(arr.get(), isolate));
5580 5 : js_array->GetElementsAccessor()->Shift(js_array);
5581 : }
5582 5 : break;
5583 : }
5584 : }
5585 :
5586 : // Finish marking with bigger steps to speed up test.
5587 : const double kLargeStepSizeInMs = 1000;
5588 8190 : while (!marking->IsComplete()) {
5589 : marking->V8Step(kLargeStepSizeInMs,
5590 : i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5591 8185 : StepOrigin::kV8);
5592 8185 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5593 5 : marking->FinalizeIncrementally();
5594 : }
5595 : }
5596 5 : CHECK(marking->IsComplete());
5597 :
5598 : // All objects need to be black after marking. If a white object crossed the
5599 : // progress bar, we would fail here.
5600 1310725 : for (int i = 0; i < arr.get()->length(); i++) {
5601 1310720 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5602 655360 : CHECK(marking_state->IsBlack(arr_value));
5603 : }
5604 : }
5605 :
5606 10 : Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
5607 : // Make sure there is no garbage and the compilation cache is empty.
5608 60 : for (int i = 0; i < 5; i++) {
5609 50 : CcTest::CollectAllGarbage();
5610 : }
5611 10 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5612 10 : size_t size_before_allocation = heap->SizeOfObjects();
5613 : Handle<FixedArray> array =
5614 10 : heap->isolate()->factory()->NewFixedArray(length, TENURED);
5615 10 : size_t size_after_allocation = heap->SizeOfObjects();
5616 10 : CHECK_EQ(size_after_allocation, size_before_allocation + array->Size());
5617 10 : array->Shrink(heap->isolate(), 1);
5618 10 : size_t size_after_shrinking = heap->SizeOfObjects();
5619 : // Shrinking does not change the space size immediately.
5620 10 : CHECK_EQ(size_after_allocation, size_after_shrinking);
5621 : // GC and sweeping update the size to account for shrinking.
5622 10 : CcTest::CollectAllGarbage();
5623 10 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5624 10 : intptr_t size_after_gc = heap->SizeOfObjects();
5625 20 : CHECK_EQ(size_after_gc, size_before_allocation + array->Size());
5626 10 : return array;
5627 : }
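          : // The helper documents the accounting behaviour the two tests below rely
          : // on: Shrink() installs a filler immediately, but heap->SizeOfObjects()
          : // only reflects the smaller object after a full GC has swept the space,
          : // so the size is checked both right after shrinking and again after GC
          : // plus sweeping.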
5628 :
5629 25880 : TEST(Regress609761) {
5630 5 : CcTest::InitializeVM();
5631 5 : v8::HandleScope scope(CcTest::isolate());
5632 10 : Heap* heap = CcTest::heap();
5633 : int length = kMaxRegularHeapObjectSize / kTaggedSize + 1;
5634 5 : Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, length);
5635 10 : CHECK(heap->lo_space()->Contains(*array));
5636 5 : }
5637 :
5638 25880 : TEST(LiveBytes) {
5639 5 : CcTest::InitializeVM();
5640 5 : v8::HandleScope scope(CcTest::isolate());
5641 10 : Heap* heap = CcTest::heap();
5642 5 : Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, 2000);
5643 10 : CHECK(heap->old_space()->Contains(*array));
5644 5 : }
5645 :
5646 25880 : TEST(Regress615489) {
5647 5 : if (!FLAG_incremental_marking) return;
5648 5 : CcTest::InitializeVM();
5649 5 : v8::HandleScope scope(CcTest::isolate());
5650 5 : Heap* heap = CcTest::heap();
5651 : Isolate* isolate = heap->isolate();
5652 5 : CcTest::CollectAllGarbage();
5653 :
5654 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5655 15 : i::IncrementalMarking* marking = heap->incremental_marking();
5656 5 : if (collector->sweeping_in_progress()) {
5657 5 : collector->EnsureSweepingCompleted();
5658 : }
5659 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5660 5 : if (marking->IsStopped()) {
5661 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5662 5 : i::GarbageCollectionReason::kTesting);
5663 : }
5664 5 : CHECK(marking->IsMarking());
5665 : marking->StartBlackAllocationForTesting();
5666 : {
5667 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
5668 10 : v8::HandleScope inner(CcTest::isolate());
5669 10 : isolate->factory()->NewFixedArray(500, TENURED)->Size();
5670 : }
5671 : const double kStepSizeInMs = 100;
5672 25 : while (!marking->IsComplete()) {
5673 : marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5674 15 : StepOrigin::kV8);
5675 15 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5676 5 : marking->FinalizeIncrementally();
5677 : }
5678 : }
5679 5 : CHECK(marking->IsComplete());
5680 5 : intptr_t size_before = heap->SizeOfObjects();
5681 5 : CcTest::CollectAllGarbage();
5682 5 : intptr_t size_after = heap->SizeOfObjects();
5683 : // Live size does not increase after garbage collection.
5684 5 : CHECK_LE(size_after, size_before);
5685 : }
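          : // The point of the CHECK_LE above: objects allocated black while
          : // incremental marking is running are already counted as live for this
          : // cycle and must not be double-counted, so a subsequent full GC may only
          : // keep the reported live size the same or shrink it, never grow it.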
5686 :
5687 : class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
5688 : public:
5689 5 : explicit StaticOneByteResource(const char* data) : data_(data) {}
5690 :
5691 5 : ~StaticOneByteResource() override = default;
5692 :
5693 5 : const char* data() const override { return data_; }
5694 :
5695 5 : size_t length() const override { return strlen(data_); }
5696 :
5697 : private:
5698 : const char* data_;
5699 : };
5700 :
5701 25880 : TEST(Regress631969) {
5702 5 : if (!FLAG_incremental_marking) return;
5703 5 : FLAG_manual_evacuation_candidates_selection = true;
5704 5 : FLAG_parallel_compaction = false;
5705 : ManualGCScope manual_gc_scope;
5706 5 : CcTest::InitializeVM();
5707 10 : v8::HandleScope scope(CcTest::isolate());
5708 15 : Heap* heap = CcTest::heap();
5709 : // Get the heap in clean state.
5710 5 : CcTest::CollectGarbage(OLD_SPACE);
5711 5 : CcTest::CollectGarbage(OLD_SPACE);
5712 : Isolate* isolate = CcTest::i_isolate();
5713 : Factory* factory = isolate->factory();
5714 : // Allocate two strings in a fresh page and mark the page as an evacuation
5715 : // candidate.
5716 5 : heap::SimulateFullSpace(heap->old_space());
5717 5 : Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED);
5718 5 : Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED);
5719 5 : heap::ForceEvacuationCandidate(Page::FromHeapObject(*s1));
5720 :
5721 5 : heap::SimulateIncrementalMarking(heap, false);
5722 :
5723 : // Allocate a cons string and promote it to a fresh page in the old space.
5724 5 : heap::SimulateFullSpace(heap->old_space());
5725 : Handle<String> s3;
5726 10 : factory->NewConsString(s1, s2).ToHandle(&s3);
5727 5 : CcTest::CollectGarbage(NEW_SPACE);
5728 5 : CcTest::CollectGarbage(NEW_SPACE);
5729 :
5730 : // Finish incremental marking.
5731 : const double kStepSizeInMs = 100;
5732 35 : IncrementalMarking* marking = heap->incremental_marking();
5733 45 : while (!marking->IsComplete()) {
5734 : marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5735 35 : StepOrigin::kV8);
5736 35 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5737 5 : marking->FinalizeIncrementally();
5738 : }
5739 : }
5740 :
5741 : {
5742 : StaticOneByteResource external_string("12345678901234");
5743 5 : s3->MakeExternal(&external_string);
5744 5 : CcTest::CollectGarbage(OLD_SPACE);
5745 : // This prevents the GC from trying to free stack-allocated resources.
5746 10 : i::Handle<i::ExternalOneByteString>::cast(s3)->SetResource(isolate,
5747 5 : nullptr);
5748 : }
5749 : }
5750 :
5751 25880 : TEST(LeftTrimFixedArrayInBlackArea) {
5752 5 : if (!FLAG_incremental_marking) return;
5753 5 : CcTest::InitializeVM();
5754 5 : v8::HandleScope scope(CcTest::isolate());
5755 15 : Heap* heap = CcTest::heap();
5756 : Isolate* isolate = heap->isolate();
5757 5 : CcTest::CollectAllGarbage();
5758 :
5759 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5760 : i::IncrementalMarking* marking = heap->incremental_marking();
5761 5 : if (collector->sweeping_in_progress()) {
5762 5 : collector->EnsureSweepingCompleted();
5763 : }
5764 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5765 5 : if (marking->IsStopped()) {
5766 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5767 5 : i::GarbageCollectionReason::kTesting);
5768 : }
5769 5 : CHECK(marking->IsMarking());
5770 : marking->StartBlackAllocationForTesting();
5771 :
5772 : // Ensure that we allocate a new page, set up a bump pointer area, and
5773 : // perform the allocation in a black area.
5774 5 : heap::SimulateFullSpace(heap->old_space());
5775 5 : isolate->factory()->NewFixedArray(4, TENURED);
5776 5 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
5777 10 : CHECK(heap->old_space()->Contains(*array));
5778 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5779 5 : CHECK(marking_state->IsBlack(*array));
5780 :
5781 : // Now left trim the allocated black area. A filler has to be installed
5782 : // for the trimmed area and all mark bits of the trimmed area have to be
5783 : // cleared.
5784 5 : FixedArrayBase trimmed = heap->LeftTrimFixedArray(*array, 10);
5785 5 : CHECK(marking_state->IsBlack(trimmed));
5786 :
5787 5 : heap::GcAndSweep(heap, OLD_SPACE);
5788 : }
5789 :
5790 25880 : TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
5791 5 : if (!FLAG_incremental_marking) return;
5792 5 : CcTest::InitializeVM();
5793 5 : v8::HandleScope scope(CcTest::isolate());
5794 15 : Heap* heap = CcTest::heap();
5795 : Isolate* isolate = heap->isolate();
5796 5 : CcTest::CollectAllGarbage();
5797 :
5798 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5799 : i::IncrementalMarking* marking = heap->incremental_marking();
5800 5 : if (collector->sweeping_in_progress()) {
5801 5 : collector->EnsureSweepingCompleted();
5802 : }
5803 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5804 5 : if (marking->IsStopped()) {
5805 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5806 5 : i::GarbageCollectionReason::kTesting);
5807 : }
5808 5 : CHECK(marking->IsMarking());
5809 : marking->StartBlackAllocationForTesting();
5810 :
5811 : // Ensure that we allocate a new page, set up a bump pointer area, and
5812 : // perform the allocation in a black area.
5813 5 : heap::SimulateFullSpace(heap->old_space());
5814 5 : isolate->factory()->NewFixedArray(10, TENURED);
5815 :
5816 : // Allocate the fixed array that will be trimmed later.
5817 5 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(100, TENURED);
5818 : Address start_address = array->address();
5819 5 : Address end_address = start_address + array->Size();
5820 5 : Page* page = Page::FromAddress(start_address);
5821 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5822 5 : CHECK(marking_state->IsBlack(*array));
5823 10 : CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
5824 : page->AddressToMarkbitIndex(start_address),
5825 : page->AddressToMarkbitIndex(end_address)));
5826 10 : CHECK(heap->old_space()->Contains(*array));
5827 :
5828 : FixedArrayBase previous = *array;
5829 : FixedArrayBase trimmed;
5830 :
5831 : // First trim in one word steps.
5832 55 : for (int i = 0; i < 10; i++) {
5833 50 : trimmed = heap->LeftTrimFixedArray(previous, 1);
5834 50 : HeapObject filler = HeapObject::FromAddress(previous->address());
5835 50 : CHECK(filler->IsFiller());
5836 50 : CHECK(marking_state->IsBlack(trimmed));
5837 50 : CHECK(marking_state->IsBlack(previous));
5838 : previous = trimmed;
5839 : }
5840 :
5841 : // Then trim in two and three word steps.
5842 10 : for (int i = 2; i <= 3; i++) {
5843 100 : for (int j = 0; j < 10; j++) {
5844 100 : trimmed = heap->LeftTrimFixedArray(previous, i);
5845 100 : HeapObject filler = HeapObject::FromAddress(previous->address());
5846 100 : CHECK(filler->IsFiller());
5847 100 : CHECK(marking_state->IsBlack(trimmed));
5848 100 : CHECK(marking_state->IsBlack(previous));
5849 : previous = trimmed;
5850 : }
5851 : }
5852 :
5853 5 : heap::GcAndSweep(heap, OLD_SPACE);
5854 : }
5855 :
5856 25880 : TEST(ContinuousRightTrimFixedArrayInBlackArea) {
5857 5 : if (!FLAG_incremental_marking) return;
5858 5 : CcTest::InitializeVM();
5859 5 : v8::HandleScope scope(CcTest::isolate());
5860 15 : Heap* heap = CcTest::heap();
5861 : Isolate* isolate = CcTest::i_isolate();
5862 5 : CcTest::CollectAllGarbage();
5863 :
5864 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5865 : i::IncrementalMarking* marking = heap->incremental_marking();
5866 5 : if (collector->sweeping_in_progress()) {
5867 5 : collector->EnsureSweepingCompleted();
5868 : }
5869 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5870 5 : if (marking->IsStopped()) {
5871 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5872 5 : i::GarbageCollectionReason::kTesting);
5873 : }
5874 5 : CHECK(marking->IsMarking());
5875 : marking->StartBlackAllocationForTesting();
5876 :
5877 : // Ensure that we allocate a new page, set up a bump pointer area, and
5878 : // perform the allocation in a black area.
5879 5 : heap::SimulateFullSpace(heap->old_space());
5880 5 : isolate->factory()->NewFixedArray(10, TENURED);
5881 :
5882 : // Allocate the fixed array that will be trimmed later.
5883 : Handle<FixedArray> array =
5884 5 : CcTest::i_isolate()->factory()->NewFixedArray(100, TENURED);
5885 : Address start_address = array->address();
5886 5 : Address end_address = start_address + array->Size();
5887 5 : Page* page = Page::FromAddress(start_address);
5888 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5889 5 : CHECK(marking_state->IsBlack(*array));
5890 :
5891 10 : CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
5892 : page->AddressToMarkbitIndex(start_address),
5893 : page->AddressToMarkbitIndex(end_address)));
5894 10 : CHECK(heap->old_space()->Contains(*array));
5895 :
5896 : // Trim it once by one word to make checking for white marking color uniform.
5897 5 : Address previous = end_address - kTaggedSize;
5898 5 : isolate->heap()->RightTrimFixedArray(*array, 1);
5899 :
5900 5 : HeapObject filler = HeapObject::FromAddress(previous);
5901 5 : CHECK(filler->IsFiller());
5902 5 : CHECK(marking_state->IsImpossible(filler));
5903 :
5904 :     // Trim 10 times by one, two, and three words.
5905 15 : for (int i = 1; i <= 3; i++) {
5906 150 : for (int j = 0; j < 10; j++) {
5907 150 : previous -= kTaggedSize * i;
5908 150 : isolate->heap()->RightTrimFixedArray(*array, i);
5909 150 : HeapObject filler = HeapObject::FromAddress(previous);
5910 150 : CHECK(filler->IsFiller());
5911 150 : CHECK(marking_state->IsWhite(filler));
5912 : }
5913 : }
5914 :
5915 5 : heap::GcAndSweep(heap, OLD_SPACE);
5916 : }
5917 :
5918 25880 : TEST(Regress618958) {
5919 5 : if (!FLAG_incremental_marking) return;
5920 5 : CcTest::InitializeVM();
5921 5 : v8::HandleScope scope(CcTest::isolate());
5922 15 : Heap* heap = CcTest::heap();
5923 : bool isolate_is_locked = true;
5924 5 : CcTest::isolate()->AdjustAmountOfExternalAllocatedMemory(100 * MB);
5925 : int mark_sweep_count_before = heap->ms_count();
5926 : heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
5927 5 : isolate_is_locked);
5928 : int mark_sweep_count_after = heap->ms_count();
5929 5 : int mark_sweeps_performed = mark_sweep_count_after - mark_sweep_count_before;
5930 :     // The memory pressure handler either performed two GCs or performed one and
5931 : // started incremental marking.
5932 5 : CHECK(mark_sweeps_performed == 2 ||
5933 : (mark_sweeps_performed == 1 &&
5934 5 : !heap->incremental_marking()->IsStopped()));
5935 : }
5936 :
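     : // Allocates a large fixed array in the young-generation large object space
     : // and checks that a scavenge promotes it to the old-generation large object
     : // space.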
5937 25880 : TEST(YoungGenerationLargeObjectAllocationScavenge) {
5938 10 : if (FLAG_minor_mc) return;
5939 5 : FLAG_young_generation_large_objects = true;
5940 5 : CcTest::InitializeVM();
5941 5 : v8::HandleScope scope(CcTest::isolate());
5942 5 : Heap* heap = CcTest::heap();
5943 5 : Isolate* isolate = heap->isolate();
5944 10 : if (!isolate->serializer_enabled()) return;
5945 :
5946 : // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
5947 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
5948 0 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
5949 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
5950 0 : CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
5951 0 : CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
5952 :
5953 0 : Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
5954 0 : array_small->set(0, *number);
5955 :
5956 0 : CcTest::CollectGarbage(NEW_SPACE);
5957 :
5958 :     // After the first young generation GC, array_small will be in the old
5959 : // generation large object space.
5960 : chunk = MemoryChunk::FromHeapObject(*array_small);
5961 0 : CHECK_EQ(LO_SPACE, chunk->owner()->identity());
5962 0 : CHECK(!chunk->InYoungGeneration());
5963 :
5964 0 : CcTest::CollectAllAvailableGarbage();
5965 : }
5966 :
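     : // Same as the scavenge variant above, but the young-generation large object
     : // is promoted by a full mark-compact GC instead.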
5967 25880 : TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
5968 10 : if (FLAG_minor_mc) return;
5969 5 : FLAG_young_generation_large_objects = true;
5970 5 : CcTest::InitializeVM();
5971 5 : v8::HandleScope scope(CcTest::isolate());
5972 5 : Heap* heap = CcTest::heap();
5973 5 : Isolate* isolate = heap->isolate();
5974 10 : if (!isolate->serializer_enabled()) return;
5975 :
5976 : // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
5977 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
5978 0 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
5979 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
5980 0 : CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
5981 0 : CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
5982 :
5983 0 : Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
5984 0 : array_small->set(0, *number);
5985 :
5986 0 : CcTest::CollectGarbage(OLD_SPACE);
5987 :
5988 :     // After the first full GC, array_small will be in the old generation
5989 : // large object space.
5990 : chunk = MemoryChunk::FromHeapObject(*array_small);
5991 0 : CHECK_EQ(LO_SPACE, chunk->owner()->identity());
5992 0 : CHECK(!chunk->InYoungGeneration());
5993 :
5994 0 : CcTest::CollectAllAvailableGarbage();
5995 : }
5996 :
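     : // Allocates several short-lived young-generation large objects and checks
     : // that a scavenge releases them, leaving both the new and the old large
     : // object space empty.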
5997 25880 : TEST(YoungGenerationLargeObjectAllocationReleaseScavenger) {
5998 10 : if (FLAG_minor_mc) return;
5999 5 : FLAG_young_generation_large_objects = true;
6000 5 : CcTest::InitializeVM();
6001 5 : v8::HandleScope scope(CcTest::isolate());
6002 5 : Heap* heap = CcTest::heap();
6003 5 : Isolate* isolate = heap->isolate();
6004 10 : if (!isolate->serializer_enabled()) return;
6005 :
6006 : {
6007 : HandleScope scope(isolate);
6008 0 : for (int i = 0; i < 10; i++) {
6009 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(20000);
6010 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
6011 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
6012 0 : CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
6013 : }
6014 : }
6015 :
6016 0 : CcTest::CollectGarbage(NEW_SPACE);
6017 0 : CHECK(isolate->heap()->new_lo_space()->IsEmpty());
6018 0 : CHECK_EQ(0, isolate->heap()->new_lo_space()->Size());
6019 0 : CHECK_EQ(0, isolate->heap()->new_lo_space()->SizeOfObjects());
6020 0 : CHECK(isolate->heap()->lo_space()->IsEmpty());
6021 0 : CHECK_EQ(0, isolate->heap()->lo_space()->Size());
6022 0 : CHECK_EQ(0, isolate->heap()->lo_space()->SizeOfObjects());
6023 : }
6024 :
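     : // Shrinks a fixed array that lives on a large-object page and checks that a
     : // full GC uncommits the unused tail: committed physical memory drops to the
     : // page-rounded size of the remaining object.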
6025 25880 : TEST(UncommitUnusedLargeObjectMemory) {
6026 5 : CcTest::InitializeVM();
6027 5 : v8::HandleScope scope(CcTest::isolate());
6028 5 : Heap* heap = CcTest::heap();
6029 : Isolate* isolate = heap->isolate();
6030 :
6031 5 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000, TENURED);
6032 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
6033 5 : CHECK(chunk->owner()->identity() == LO_SPACE);
6034 :
6035 5 : intptr_t size_before = array->Size();
6036 5 : size_t committed_memory_before = chunk->CommittedPhysicalMemory();
6037 :
6038 5 : array->Shrink(isolate, 1);
6039 5 : CHECK(array->Size() < size_before);
6040 :
6041 5 : CcTest::CollectAllGarbage();
6042 5 : CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
6043 :   size_t shrunk_size = RoundUp(
6044 20 :       (array->address() - chunk->address()) + array->Size(), CommitPageSize());
6045 5 :   CHECK_EQ(shrunk_size, chunk->CommittedPhysicalMemory());
6046 5 : }
6047 :
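     : // Inserts OLD_TO_NEW slots around page-size boundaries of a large-object
     : // chunk and checks that RememberedSet::RemoveRange removes exactly the
     : // slots inside each removed range.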
6048 25880 : TEST(RememberedSetRemoveRange) {
6049 5 : CcTest::InitializeVM();
6050 5 : v8::HandleScope scope(CcTest::isolate());
6051 5 : Heap* heap = CcTest::heap();
6052 : Isolate* isolate = heap->isolate();
6053 :
6054 : Handle<FixedArray> array =
6055 5 : isolate->factory()->NewFixedArray(Page::kPageSize / kTaggedSize, TENURED);
6056 15 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
6057 5 : CHECK(chunk->owner()->identity() == LO_SPACE);
6058 5 : Address start = array->address();
6059 :   // Maps each slot to a boolean indicating whether the slot should be in the set.
6060 : std::map<Address, bool> slots;
6061 5 : slots[start + 0] = true;
6062 5 : slots[start + kTaggedSize] = true;
6063 5 : slots[start + Page::kPageSize - kTaggedSize] = true;
6064 5 : slots[start + Page::kPageSize] = true;
6065 5 : slots[start + Page::kPageSize + kTaggedSize] = true;
6066 5 : slots[chunk->area_end() - kTaggedSize] = true;
6067 :
6068 35 : for (auto x : slots) {
6069 25 : RememberedSet<OLD_TO_NEW>::Insert(chunk, x.first);
6070 : }
6071 :
6072 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6073 25 : [&slots](MaybeObjectSlot slot) {
6074 25 : CHECK(slots[slot.address()]);
6075 25 : return KEEP_SLOT;
6076 : },
6077 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6078 :
6079 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kTaggedSize,
6080 5 : SlotSet::FREE_EMPTY_BUCKETS);
6081 5 : slots[start] = false;
6082 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6083 20 : [&slots](MaybeObjectSlot slot) {
6084 20 : CHECK(slots[slot.address()]);
6085 20 : return KEEP_SLOT;
6086 : },
6087 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6088 :
6089 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kTaggedSize,
6090 : start + Page::kPageSize,
6091 5 : SlotSet::FREE_EMPTY_BUCKETS);
6092 5 : slots[start + kTaggedSize] = false;
6093 5 : slots[start + Page::kPageSize - kTaggedSize] = false;
6094 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6095 10 : [&slots](MaybeObjectSlot slot) {
6096 10 : CHECK(slots[slot.address()]);
6097 10 : return KEEP_SLOT;
6098 : },
6099 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6100 :
6101 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start,
6102 : start + Page::kPageSize + kTaggedSize,
6103 5 : SlotSet::FREE_EMPTY_BUCKETS);
6104 5 : slots[start + Page::kPageSize] = false;
6105 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6106 5 : [&slots](MaybeObjectSlot slot) {
6107 5 : CHECK(slots[slot.address()]);
6108 5 : return KEEP_SLOT;
6109 : },
6110 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6111 :
6112 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, chunk->area_end() - kTaggedSize,
6113 : chunk->area_end(),
6114 5 : SlotSet::FREE_EMPTY_BUCKETS);
6115 5 : slots[chunk->area_end() - kTaggedSize] = false;
6116 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6117 0 : [&slots](MaybeObjectSlot slot) {
6118 0 : CHECK(slots[slot.address()]);
6119 0 : return KEEP_SLOT;
6120 : },
6121 10 : SlotSet::PREFREE_EMPTY_BUCKETS);
6122 5 : }
6123 :
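     : // Advances incremental marking in small deadline-bounded steps while
     : // filling the old generation; marking is expected to finish before the
     : // allocation loop runs out of iterations.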
6124 25880 : HEAP_TEST(Regress670675) {
6125 5 : if (!FLAG_incremental_marking) return;
6126 5 : FLAG_stress_incremental_marking = false;
6127 5 : CcTest::InitializeVM();
6128 5 : v8::HandleScope scope(CcTest::isolate());
6129 5 : Heap* heap = CcTest::heap();
6130 : Isolate* isolate = heap->isolate();
6131 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
6132 5 : CcTest::CollectAllGarbage();
6133 :
6134 5 : if (collector->sweeping_in_progress()) {
6135 5 : collector->EnsureSweepingCompleted();
6136 : }
6137 5 : i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
6138 5 : if (marking->IsStopped()) {
6139 5 : marking->Start(i::GarbageCollectionReason::kTesting);
6140 : }
6141 : size_t array_length = 128 * KB;
6142 5 : size_t n = heap->OldGenerationSpaceAvailable() / array_length;
6143 210 : for (size_t i = 0; i < n + 40; i++) {
6144 : {
6145 : HandleScope inner_scope(isolate);
6146 : isolate->factory()->NewFixedArray(static_cast<int>(array_length),
6147 210 : TENURED);
6148 : }
6149 210 : if (marking->IsStopped()) break;
6150 205 : double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
6151 : marking->AdvanceWithDeadline(
6152 205 : deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
6153 : }
6154 5 : DCHECK(marking->IsStopped());
6155 : }
6156 :
6157 : namespace {
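     : // Builds a Code object consisting only of nops and allocates it as
     : // immovable; Regress5831 below uses it to fill up code space until
     : // allocation overflows into the code large object space.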
6158 10 : Handle<Code> GenerateDummyImmovableCode(Isolate* isolate) {
6159 30 : Assembler assm(AssemblerOptions{});
6160 :
6161 : const int kNumberOfNops = 1 << 10;
6162 10250 : for (int i = 0; i < kNumberOfNops; i++) {
6163 10240 : assm.nop(); // supported on all architectures
6164 : }
6165 :
6166 10 : CodeDesc desc;
6167 : assm.GetCode(isolate, &desc);
6168 : Handle<Code> code = isolate->factory()->NewCode(
6169 : desc, Code::STUB, Handle<Code>(), Builtins::kNoBuiltinId,
6170 30 : MaybeHandle<ByteArray>(), DeoptimizationData::Empty(isolate), kImmovable);
6171 20 : CHECK(code->IsCode());
6172 :
6173 10 : return code;
6174 : }
6175 : } // namespace
6176 :
6177 25880 : HEAP_TEST(Regress5831) {
6178 5 : CcTest::InitializeVM();
6179 25 : Heap* heap = CcTest::heap();
6180 : Isolate* isolate = CcTest::i_isolate();
6181 : HandleScope handle_scope(isolate);
6182 :
6183 : // Used to ensure that the generated code is not collected.
6184 : const int kInitialSize = 32;
6185 5 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(kInitialSize);
6186 :
6187 : // Ensure that all immovable code space pages are full and we overflow into
6188 : // LO_SPACE.
6189 : const int kMaxIterations = 1 << 16;
6190 : bool overflowed_into_lospace = false;
6191 5 : for (int i = 0; i < kMaxIterations; i++) {
6192 5 : Handle<Code> code = GenerateDummyImmovableCode(isolate);
6193 5 : array = FixedArray::SetAndGrow(isolate, array, i, code);
6194 15 : CHECK(heap->code_space()->Contains(code->address()) ||
6195 : heap->code_lo_space()->Contains(*code));
6196 10 : if (heap->code_lo_space()->Contains(*code)) {
6197 : overflowed_into_lospace = true;
6198 : break;
6199 : }
6200 : }
6201 :
6202 5 : CHECK(overflowed_into_lospace);
6203 :
6204 : // Fake a serializer run.
6205 5 : isolate->serializer_enabled_ = true;
6206 :
6207 : // Generate the code.
6208 5 : Handle<Code> code = GenerateDummyImmovableCode(isolate);
6209 5 : CHECK_GE(i::kMaxRegularHeapObjectSize, code->Size());
6210 5 : CHECK(!heap->code_space()->first_page()->Contains(code->address()));
6211 :
6212 : // Ensure it's not in large object space.
6213 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*code);
6214 5 : CHECK(chunk->owner()->identity() != LO_SPACE);
6215 5 : CHECK(chunk->NeverEvacuate());
6216 5 : }
6217 :
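     : // Fills a tenured array with new-space references, overwrites them with
     : // undefined, and runs a scavenge; the page's OLD_TO_NEW remembered set
     : // should be left with no pre-freed empty buckets.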
6218 25880 : TEST(Regress6800) {
6219 5 : CcTest::InitializeVM();
6220 : Isolate* isolate = CcTest::i_isolate();
6221 : HandleScope handle_scope(isolate);
6222 :
6223 : const int kRootLength = 1000;
6224 : Handle<FixedArray> root =
6225 5 : isolate->factory()->NewFixedArray(kRootLength, TENURED);
6226 : {
6227 : HandleScope inner_scope(isolate);
6228 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
6229 5005 : for (int i = 0; i < kRootLength; i++) {
6230 10000 : root->set(i, *new_space_array);
6231 : }
6232 5000 : for (int i = 0; i < kRootLength; i++) {
6233 15000 : root->set(i, ReadOnlyRoots(CcTest::heap()).undefined_value());
6234 : }
6235 : }
6236 5 : CcTest::CollectGarbage(NEW_SPACE);
6237 5 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
6238 : MemoryChunk::FromHeapObject(*root)));
6239 5 : }
6240 :
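     : // Same as Regress6800, but with an array large enough for the large object
     : // space and a full GC instead of a scavenge.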
6241 25880 : TEST(Regress6800LargeObject) {
6242 5 : CcTest::InitializeVM();
6243 : Isolate* isolate = CcTest::i_isolate();
6244 : HandleScope handle_scope(isolate);
6245 :
6246 : const int kRootLength = i::kMaxRegularHeapObjectSize / kTaggedSize;
6247 : Handle<FixedArray> root =
6248 5 : isolate->factory()->NewFixedArray(kRootLength, TENURED);
6249 5 : CcTest::heap()->lo_space()->Contains(*root);
6250 : {
6251 : HandleScope inner_scope(isolate);
6252 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
6253 316965 : for (int i = 0; i < kRootLength; i++) {
6254 633920 : root->set(i, *new_space_array);
6255 : }
6256 316960 : for (int i = 0; i < kRootLength; i++) {
6257 950880 : root->set(i, ReadOnlyRoots(CcTest::heap()).undefined_value());
6258 : }
6259 : }
6260 5 : CcTest::CollectGarbage(OLD_SPACE);
6261 5 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
6262 : MemoryChunk::FromHeapObject(*root)));
6263 5 : }
6264 :
6265 25880 : HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
6266 5 : if (!FLAG_incremental_marking) return;
6267 : ManualGCScope manual_gc_scope;
6268 5 : CcTest::InitializeVM();
6269 10 : v8::HandleScope scope(CcTest::isolate());
6270 5 : Heap* heap = CcTest::heap();
6271 : Isolate* isolate = heap->isolate();
6272 5 : CcTest::CollectAllGarbage();
6273 5 : heap::SimulateIncrementalMarking(heap, false);
6274 : Handle<Map> map;
6275 : {
6276 : AlwaysAllocateScope always_allocate(isolate);
6277 5 : map = isolate->factory()->NewMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
6278 : }
6279 : heap->incremental_marking()->StartBlackAllocationForTesting();
6280 : Handle<HeapObject> object;
6281 : {
6282 : AlwaysAllocateScope always_allocate(isolate);
6283 : object = handle(isolate->factory()->NewForTest(map, TENURED), isolate);
6284 : }
6285 :   // The object is black. If Factory::New sets the map without a write barrier,
6286 : // then the map is white and will be freed prematurely.
6287 5 : heap::SimulateIncrementalMarking(heap, true);
6288 5 : CcTest::CollectAllGarbage();
6289 5 : MarkCompactCollector* collector = heap->mark_compact_collector();
6290 5 : if (collector->sweeping_in_progress()) {
6291 5 : collector->EnsureSweepingCompleted();
6292 : }
6293 10 : CHECK(object->map()->IsMap());
6294 : }
6295 :
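     : // Checks that the mark-compact epoch counter increments on every full GC,
     : // including one that finishes incremental marking, but not on a scavenge.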
6296 25880 : HEAP_TEST(MarkCompactEpochCounter) {
6297 : ManualGCScope manual_gc_scope;
6298 5 : CcTest::InitializeVM();
6299 10 : v8::HandleScope scope(CcTest::isolate());
6300 5 : Heap* heap = CcTest::heap();
6301 5 : unsigned epoch0 = heap->mark_compact_collector()->epoch();
6302 5 : CcTest::CollectGarbage(OLD_SPACE);
6303 5 : unsigned epoch1 = heap->mark_compact_collector()->epoch();
6304 5 : CHECK_EQ(epoch0 + 1, epoch1);
6305 5 : heap::SimulateIncrementalMarking(heap, true);
6306 5 : CcTest::CollectGarbage(OLD_SPACE);
6307 5 : unsigned epoch2 = heap->mark_compact_collector()->epoch();
6308 5 : CHECK_EQ(epoch1 + 1, epoch2);
6309 5 : CcTest::CollectGarbage(NEW_SPACE);
6310 5 : unsigned epoch3 = heap->mark_compact_collector()->epoch();
6311 5 : CHECK_EQ(epoch2, epoch3);
6312 5 : }
6313 :
6314 25880 : UNINITIALIZED_TEST(ReinitializeStringHashSeed) {
6315 : // Enable rehashing and create an isolate and context.
6316 5 : i::FLAG_rehash_snapshot = true;
6317 15 : for (int i = 1; i < 3; i++) {
6318 10 : i::FLAG_hash_seed = 1337 * i;
6319 : v8::Isolate::CreateParams create_params;
6320 10 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6321 10 : v8::Isolate* isolate = v8::Isolate::New(create_params);
6322 : {
6323 : v8::Isolate::Scope isolate_scope(isolate);
6324 10 : CHECK_EQ(static_cast<uint64_t>(1337 * i),
6325 : HashSeed(reinterpret_cast<i::Isolate*>(isolate)));
6326 20 : v8::HandleScope handle_scope(isolate);
6327 10 : v8::Local<v8::Context> context = v8::Context::New(isolate);
6328 10 : CHECK(!context.IsEmpty());
6329 : v8::Context::Scope context_scope(context);
6330 : }
6331 10 : isolate->Dispose();
6332 : }
6333 5 : }
6334 :
6335 : const int kHeapLimit = 100 * MB;
6336 : Isolate* oom_isolate = nullptr;
6337 :
6338 0 : void OOMCallback(const char* location, bool is_heap_oom) {
6339 0 : Heap* heap = oom_isolate->heap();
6340 : size_t kSlack = heap->new_space()->Capacity();
6341 0 : CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
6342 0 : CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack);
6343 0 : base::OS::ExitProcess(0);
6344 0 : }
6345 :
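     : // Keeps allocating small fixed arrays under a 100 MB old-generation limit;
     : // OOMCallback above verifies that the heap stayed within the limit (plus
     : // new-space slack) and then exits the process.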
6346 25875 : UNINITIALIZED_TEST(OutOfMemory) {
6347 0 : if (FLAG_stress_incremental_marking) return;
6348 : #ifdef VERIFY_HEAP
6349 : if (FLAG_verify_heap) return;
6350 : #endif
6351 0 : FLAG_max_old_space_size = kHeapLimit / MB;
6352 : v8::Isolate::CreateParams create_params;
6353 0 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6354 0 : v8::Isolate* isolate = v8::Isolate::New(create_params);
6355 : Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
6356 0 : oom_isolate = i_isolate;
6357 0 : isolate->SetOOMErrorHandler(OOMCallback);
6358 : {
6359 : Factory* factory = i_isolate->factory();
6360 : HandleScope handle_scope(i_isolate);
6361 : while (true) {
6362 0 : factory->NewFixedArray(100);
6363 : }
6364 : }
6365 : }
6366 :
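     : // Keeps allocating near the heap limit and checks that there are never four
     : // or more consecutive mark-compacts with mutator utilization below 30%; the
     : // ineffective-GC handling near the heap limit should kick in before that.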
6367 25876 : UNINITIALIZED_TEST(OutOfMemoryIneffectiveGC) {
6368 2 : if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
6369 1 : if (FLAG_stress_incremental_marking) return;
6370 : #ifdef VERIFY_HEAP
6371 : if (FLAG_verify_heap) return;
6372 : #endif
6373 :
6374 0 : FLAG_max_old_space_size = kHeapLimit / MB;
6375 : v8::Isolate::CreateParams create_params;
6376 0 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6377 0 : v8::Isolate* isolate = v8::Isolate::New(create_params);
6378 : Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
6379 0 : oom_isolate = i_isolate;
6380 0 : isolate->SetOOMErrorHandler(OOMCallback);
6381 : Factory* factory = i_isolate->factory();
6382 0 : Heap* heap = i_isolate->heap();
6383 0 : heap->CollectAllGarbage(Heap::kNoGCFlags, GarbageCollectionReason::kTesting);
6384 : {
6385 : HandleScope scope(i_isolate);
6386 0 : while (heap->OldGenerationSizeOfObjects() <
6387 0 : heap->MaxOldGenerationSize() * 0.9) {
6388 0 : factory->NewFixedArray(100, TENURED);
6389 : }
6390 : {
6391 : int initial_ms_count = heap->ms_count();
6392 : int ineffective_ms_start = initial_ms_count;
6393 0 : while (heap->ms_count() < initial_ms_count + 10) {
6394 : HandleScope inner_scope(i_isolate);
6395 0 : factory->NewFixedArray(30000, TENURED);
6396 0 : if (heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3) {
6397 0 : ineffective_ms_start = heap->ms_count() + 1;
6398 : }
6399 : }
6400 0 : int consecutive_ineffective_ms = heap->ms_count() - ineffective_ms_start;
6401 0 : CHECK_IMPLIES(
6402 : consecutive_ineffective_ms >= 4,
6403 : heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3);
6404 : }
6405 : }
6406 0 : isolate->Dispose();
6407 : }
6408 :
6409 25880 : HEAP_TEST(Regress779503) {
6410 : // The following regression test ensures that the Scavenger does not allocate
6411 :   // over invalid slots. More specifically, the Scavenger should not sweep a page
6412 : // that it currently processes because it might allocate over the currently
6413 : // processed slot.
6414 : const int kArraySize = 2048;
6415 5 : CcTest::InitializeVM();
6416 : Isolate* isolate = CcTest::i_isolate();
6417 5 : Heap* heap = CcTest::heap();
6418 5 : heap::SealCurrentObjects(heap);
6419 : {
6420 : HandleScope handle_scope(isolate);
6421 : // The byte array filled with kHeapObjectTag ensures that we cannot read
6422 :     // from the slot again and interpret it as a heap value. Doing so will crash.
6423 5 : Handle<ByteArray> byte_array = isolate->factory()->NewByteArray(kArraySize);
6424 5 : CHECK(Heap::InYoungGeneration(*byte_array));
6425 10240 : for (int i = 0; i < kArraySize; i++) {
6426 : byte_array->set(i, kHeapObjectTag);
6427 : }
6428 :
6429 : {
6430 : HandleScope handle_scope(isolate);
6431 : // The FixedArray in old space serves as space for slots.
6432 : Handle<FixedArray> fixed_array =
6433 5 : isolate->factory()->NewFixedArray(kArraySize, TENURED);
6434 5 : CHECK(!Heap::InYoungGeneration(*fixed_array));
6435 10240 : for (int i = 0; i < kArraySize; i++) {
6436 20480 : fixed_array->set(i, *byte_array);
6437 : }
6438 : }
6439 : // Delay sweeper tasks to allow the scavenger to sweep the page it is
6440 : // currently scavenging.
6441 5 : heap->delay_sweeper_tasks_for_testing_ = true;
6442 5 : CcTest::CollectGarbage(OLD_SPACE);
6443 5 : CHECK(Heap::InYoungGeneration(*byte_array));
6444 : }
6445 : // Scavenging and sweeping the same page will crash as slots will be
6446 :   // overwritten.
6447 5 : CcTest::CollectGarbage(NEW_SPACE);
6448 5 : heap->delay_sweeper_tasks_for_testing_ = false;
6449 5 : }
6450 :
6451 : struct OutOfMemoryState {
6452 : Heap* heap;
6453 : bool oom_triggered;
6454 : size_t old_generation_capacity_at_oom;
6455 : size_t memory_allocator_size_at_oom;
6456 : size_t new_space_capacity_at_oom;
6457 : size_t new_lo_space_size_at_oom;
6458 : size_t current_heap_limit;
6459 : size_t initial_heap_limit;
6460 : };
6461 :
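     : // Near-heap-limit callback used by the OutOfMemory* tests below: records
     : // the heap state at the moment the limit is reached and raises the limit by
     : // 100 MB so allocation can continue.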
6462 12 : size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
6463 : size_t initial_heap_limit) {
6464 : OutOfMemoryState* state = static_cast<OutOfMemoryState*>(raw_state);
6465 36 : Heap* heap = state->heap;
6466 12 : state->oom_triggered = true;
6467 12 : state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
6468 12 : state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
6469 12 : state->new_space_capacity_at_oom = heap->new_space()->Capacity();
6470 12 : state->new_lo_space_size_at_oom = heap->new_lo_space()->Size();
6471 12 : state->current_heap_limit = current_heap_limit;
6472 12 : state->initial_heap_limit = initial_heap_limit;
6473 12 : return initial_heap_limit + 100 * MB;
6474 : }
6475 :
6476 0 : size_t MemoryAllocatorSizeFromHeapCapacity(size_t capacity) {
6477 : // Size to capacity factor.
6478 : double factor =
6479 4 : Page::kPageSize * 1.0 / MemoryChunkLayout::AllocatableMemoryInDataPage();
6480 : // Some tables (e.g. deoptimization table) are allocated directly with the
6481 : // memory allocator. Allow some slack to account for them.
6482 : size_t slack = 5 * MB;
6483 4 : return static_cast<size_t>(capacity * factor) + slack;
6484 : }
6485 :
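     : // Allocates small arrays until the near-heap-limit callback fires and checks
     : // that the recorded old-generation capacity and memory-allocator size stay
     : // close to the configured 300 MB limit.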
6486 25875 : UNINITIALIZED_TEST(OutOfMemorySmallObjects) {
6487 0 : if (FLAG_stress_incremental_marking) return;
6488 : #ifdef VERIFY_HEAP
6489 : if (FLAG_verify_heap) return;
6490 : #endif
6491 : const size_t kOldGenerationLimit = 300 * MB;
6492 0 : FLAG_max_old_space_size = kOldGenerationLimit / MB;
6493 : v8::Isolate::CreateParams create_params;
6494 0 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6495 : Isolate* isolate =
6496 0 : reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6497 0 : Heap* heap = isolate->heap();
6498 : Factory* factory = isolate->factory();
6499 : OutOfMemoryState state;
6500 0 : state.heap = heap;
6501 0 : state.oom_triggered = false;
6502 0 : heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6503 : {
6504 : HandleScope handle_scope(isolate);
6505 0 : while (!state.oom_triggered) {
6506 0 : factory->NewFixedArray(100);
6507 : }
6508 : }
6509 0 : CHECK_LE(state.old_generation_capacity_at_oom,
6510 : kOldGenerationLimit + state.new_space_capacity_at_oom);
6511 0 : CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
6512 : state.new_space_capacity_at_oom);
6513 0 : CHECK_LE(
6514 : state.memory_allocator_size_at_oom,
6515 : MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
6516 : 2 * state.new_space_capacity_at_oom));
6517 0 : reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6518 : }
6519 :
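     : // Same as the small-object test above, but allocates arrays that go to the
     : // large object spaces, so the new large object space size enters the bounds.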
6520 25880 : UNINITIALIZED_TEST(OutOfMemoryLargeObjects) {
6521 6 : if (FLAG_stress_incremental_marking) return;
6522 : #ifdef VERIFY_HEAP
6523 : if (FLAG_verify_heap) return;
6524 : #endif
6525 : const size_t kOldGenerationLimit = 300 * MB;
6526 4 : FLAG_max_old_space_size = kOldGenerationLimit / MB;
6527 : v8::Isolate::CreateParams create_params;
6528 4 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6529 : Isolate* isolate =
6530 4 : reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6531 4 : Heap* heap = isolate->heap();
6532 : Factory* factory = isolate->factory();
6533 : OutOfMemoryState state;
6534 4 : state.heap = heap;
6535 4 : state.oom_triggered = false;
6536 4 : heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6537 : const int kFixedArrayLength = 1000000;
6538 : {
6539 : HandleScope handle_scope(isolate);
6540 164 : while (!state.oom_triggered) {
6541 160 : factory->NewFixedArray(kFixedArrayLength);
6542 : }
6543 : }
6544 4 : CHECK_LE(state.old_generation_capacity_at_oom, kOldGenerationLimit);
6545 4 : CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
6546 : state.new_space_capacity_at_oom +
6547 : state.new_lo_space_size_at_oom +
6548 : FixedArray::SizeFor(kFixedArrayLength));
6549 8 : CHECK_LE(
6550 : state.memory_allocator_size_at_oom,
6551 : MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
6552 : 2 * state.new_space_capacity_at_oom +
6553 : state.new_lo_space_size_at_oom));
6554 4 : reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6555 : }
6556 :
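     : // Checks that a heap limit raised by the near-heap-limit callback is
     : // automatically restored once memory usage drops: when the callback fires a
     : // second time, the current limit equals the initial one again.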
6557 25880 : UNINITIALIZED_TEST(RestoreHeapLimit) {
6558 6 : if (FLAG_stress_incremental_marking) return;
6559 : #ifdef VERIFY_HEAP
6560 : if (FLAG_verify_heap) return;
6561 : #endif
6562 : ManualGCScope manual_gc_scope;
6563 : const size_t kOldGenerationLimit = 300 * MB;
6564 4 : FLAG_max_old_space_size = kOldGenerationLimit / MB;
6565 : v8::Isolate::CreateParams create_params;
6566 4 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6567 : Isolate* isolate =
6568 4 : reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6569 4 : Heap* heap = isolate->heap();
6570 : Factory* factory = isolate->factory();
6571 : OutOfMemoryState state;
6572 4 : state.heap = heap;
6573 4 : state.oom_triggered = false;
6574 4 : heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6575 4 : heap->AutomaticallyRestoreInitialHeapLimit(0.5);
6576 : const int kFixedArrayLength = 1000000;
6577 : {
6578 : HandleScope handle_scope(isolate);
6579 164 : while (!state.oom_triggered) {
6580 160 : factory->NewFixedArray(kFixedArrayLength);
6581 : }
6582 : }
6583 4 : heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
6584 4 : state.oom_triggered = false;
6585 : {
6586 : HandleScope handle_scope(isolate);
6587 164 : while (!state.oom_triggered) {
6588 160 : factory->NewFixedArray(kFixedArrayLength);
6589 : }
6590 : }
6591 4 : CHECK_EQ(state.current_heap_limit, state.initial_heap_limit);
6592 4 : reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6593 : }
6594 :
6595 5 : void HeapTester::UncommitFromSpace(Heap* heap) {
6596 5 : heap->UncommitFromSpace();
6597 5 : heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted();
6598 5 : }
6599 :
6600 : class DeleteNative {
6601 : public:
6602 : static void Deleter(void* arg) {
6603 : delete reinterpret_cast<DeleteNative*>(arg);
6604 : }
6605 : };
6606 :
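     : // Registers many Managed objects that each account a large amount of
     : // external memory, then checks that the critical memory pressure
     : // notification triggers at most about ten additional mark-sweeps.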
6607 25880 : TEST(Regress8014) {
6608 5 : Isolate* isolate = CcTest::InitIsolateOnce();
6609 15 : Heap* heap = isolate->heap();
6610 : {
6611 : HandleScope scope(isolate);
6612 50005 : for (int i = 0; i < 10000; i++) {
6613 : auto handle = Managed<DeleteNative>::FromRawPtr(isolate, 1000000,
6614 50000 : new DeleteNative());
6615 : USE(handle);
6616 : }
6617 : }
6618 : int ms_count = heap->ms_count();
6619 5 : heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
6620 :   // Several GCs can be triggered by the above call.
6621 : // The bad case triggers 10000 GCs.
6622 10 : CHECK_LE(heap->ms_count(), ms_count + 10);
6623 5 : }
6624 :
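     : // End-to-end scenario for a descriptor-array marking bug: foo sits on an
     : // evacuation candidate and is referenced from obj's descriptor array, which
     : // is promoted during incremental marking; the final mark-compact must still
     : // update that slot so obj.method() does not call through a stale pointer.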
6625 25880 : TEST(Regress8617) {
6626 : ManualGCScope manual_gc_scope;
6627 5 : FLAG_manual_evacuation_candidates_selection = true;
6628 10 : LocalContext env;
6629 : Isolate* isolate = CcTest::i_isolate();
6630 10 : Heap* heap = isolate->heap();
6631 : HandleScope scope(isolate);
6632 5 : heap::SimulateFullSpace(heap->old_space());
6633 : // Step 1. Create a function and ensure that it is in the old space.
6634 : Handle<Object> foo =
6635 : v8::Utils::OpenHandle(*CompileRun("function foo() { return 42; };"
6636 : "foo;"));
6637 5 : if (heap->InYoungGeneration(*foo)) {
6638 0 : CcTest::CollectGarbage(NEW_SPACE);
6639 0 : CcTest::CollectGarbage(NEW_SPACE);
6640 : }
6641 : // Step 2. Create an object with a reference to foo in the descriptor array.
6642 : CompileRun(
6643 : "var obj = {};"
6644 : "obj.method = foo;"
6645 : "obj;");
6646 : // Step 3. Make sure that foo moves during Mark-Compact.
6647 : Page* ec_page = Page::FromAddress(foo->ptr());
6648 5 : heap::ForceEvacuationCandidate(ec_page);
6649 : // Step 4. Start incremental marking.
6650 5 : heap::SimulateIncrementalMarking(heap, false);
6651 5 : CHECK(ec_page->IsEvacuationCandidate());
6652 : // Step 5. Install a new descriptor array on the map of the object.
6653 : // This runs the marking barrier for the descriptor array.
6654 : // In the bad case it sets the number of marked descriptors but does not
6655 : // change the color of the descriptor array.
6656 : CompileRun("obj.bar = 10;");
6657 : // Step 6. Promote the descriptor array to old space. During promotion
6658 : // the Scavenger will not record the slot of foo in the descriptor array.
6659 5 : CcTest::CollectGarbage(NEW_SPACE);
6660 5 : CcTest::CollectGarbage(NEW_SPACE);
6661 : // Step 7. Complete the Mark-Compact.
6662 5 : CcTest::CollectAllGarbage();
6663 : // Step 8. Use the descriptor for foo, which contains a stale pointer.
6664 : CompileRun("obj.method()");
6665 5 : }
6666 :
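     : // Checks that the memory reducer moves from kDone to kWait once the old
     : // generation has grown by roughly 1 MB of tenured allocations.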
6667 25880 : HEAP_TEST(MemoryReducerActivationForSmallHeaps) {
6668 : ManualGCScope manual_gc_scope;
6669 10 : LocalContext env;
6670 : Isolate* isolate = CcTest::i_isolate();
6671 5 : Heap* heap = isolate->heap();
6672 5 : CHECK_EQ(heap->memory_reducer()->state_.action, MemoryReducer::Action::kDone);
6673 : HandleScope scope(isolate);
6674 : const size_t kActivationThreshold = 1 * MB;
6675 5 : size_t initial_capacity = heap->OldGenerationCapacity();
6676 1785 : while (heap->OldGenerationCapacity() <
6677 890 : initial_capacity + kActivationThreshold) {
6678 885 : isolate->factory()->NewFixedArray(1 * KB, TENURED);
6679 : }
6680 5 : CHECK_EQ(heap->memory_reducer()->state_.action, MemoryReducer::Action::kWait);
6681 5 : }
6682 :
6683 : } // namespace heap
6684 : } // namespace internal
6685 77625 : } // namespace v8
6686 :
6687 : #undef __