Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Redistribution and use in source and binary forms, with or without
3 : // modification, are permitted provided that the following conditions are
4 : // met:
5 : //
6 : // * Redistributions of source code must retain the above copyright
7 : // notice, this list of conditions and the following disclaimer.
8 : // * Redistributions in binary form must reproduce the above
9 : // copyright notice, this list of conditions and the following
10 : // disclaimer in the documentation and/or other materials provided
11 : // with the distribution.
12 : // * Neither the name of Google Inc. nor the names of its
13 : // contributors may be used to endorse or promote products derived
14 : // from this software without specific prior written permission.
15 : //
16 : // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 : // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 : // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 : // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 : // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 : // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 : // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 : // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 : // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 : // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 : // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 :
28 : #include <stdlib.h>
29 : #include <utility>
30 :
31 : #include "src/api-inl.h"
32 : #include "src/assembler-inl.h"
33 : #include "src/compilation-cache.h"
34 : #include "src/debug/debug.h"
35 : #include "src/deoptimizer.h"
36 : #include "src/elements.h"
37 : #include "src/execution.h"
38 : #include "src/field-type.h"
39 : #include "src/global-handles.h"
40 : #include "src/heap/factory.h"
41 : #include "src/heap/gc-tracer.h"
42 : #include "src/heap/incremental-marking.h"
43 : #include "src/heap/mark-compact.h"
44 : #include "src/heap/memory-reducer.h"
45 : #include "src/heap/remembered-set.h"
46 : #include "src/ic/ic.h"
47 : #include "src/macro-assembler-inl.h"
48 : #include "src/objects-inl.h"
49 : #include "src/objects/heap-number-inl.h"
50 : #include "src/objects/js-array-inl.h"
51 : #include "src/objects/js-collection-inl.h"
52 : #include "src/objects/managed.h"
53 : #include "src/objects/slots.h"
54 : #include "src/regexp/jsregexp.h"
55 : #include "src/snapshot/snapshot.h"
56 : #include "src/transitions.h"
57 : #include "test/cctest/cctest.h"
58 : #include "test/cctest/heap/heap-tester.h"
59 : #include "test/cctest/heap/heap-utils.h"
60 : #include "test/cctest/test-feedback-vector.h"
61 : #include "test/cctest/test-transitions.h"
62 :
63 : namespace v8 {
64 : namespace internal {
65 : namespace heap {
66 :
67 : // We only start allocation-site tracking with the second instantiation.
68 : static const int kPretenureCreationCount =
69 : AllocationSite::kPretenureMinimumCreated + 1;
70 :
// Verifies structural invariants of a root Map: it is a heap object
// residing in the test heap, its own map is the meta map, and its
// instance type and instance size match the expected values.
static void CheckMap(Map map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
  CHECK(CcTest::heap()->Contains(map));
#endif
  // Every Map's map is the singleton meta map.
  CHECK_EQ(ReadOnlyRoots(CcTest::heap()).meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}
80 :
81 :
// Spot-checks a handful of well-known root maps for type/size sanity.
TEST(HeapMaps) {
  CcTest::InitializeVM();
  ReadOnlyRoots roots(CcTest::heap());
  CheckMap(roots.meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(roots.heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  // Variable-sized object kinds carry kVariableSizeSentinel as their size.
  CheckMap(roots.fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(roots.hash_table_map(), HASH_TABLE_TYPE, kVariableSizeSentinel);
  CheckMap(roots.string_map(), STRING_TYPE, kVariableSizeSentinel);
}
91 :
92 10 : static void VerifyStoredPrototypeMap(Isolate* isolate,
93 : int stored_map_context_index,
94 : int stored_ctor_context_index) {
95 10 : Handle<Context> context = isolate->native_context();
96 :
97 : Handle<Map> this_map(Map::cast(context->get(stored_map_context_index)),
98 20 : isolate);
99 :
100 : Handle<JSFunction> fun(
101 20 : JSFunction::cast(context->get(stored_ctor_context_index)), isolate);
102 20 : Handle<JSObject> proto(JSObject::cast(fun->initial_map()->prototype()),
103 20 : isolate);
104 : Handle<Map> that_map(proto->map(), isolate);
105 :
106 10 : CHECK(proto->HasFastProperties());
107 30 : CHECK_EQ(*this_map, *that_map);
108 10 : }
109 :
// Checks that critical maps stored on the context (mostly used for fast-path
// checks) are unchanged after initialization.
TEST(ContextMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  VerifyStoredPrototypeMap(isolate,
                           Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX,
                           Context::STRING_FUNCTION_INDEX);
  VerifyStoredPrototypeMap(isolate, Context::REGEXP_PROTOTYPE_MAP_INDEX,
                           Context::REGEXP_FUNCTION_INDEX);
}
123 :
// Checks that the prototypes cached on the native context match the
// objects reachable from script for each corresponding built-in.
TEST(InitialObjects) {
  LocalContext env;
  HandleScope scope(CcTest::i_isolate());
  Handle<Context> context = v8::Utils::OpenHandle(*env);
  // Initial ArrayIterator prototype.
  CHECK_EQ(
      context->initial_array_iterator_prototype(),
      *v8::Utils::OpenHandle(*CompileRun("[][Symbol.iterator]().__proto__")));
  // Initial Array prototype.
  CHECK_EQ(context->initial_array_prototype(),
           *v8::Utils::OpenHandle(*CompileRun("Array.prototype")));
  // Initial Generator prototype.
  CHECK_EQ(context->initial_generator_prototype(),
           *v8::Utils::OpenHandle(
               *CompileRun("(function*(){}).__proto__.prototype")));
  // Initial Iterator prototype.
  CHECK_EQ(context->initial_iterator_prototype(),
           *v8::Utils::OpenHandle(
               *CompileRun("[][Symbol.iterator]().__proto__.__proto__")));
  // Initial Object prototype.
  CHECK_EQ(context->initial_object_prototype(),
           *v8::Utils::OpenHandle(*CompileRun("Object.prototype")));
}
147 :
// Verifies that |obj| is an Oddball whose ToString conversion equals
// |string| (e.g. true -> "true", undefined -> "undefined").
static void CheckOddball(Isolate* isolate, Object obj, const char* string) {
  CHECK(obj->IsOddball());
  Handle<Object> handle(obj, isolate);
  Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
  CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
}
154 :
155 15 : static void CheckSmi(Isolate* isolate, int value, const char* string) {
156 : Handle<Object> handle(Smi::FromInt(value), isolate);
157 30 : Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
158 15 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
159 15 : }
160 :
161 :
162 5 : static void CheckNumber(Isolate* isolate, double value, const char* string) {
163 5 : Handle<Object> number = isolate->factory()->NewNumber(value);
164 10 : CHECK(number->IsNumber());
165 : Handle<Object> print_string =
166 10 : Object::ToString(isolate, number).ToHandleChecked();
167 5 : CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
168 5 : }
169 :
// Walks the EMBEDDED_OBJECT relocation entries of |lhs| and |rhs| in
// lockstep and checks that each pair targets the same object, and that
// both iterators exhaust at the same time (same number of entries).
void CheckEmbeddedObjectsAreEqual(Handle<Code> lhs, Handle<Code> rhs) {
  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  RelocIterator lhs_it(*lhs, mode_mask);
  RelocIterator rhs_it(*rhs, mode_mask);
  while (!lhs_it.done() && !rhs_it.done()) {
    CHECK(lhs_it.rinfo()->target_object() == rhs_it.rinfo()->target_object());

    lhs_it.next();
    rhs_it.next();
  }
  // Both code objects must contain the same count of embedded objects.
  CHECK(lhs_it.done() == rhs_it.done());
}
182 :
// Checks that a new-space object embedded in a code object survives a
// code copy plus a full GC: the copy must reference the same (possibly
// relocated) object as the original.
HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope sc(isolate);

  Handle<HeapNumber> value = factory->NewHeapNumber(1.000123);
  CHECK(Heap::InNewSpace(*value));

  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                      ExternalAssemblerBuffer(buffer, sizeof(buffer)));
  // Add a new-space reference to the code.
  masm.Push(value);

  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());

  Handle<Code> copy;
  {
    // Code pages are write-protected; the scope makes them writable.
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    copy = factory->CopyCode(code);
  }

  CheckEmbeddedObjectsAreEqual(code, copy);
  CcTest::CollectAllAvailableGarbage();
  // After GC the embedded references must still agree.
  CheckEmbeddedObjectsAreEqual(code, copy);
}
213 :
// Checks Isolate::FindCodeObject: every interior address of a code
// object maps back to that object, and an address inside a *different*
// code object does not map to the first one.
static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(AssemblerOptions{});

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  CHECK(code->IsCode());

  HeapObject obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();

  // Every tagged-size-aligned interior address resolves to the object.
  for (int i = 0; i < obj->Size(); i += kTaggedSize) {
    Object found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  }

  Handle<Code> copy =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  HeapObject obj_copy = HeapObject::cast(*copy);
  Object not_right =
      isolate->FindCodeObject(obj_copy->address() + obj_copy->Size() / 2);
  CHECK(not_right != *code);
}
243 :
244 :
// Checks that a handle wrapping the zero Object value is still a valid,
// non-null handle (the handle location itself exists).
TEST(HandleNull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(Object(0), isolate);
  CHECK(!n.is_null());
}
253 :
254 :
// Exercises basic heap-object allocation via the Factory: number
// representation (Smi vs. HeapNumber) at the Smi boundaries, strings,
// property lookup on the global object, and ToString conversions.
TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  // A non-integral double must be boxed as a HeapNumber.
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  // Integral doubles in Smi range are canonicalized to Smis.
  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  // Both Smi range endpoints stay Smis.
  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  // Just past kMaxValue a HeapNumber is required.
  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs
  ReadOnlyRoots roots(heap);
  CheckOddball(isolate, roots.true_value(), "true");
  CheckOddball(isolate, roots.false_value(), "false");
  CheckOddball(isolate, roots.null_value(), "null");
  CheckOddball(isolate, roots.undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}
337 :
// Checks object-pointer alignment and that values at both Smi range
// endpoints round-trip through Smi tagging.
TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  // 24 is already object-pointer aligned, so alignment is a no-op.
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}
346 :
347 :
// Checks that objects reachable from the global object survive scavenges
// while objects rooted only in an exited HandleScope can be collected.
// The inner scopes are deliberate: they drop handle roots before each GC.
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  CcTest::CollectGarbage(NEW_SPACE);

  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunctionForTest(name);
    Object::SetProperty(isolate, global, name, function, LanguageMode::kSloppy)
        .Check();
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    Object::SetProperty(isolate, obj, prop_name, twenty_three,
                        LanguageMode::kSloppy)
        .Check();
    Object::SetProperty(isolate, obj, prop_namex, twenty_four,
                        LanguageMode::kSloppy)
        .Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(isolate, obj, prop_namex).ToHandleChecked());
  }

  CcTest::CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(isolate, global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    Object::SetProperty(isolate, global, obj_name, obj, LanguageMode::kSloppy)
        .Check();
    Object::SetProperty(isolate, obj, prop_name, twenty_three,
                        LanguageMode::kSloppy)
        .Check();
  }

  // After gc, it should survive.
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(isolate, global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
}
418 :
419 :
420 25 : static void VerifyStringAllocation(Isolate* isolate, const char* string) {
421 : HandleScope scope(isolate);
422 : Handle<String> s = isolate->factory()->NewStringFromUtf8(
423 50 : CStrVector(string)).ToHandleChecked();
424 25 : CHECK_EQ(StrLength(string), s->length());
425 385 : for (int index = 0; index < s->length(); index++) {
426 360 : CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
427 : }
428 25 : }
429 :
430 :
// Checks string allocation for a range of lengths, including single-char
// and multi-word strings.
TEST(String) {
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());

  VerifyStringAllocation(isolate, "a");
  VerifyStringAllocation(isolate, "ab");
  VerifyStringAllocation(isolate, "abc");
  VerifyStringAllocation(isolate, "abcd");
  VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
}
441 :
442 :
// Checks that a string allocated inside a HandleScope is usable through
// its local handle.
TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
}
453 :
454 :
// Checks that global handles keep their targets alive across GC even
// after the originating HandleScope has been exited, and that handles
// created for the same object compare equal.
TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    // Two global handles per object; the locals die with the scope.
    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}
494 :
495 :
// Set by TestWeakGlobalHandleCallback when the expected weak handle was
// cleared; reset by each test before arming its weak callback.
static bool WeakPointerCleared = false;

// Weak callback used by the handle tests below. The parameter is a
// (persistent-handle pointer, id) pair; the flag is only raised when the
// id matches the magic value 1234 used by the tests.
static void TestWeakGlobalHandleCallback(
    const v8::WeakCallbackInfo<void>& data) {
  std::pair<v8::Persistent<v8::Value>*, int>* p =
      reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
          data.GetParameter());
  if (p->second == 1234) WeakPointerCleared = true;
  p->first->Reset();
}
506 :
507 :
// Checks that a scavenge treats weak global handles as strong roots:
// the weakly-held object survives and its callback does not fire.
TEST(WeakGlobalHandlesScavenge) {
  // Disabled so no full GC sneaks in and clears the weak handle.
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  // Scavenge treats weak pointers as normal roots.
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());

  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h2.location()));
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}
549 :
// Checks that a weak handle to an *unmodified* API object IS cleared by
// a scavenge (unmodified API objects are eligible for reclamation),
// while an unrelated strong-ish handle survives.
TEST(WeakGlobalUnmodifiedApiHandlesScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    // Create an Api object that is unmodified.
    Local<v8::Function> function = FunctionTemplate::New(context->GetIsolate())
                                       ->GetFunction(context.local())
                                       .ToLocalChecked();
    Local<v8::Object> i =
        function->NewInstance(context.local()).ToLocalChecked();
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*u);
    h2 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsHeapNumber());
  // The unmodified API object's weak handle must have been cleared.
  CHECK(WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}
590 :
// Checks that a weak handle to an API object with a *modified map*
// (instance template adds a property) is NOT cleared by a scavenge.
TEST(WeakGlobalApiHandleModifiedMapScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;

  {
    HandleScope scope(isolate);

    // Create an API object which does not have the same map as constructor.
    auto function_template = FunctionTemplate::New(context->GetIsolate());
    auto instance_t = function_template->InstanceTemplate();
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "a",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    auto function =
        function_template->GetFunction(context.local()).ToLocalChecked();
    auto i = function->NewInstance(context.local()).ToLocalChecked();

    h1 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
  GlobalHandles::MakeWeak(
      h1.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);

  // Modified API object: handle survives the scavenge.
  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}
630 :
// Checks that a weak handle to an API object that has *elements*
// (indexed properties "1" and "2") is NOT cleared by a scavenge.
TEST(WeakGlobalApiHandleWithElementsScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;

  {
    HandleScope scope(isolate);

    // Create an API object which has elements.
    auto function_template = FunctionTemplate::New(context->GetIsolate());
    auto instance_t = function_template->InstanceTemplate();
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "1",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "2",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    auto function =
        function_template->GetFunction(context.local()).ToLocalChecked();
    auto i = function->NewInstance(context.local()).ToLocalChecked();

    h1 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
  GlobalHandles::MakeWeak(
      h1.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);

  // Object with elements: handle survives the scavenge.
  CHECK(!WeakPointerCleared);
  CHECK(!global_handles->IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}
674 :
// Checks that mark-compact (unlike scavenge) honors weak global handles:
// after promoting both objects to old space and making h2 weak, a full
// GC clears h2 (callback fires) while strong h1 survives.
TEST(WeakGlobalHandlesMark) {
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  CcTest::CollectGarbage(OLD_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(!Heap::InNewSpace(*h1) && !Heap::InNewSpace(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));
  CHECK(!GlobalHandles::IsNearDeath(h2.location()));

  // Incremental marking potentially marked handles before they turned weak.
  CcTest::CollectAllGarbage();

  CHECK((*h1)->IsString());

  CHECK(WeakPointerCleared);
  CHECK(!GlobalHandles::IsNearDeath(h1.location()));

  GlobalHandles::Destroy(h1.location());
}
719 :
720 :
// Checks that a weak global handle is ignored by a scavenge but honored
// (cleared, with its callback invoked) by a mark-compact collection.
TEST(DeleteWeakGlobalHandle) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback,
                          v8::WeakCallbackType::kParameter);

  // Scavenge does not recognize weak reference.
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak reference properly.
  CcTest::CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}
755 :
756 28342 : TEST(BytecodeArray) {
757 5 : if (FLAG_never_compact) return;
758 : static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
759 : static const int kRawBytesSize = sizeof(kRawBytes);
760 : static const int kFrameSize = 32;
761 : static const int kParameterCount = 2;
762 :
763 : ManualGCScope manual_gc_scope;
764 5 : FLAG_manual_evacuation_candidates_selection = true;
765 5 : CcTest::InitializeVM();
766 : Isolate* isolate = CcTest::i_isolate();
767 5 : Heap* heap = isolate->heap();
768 : Factory* factory = isolate->factory();
769 : HandleScope scope(isolate);
770 :
771 5 : heap::SimulateFullSpace(heap->old_space());
772 5 : Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
773 30 : for (int i = 0; i < 5; i++) {
774 25 : Handle<Object> number = factory->NewHeapNumber(i);
775 25 : constant_pool->set(i, *number);
776 : }
777 :
778 : // Allocate and initialize BytecodeArray
779 : Handle<BytecodeArray> array = factory->NewBytecodeArray(
780 5 : kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
781 :
782 10 : CHECK(array->IsBytecodeArray());
783 5 : CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
784 5 : CHECK_EQ(array->frame_size(), kFrameSize);
785 5 : CHECK_EQ(array->parameter_count(), kParameterCount);
786 15 : CHECK_EQ(array->constant_pool(), *constant_pool);
787 5 : CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
788 10 : CHECK_GE(array->address() + array->BytecodeArraySize(),
789 : array->GetFirstBytecodeAddress() + array->length());
790 20 : for (int i = 0; i < kRawBytesSize; i++) {
791 60 : CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
792 : kRawBytes[i]);
793 20 : CHECK_EQ(array->get(i), kRawBytes[i]);
794 : }
795 :
796 5 : FixedArray old_constant_pool_address = *constant_pool;
797 :
798 : // Perform a full garbage collection and force the constant pool to be on an
799 : // evacuation candidate.
800 : Page* evac_page = Page::FromHeapObject(*constant_pool);
801 5 : heap::ForceEvacuationCandidate(evac_page);
802 5 : CcTest::CollectAllGarbage();
803 :
804 : // BytecodeArray should survive.
805 5 : CHECK_EQ(array->length(), kRawBytesSize);
806 5 : CHECK_EQ(array->frame_size(), kFrameSize);
807 20 : for (int i = 0; i < kRawBytesSize; i++) {
808 40 : CHECK_EQ(array->get(i), kRawBytes[i]);
809 40 : CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
810 : kRawBytes[i]);
811 : }
812 :
813 : // Constant pool should have been migrated.
814 15 : CHECK_EQ(array->constant_pool(), *constant_pool);
815 10 : CHECK_NE(array->constant_pool(), old_constant_pool_address);
816 : }
817 :
// Checks bytecode-age transitions: a fresh array starts at
// kFirstBytecodeAge, MakeOlder advances the age, and aging saturates at
// kLastBytecodeAge instead of wrapping.
TEST(BytecodeArrayAging) {
  static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
  static const int kRawBytesSize = sizeof(kRawBytes);
  static const int kFrameSize = 32;
  static const int kParameterCount = 2;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);

  Handle<BytecodeArray> array =
      factory->NewBytecodeArray(kRawBytesSize, kRawBytes, kFrameSize,
                                kParameterCount, factory->empty_fixed_array());

  CHECK_EQ(BytecodeArray::kFirstBytecodeAge, array->bytecode_age());
  array->MakeOlder();
  CHECK_EQ(BytecodeArray::kQuadragenarianBytecodeAge, array->bytecode_age());
  // Aging past the maximum must saturate, not wrap around.
  array->set_bytecode_age(BytecodeArray::kLastBytecodeAge);
  array->MakeOlder();
  CHECK_EQ(BytecodeArray::kLastBytecodeAge, array->bytecode_age());
}
839 :
// Table of keyword-like strings used to exercise string internalization.
// The trailing nullptr is the end-of-table sentinel that
// CheckInternalizedStrings() relies on.
static const char* not_so_random_string_table[] = {
    "abstract",
    "boolean",
    "break",
    "byte",
    "case",
    "catch",
    "char",
    "class",
    "const",
    "continue",
    "debugger",
    "default",
    "delete",
    "do",
    "double",
    "else",
    "enum",
    "export",
    "extends",
    "false",
    "final",
    "finally",
    "float",
    "for",
    "function",
    "goto",
    "if",
    "implements",
    "import",
    "in",
    "instanceof",
    "int",
    "interface",
    "long",
    "native",
    "new",
    "null",
    "package",
    "private",
    "protected",
    "public",
    "return",
    "short",
    "static",
    "super",
    "switch",
    "synchronized",
    "this",
    "throw",
    "throws",
    "transient",
    "true",
    "try",
    "typeof",
    "var",
    "void",
    "volatile",
    "while",
    "with",
    nullptr
};
902 :
903 10 : static void CheckInternalizedStrings(const char** strings) {
904 : Isolate* isolate = CcTest::i_isolate();
905 : Factory* factory = isolate->factory();
906 600 : for (const char* string = *strings; *strings != nullptr;
907 : string = *strings++) {
908 : HandleScope scope(isolate);
909 : Handle<String> a =
910 590 : isolate->factory()->InternalizeUtf8String(CStrVector(string));
911 : // InternalizeUtf8String may return a failure if a GC is needed.
912 1180 : CHECK(a->IsInternalizedString());
913 590 : Handle<String> b = factory->InternalizeUtf8String(string);
914 1770 : CHECK_EQ(*b, *a);
915 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
916 590 : b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
917 1770 : CHECK_EQ(*b, *a);
918 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
919 : }
920 10 : }
921 :
922 :
// Interns the keyword table twice; the second pass should be satisfied from
// the existing string-table entries (same objects returned — verified inside
// CheckInternalizedStrings).
TEST(StringTable) {
  CcTest::InitializeVM();

  v8::HandleScope sc(CcTest::isolate());
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
}
930 :
931 :
// Allocates a JSFunction and an instance of it, then verifies that named
// properties can be stored and read back on both the instance and the
// function object itself.
TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunctionForTest(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Object::SetProperty(isolate, obj, prop_name, twenty_three,
                      LanguageMode::kSloppy)
      .Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  Object::SetProperty(isolate, function, prop_name, twenty_four,
                      LanguageMode::kSloppy)
      .Check();
  CHECK_EQ(
      Smi::FromInt(24),
      *Object::GetProperty(isolate, function, prop_name).ToHandleChecked());
}
959 :
960 :
// Exercises add/has/delete of named properties on a plain JS object,
// including deletion in both orders, and verifies that an internalized
// string and an ordinary (non-internalized) string with the same contents
// name the same property.
TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(
      String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
  // Look up the global Object constructor and instantiate it.
  Handle<Object> object =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
                          object_string)
          .ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first
  Object::SetProperty(isolate, obj, first, one, LanguageMode::kSloppy).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // delete first
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first and then second
  Object::SetProperty(isolate, obj, first, one, LanguageMode::kSloppy).Check();
  Object::SetProperty(isolate, obj, second, two, LanguageMode::kSloppy).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete first and then second
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // add first and then second
  Object::SetProperty(isolate, obj, first, one, LanguageMode::kSloppy).Check();
  Object::SetProperty(isolate, obj, second, two, LanguageMode::kSloppy).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete second and then first (reverse order of the previous round)
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  Object::SetProperty(isolate, obj, s1, one, LanguageMode::kSloppy).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  Object::SetProperty(isolate, obj, s2_string, one, LanguageMode::kSloppy)
      .Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}
1038 :
1039 :
// Verifies that adding a property to a fresh instance transitions the object
// away from its constructor's initial map.
TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunctionForTest(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  // Snapshot the map before any property is added.
  Handle<Map> initial_map(function->initial_map(), isolate);

  // Set a property
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Object::SetProperty(isolate, obj, prop_name, twenty_three,
                      LanguageMode::kSloppy)
      .Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());

  // Check the map has changed
  CHECK(*initial_map != obj->map());
}
1064 :
1065 :
// Exercises JSArray length handling: a small array stays in fast
// (Smi-or-object) elements mode, while setting a length above Smi::kMaxValue
// forces dictionary (slow) elements; element stores must keep working in
// both modes and writing at index |length| bumps the length by one.
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(), name)
          .ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::kZero, array->length());
  // Must be in fast mode.
  CHECK(array->HasSmiOrObjectElements());

  // array[length] = name.
  Object::SetElement(isolate, array, 0, name, LanguageMode::kSloppy).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set array length with larger than smi value.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  Object::SetElement(isolate, array, int_length, name, LanguageMode::kSloppy)
      .Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  // Writing at index |int_length| must have grown the length by exactly one.
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}
1116 :
1117 :
// Verifies Factory::CopyJSObject: the clone is a distinct object carrying the
// same named properties and elements, and mutating the clone afterwards does
// not affect the original.
TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(
      String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
  Handle<Object> object =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
                          object_string)
          .ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // Populate both named properties and indexed elements.
  Object::SetProperty(isolate, obj, first, one, LanguageMode::kSloppy).Check();
  Object::SetProperty(isolate, obj, second, two, LanguageMode::kSloppy).Check();

  Object::SetElement(isolate, obj, 0, first, LanguageMode::kSloppy).Check();
  Object::SetElement(isolate, obj, 1, second, LanguageMode::kSloppy).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values on the clone only; the original must keep its values.
  Object::SetProperty(isolate, clone, first, two, LanguageMode::kSloppy)
      .Check();
  Object::SetProperty(isolate, clone, second, one, LanguageMode::kSloppy)
      .Check();

  Object::SetElement(isolate, clone, 0, second, LanguageMode::kSloppy).Check();
  Object::SetElement(isolate, clone, 1, first, LanguageMode::kSloppy).Check();

  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}
1186 :
1187 :
// Allocates one-byte and multi-byte UTF-8 strings of lengths 0..99 (both
// interned and plain) and checks that the reported character length matches.
// Each 3-byte UTF-8 sequence (0xE5 0xA4 0xA7) decodes to a single character,
// so a buffer of 3*length bytes must yield a string of |length| characters.
TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  const unsigned char chars[] = {0xE5, 0xA4, 0xA7};
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      one_byte[i] = 'a';
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    }
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
            .ToHandleChecked();
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
            .ToHandleChecked();
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
  }
}
1226 :
1227 :
1228 5 : static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1229 : // Count the number of objects found in the heap.
1230 : int found_count = 0;
1231 5 : HeapIterator iterator(heap);
1232 69240 : for (HeapObject obj = iterator.next(); !obj.is_null();
1233 : obj = iterator.next()) {
1234 207690 : for (int i = 0; i < size; i++) {
1235 415380 : if (*objs[i] == obj) {
1236 30 : found_count++;
1237 : }
1238 : }
1239 : }
1240 5 : return found_count;
1241 : }
1242 :
1243 :
// Allocates objects in several spaces (new, old, large-object) plus a Map,
// then checks that a full HeapIterator walk visits each of them exactly once.
TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] = factory->NewJSArray(10, HOLEY_ELEMENTS, TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] =
      Handle<Map>(HeapObject::cast(*objs[0])->map(), isolate);

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}
1279 :
// Checks bytecode flushing: a compiled function's bytecode survives a couple
// of full GCs, is flushed after enough GCs age it past the threshold, and is
// transparently recompiled when the function is called again.
TEST(TestBytecodeFlushing) {
#ifndef V8_LITE_MODE
  // Disable optimization so the function stays on bytecode (optimized code
  // would keep it alive); these flags don't exist in lite mode.
  FLAG_opt = false;
  FLAG_always_opt = false;
  i::FLAG_optimize_for_size = false;
#endif // V8_LITE_MODE
  i::FLAG_flush_bytecode = true;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  Isolate* i_isolate = CcTest::i_isolate();
  Factory* factory = i_isolate->factory();

  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value =
        Object::GetProperty(i_isolate, i_isolate->global_object(), foo_name)
            .ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    CcTest::CollectAllGarbage();
    CcTest::CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      CcTest::CollectAllGarbage();
    }

    // foo should no longer be in the compilation cache
    CHECK(!function->shared()->is_compiled());
    CHECK(!function->is_compiled());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
}
1340 :
1341 : #ifndef V8_LITE_MODE
1342 :
// Checks the interaction of bytecode flushing with optimization: a function
// whose bytecode was flushed once can be recompiled, enqueued again as a
// flushing candidate, and then optimized mid-incremental-marking without the
// optimized code being flushed by the next GC.
TEST(TestOptimizeAfterBytecodeFlushingCandidate) {
  FLAG_opt = true;
  FLAG_always_opt = false;
  i::FLAG_optimize_for_size = false;
  i::FLAG_incremental_marking = true;
  i::FLAG_flush_bytecode = true;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate, isolate->global_object(), foo_name)
          .ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    heap::SimulateIncrementalMarking(CcTest::heap());
    CcTest::CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled());
  CHECK(!function->is_compiled());

  // This compile will compile the function again.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    heap::SimulateIncrementalMarking(CcTest::heap());
    if (function->shared()->GetBytecodeArray()->IsOld()) break;
    CcTest::CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC and make sure the candidate wasn't flushed.
  CcTest::CollectAllGarbage();
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1418 :
1419 : #endif // V8_LITE_MODE
1420 :
// Checks that installing already-available optimized code into a lazily
// compiled closure while incremental marking is active works (the store
// goes through the incremental write barrier).
TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
  if (!FLAG_incremental_marking) return;
  // Turn off always_opt because it interferes with running the built-in for
  // the last call to g().
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());

  CompileRun(
      "function make_closure(x) {"
      "  return function() { return x + 3 };"
      "}"
      "var f = make_closure(5); f();"
      "var g = make_closure(5);");

  // Check f is compiled.
  Handle<String> f_name = factory->InternalizeUtf8String("f");
  Handle<Object> f_value =
      Object::GetProperty(isolate, isolate->global_object(), f_name)
          .ToHandleChecked();
  Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
  CHECK(f_function->is_compiled());

  // Check g is not compiled.
  Handle<String> g_name = factory->InternalizeUtf8String("g");
  Handle<Object> g_value =
      Object::GetProperty(isolate, isolate->global_object(), g_name)
          .ToHandleChecked();
  Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
  CHECK(!g_function->is_compiled());

  heap::SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(f); f();");

  // g should now have available an optimized function, unmarked by gc. The
  // CompileLazy built-in will discover it and install it in the closure, and
  // the incremental write barrier should be used.
  CompileRun("g();");
  CHECK(g_function->is_compiled());
}
1465 :
// Checks compilation-cache lifetime: a compiled script is findable in the
// cache, survives at least one GC, and is evicted once its bytecode has been
// aged past the threshold and a GC runs.
TEST(CompilationCacheCachingBehavior) {
  // This test is only valid when the compilation cache is enabled.
  if (!FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo();";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // The script should be in the cache now.
  {
    v8::HandleScope scope(CcTest::isolate());
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(!cached_script.is_null());
  }

  // Check that the code cache entry survives at least one GC.
  {
    CcTest::CollectAllGarbage();
    v8::HandleScope scope(CcTest::isolate());
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(!cached_script.is_null());

    // Progress code age until it's old and ready for GC.
    Handle<SharedFunctionInfo> shared = cached_script.ToHandleChecked();
    CHECK(shared->HasBytecodeArray());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      shared->GetBytecodeArray()->MakeOlder();
    }
  }

  CcTest::CollectAllGarbage();

  {
    v8::HandleScope scope(CcTest::isolate());
    // Ensure code aging cleared the entry from the cache.
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(cached_script.is_null());
  }
}
1534 :
1535 :
// Defines an empty function called |name| in the current context, warms it
// up with two calls, and forces optimization via %OptimizeFunctionOnNextCall
// (requires --allow-natives-syntax).
static void OptimizeEmptyFunction(const char* name) {
  HandleScope scope(CcTest::i_isolate());
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();",
           name, name, name, name, name);
  CompileRun(source.start());
}
1547 :
1548 :
1549 : // Count the number of native contexts in the weak list of native contexts.
1550 366 : int CountNativeContexts() {
1551 : int count = 0;
1552 366 : Object object = CcTest::heap()->native_contexts_list();
1553 2682 : while (!object->IsUndefined(CcTest::i_isolate())) {
1554 1950 : count++;
1555 1950 : object = Context::cast(object)->next_context_link();
1556 : }
1557 366 : return count;
1558 : }
1559 :
// Creates a chain of native contexts, verifies the weak native-contexts list
// grows as contexts are created, and shrinks — only under mark-compact, not
// scavenge — as contexts are disposed one by one.
TEST(TestInternalWeakLists) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_optimizer()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    CcTest::CollectAllGarbage();

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    OptimizeEmptyFunction("f1");
    OptimizeEmptyFunction("f2");
    OptimizeEmptyFunction("f3");
    OptimizeEmptyFunction("f4");
    OptimizeEmptyFunction("f5");

    // Remove function f1 so its code can be collected below.
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    CcTest::CollectAllGarbage();

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }
    CcTest::CollectAllGarbage();
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }
    CcTest::CollectAllGarbage();

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::CollectAllGarbage();

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    // Overwrite the local handle's slot so the context becomes unreachable.
    i::Address* unsafe = reinterpret_cast<i::Address*>(*ctx[i]);
    *unsafe = ReadOnlyRoots(CcTest::heap()).undefined_value()->ptr();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1654 :
1655 :
// Measures heap growth from compiling a regexp just above vs. just below the
// "too large to optimize" threshold: the half-size (optimized) regexp should
// compile to more than twice the code of the unoptimized large one.
TEST(TestSizeOfRegExpCode) {
  if (!FLAG_regexp_optimization) return;

  v8::V8::Initialize();

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  LocalContext context;

  // Adjust source below and this check to match
  // RegExpImpl::kRegExpTooLargeToOptimize.
  CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);

  // Compile a regexp that is much larger if we are using regexp optimizations.
  CompileRun(
      "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
      "var half_size_reg_exp;"
      "while (reg_exp_source.length < 20 * 1024) {"
      "  half_size_reg_exp = reg_exp_source;"
      "  reg_exp_source = reg_exp_source + reg_exp_source;"
      "}"
      // Flatten string.
      "reg_exp_source.match(/f/);");

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::CollectAllAvailableGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  CompileRun("'foo'.match(reg_exp_source);");
  CcTest::CollectAllAvailableGarbage();
  int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());

  CompileRun("'foo'.match(half_size_reg_exp);");
  CcTest::CollectAllAvailableGarbage();
  int size_with_optimized_regexp =
      static_cast<int>(CcTest::heap()->SizeOfObjects());

  int size_of_regexp_code = size_with_regexp - initial_size;

  // On some platforms the debug-code flag causes huge amounts of regexp code
  // to be emitted, breaking this test.
  if (!FLAG_debug_code) {
    CHECK_LE(size_of_regexp_code, 1 * MB);
  }

  // Small regexp is half the size, but compiles to more than twice the code
  // due to the optimization steps.
  CHECK_GE(size_with_optimized_regexp,
           size_with_regexp + size_of_regexp_code * 2);
}
1712 :
1713 :
// Checks that Heap::SizeOfObjects tracks allocations exactly while objects
// are created, and returns to the initial value after a full GC — even
// before concurrent sweeping has finished.
HEAP_TEST(TestSizeOfObjects) {
  v8::V8::Initialize();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  MarkCompactCollector* collector = heap->mark_compact_collector();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::CollectAllAvailableGarbage();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(heap->SizeOfObjects());

  {
    HandleScope scope(isolate);
    // Allocate objects on several different old-space pages so that
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      isolate->factory()->NewFixedArray(8192, TENURED);
      // Size must grow by exactly one array per iteration.
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(heap->SizeOfObjects()));
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  CcTest::CollectAllGarbage();
  // Normally sweeping would not be complete here, but no guarantees.
  CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
  // Waiting for sweeper threads should not change heap size.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
}
1753 :
1754 :
1755 28342 : TEST(TestAlignmentCalculations) {
1756 : // Maximum fill amounts are consistent.
1757 : int maximum_double_misalignment = kDoubleSize - kTaggedSize;
1758 5 : int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1759 5 : CHECK_EQ(0, max_word_fill);
1760 5 : int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1761 5 : CHECK_EQ(maximum_double_misalignment, max_double_fill);
1762 5 : int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1763 5 : CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1764 :
1765 : Address base = kNullAddress;
1766 : int fill = 0;
1767 :
1768 : // Word alignment never requires fill.
1769 5 : fill = Heap::GetFillToAlign(base, kWordAligned);
1770 5 : CHECK_EQ(0, fill);
1771 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kWordAligned);
1772 5 : CHECK_EQ(0, fill);
1773 :
1774 : // No fill is required when address is double aligned.
1775 5 : fill = Heap::GetFillToAlign(base, kDoubleAligned);
1776 5 : CHECK_EQ(0, fill);
1777 : // Fill is required if address is not double aligned.
1778 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleAligned);
1779 5 : CHECK_EQ(maximum_double_misalignment, fill);
1780 : // kDoubleUnaligned has the opposite fill amounts.
1781 5 : fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
1782 5 : CHECK_EQ(maximum_double_misalignment, fill);
1783 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleUnaligned);
1784 5 : CHECK_EQ(0, fill);
1785 5 : }
1786 :
1787 : static HeapObject NewSpaceAllocateAligned(int size,
1788 : AllocationAlignment alignment) {
1789 : Heap* heap = CcTest::heap();
1790 : AllocationResult allocation =
1791 : heap->new_space()->AllocateRawAligned(size, alignment);
1792 : HeapObject obj;
1793 : allocation.To(&obj);
1794 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1795 : return obj;
1796 : }
1797 :
// Get new space allocation into the desired alignment.
// |offset| additionally perturbs the top by that many bytes; returns the
// resulting allocation top. NOTE(review): when the top already has the
// desired alignment (fill == 0), a nonzero |offset| is ignored — callers
// appear to rely on this pairing of fill and offset; confirm before reuse.
static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  int fill = Heap::GetFillToAlign(*top_addr, alignment);
  if (fill) {
    NewSpaceAllocateAligned(fill + offset, kWordAligned);
  }
  return *top_addr;
}
1807 :
1808 :
// Checks that new-space raw aligned allocation honors kDoubleAligned and
// kDoubleUnaligned, inserting a one-word filler exactly when the current
// allocation top is misaligned. Only meaningful when kDoubleSize differs
// from kTaggedSize.
TEST(TestAlignedAllocation) {
  // Double misalignment is 4 on 32-bit platforms or when pointer compression
  // is enabled, 0 on 64-bit ones when pointer compression is disabled.
  const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  Address start;
  HeapObject obj;
  HeapObject filler;
  if (double_misalignment) {
    // Allocate a pointer sized object that must be double aligned at an
    // aligned address.
    start = AlignNewSpace(kDoubleAligned, 0);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
    CHECK(IsAligned(obj->address(), kDoubleAlignment));
    // There is no filler.
    CHECK_EQ(kTaggedSize, *top_addr - start);

    // Allocate a second pointer sized object that must be double aligned at an
    // unaligned address.
    start = AlignNewSpace(kDoubleAligned, kTaggedSize);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
    CHECK(IsAligned(obj->address(), kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
    CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);

    // Similarly for kDoubleUnaligned.
    start = AlignNewSpace(kDoubleUnaligned, 0);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
    CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
    CHECK_EQ(kTaggedSize, *top_addr - start);
    start = AlignNewSpace(kDoubleUnaligned, kTaggedSize);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
    CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
    CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
  }
}
1850 :
1851 : static HeapObject OldSpaceAllocateAligned(int size,
1852 : AllocationAlignment alignment) {
1853 : Heap* heap = CcTest::heap();
1854 : AllocationResult allocation =
1855 : heap->old_space()->AllocateRawAligned(size, alignment);
1856 : HeapObject obj;
1857 : allocation.To(&obj);
1858 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1859 : return obj;
1860 : }
1861 :
// Get old space allocation into the desired alignment.
// Allocates fill + |offset| bytes so that the linear allocation top reaches
// the requested (mis)alignment, then releases the linear allocation area so
// the next allocation must come from the free list. Returns the top as it
// was before the area was freed.
static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
  Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
  int fill = Heap::GetFillToAlign(*top_addr, alignment);
  int allocation = fill + offset;
  if (allocation) {
    OldSpaceAllocateAligned(allocation, kWordAligned);
  }
  Address top = *top_addr;
  // Now force the remaining allocation onto the free list.
  CcTest::heap()->old_space()->FreeLinearAllocationArea();
  return top;
}
1875 :
1876 :
1877 : // Test the case where allocation must be done from the free list, so filler
1878 : // may precede or follow the object.
1879 28342 : TEST(TestAlignedOverAllocation) {
1880 10 : Heap* heap = CcTest::heap();
1881 : // Test checks for fillers before and behind objects and requires a fresh
1882 : // page and empty free list.
1883 5 : heap::AbandonCurrentlyFreeMemory(heap->old_space());
1884 : // Allocate a dummy object to properly set up the linear allocation info.
1885 5 : AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
1886 5 : CHECK(!dummy.IsRetry());
1887 : heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kTaggedSize,
1888 10 : ClearRecordedSlots::kNo);
1889 :
1890 : // Double misalignment is 4 on 32-bit platforms or when pointer compression
1891 : // is enabled, 0 on 64-bit ones when pointer compression is disabled.
1892 : const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
1893 : Address start;
1894 : HeapObject obj;
1895 : HeapObject filler;
1896 : if (double_misalignment) {
1897 : start = AlignOldSpace(kDoubleAligned, 0);
1898 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1899 : // The object is aligned.
1900 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1901 : // Try the opposite alignment case.
1902 : start = AlignOldSpace(kDoubleAligned, kTaggedSize);
1903 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1904 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1905 : filler = HeapObject::FromAddress(start);
1906 : CHECK(obj != filler);
1907 : CHECK(filler->IsFiller());
1908 : CHECK_EQ(kTaggedSize, filler->Size());
1909 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1910 :
1911 : // Similarly for kDoubleUnaligned.
1912 : start = AlignOldSpace(kDoubleUnaligned, 0);
1913 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1914 : // The object is aligned.
1915 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1916 : // Try the opposite alignment case.
1917 : start = AlignOldSpace(kDoubleUnaligned, kTaggedSize);
1918 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1919 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1920 : filler = HeapObject::FromAddress(start);
1921 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1922 : }
1923 5 : }
1924 :
1925 :
1926 28342 : TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1927 5 : CcTest::InitializeVM();
1928 5 : HeapIterator iterator(CcTest::heap());
1929 5 : intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1930 : intptr_t size_of_objects_2 = 0;
1931 69170 : for (HeapObject obj = iterator.next(); !obj.is_null();
1932 : obj = iterator.next()) {
1933 34580 : if (!obj->IsFreeSpace()) {
1934 34580 : size_of_objects_2 += obj->Size();
1935 : }
1936 : }
1937 : // Delta must be within 5% of the larger result.
1938 : // TODO(gc): Tighten this up by distinguishing between byte
1939 : // arrays that are real and those that merely mark free space
1940 : // on the heap.
1941 5 : if (size_of_objects_1 > size_of_objects_2) {
1942 5 : intptr_t delta = size_of_objects_1 - size_of_objects_2;
1943 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1944 : ", "
1945 : "Iterator: %" V8PRIdPTR
1946 : ", "
1947 : "delta: %" V8PRIdPTR "\n",
1948 5 : size_of_objects_1, size_of_objects_2, delta);
1949 5 : CHECK_GT(size_of_objects_1 / 20, delta);
1950 : } else {
1951 0 : intptr_t delta = size_of_objects_2 - size_of_objects_1;
1952 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1953 : ", "
1954 : "Iterator: %" V8PRIdPTR
1955 : ", "
1956 : "delta: %" V8PRIdPTR "\n",
1957 0 : size_of_objects_1, size_of_objects_2, delta);
1958 0 : CHECK_GT(size_of_objects_2 / 20, delta);
1959 5 : }
1960 5 : }
1961 :
// Checks that the new space doubles/halves its capacity via the explicit
// Grow()/Shrink() APIs, and that shrinking is a no-op while the space is
// still in use or already at minimum capacity.
TEST(GrowAndShrinkNewSpace) {
  // Avoid shrinking new space in GC epilogue. This can happen if allocation
  // throughput samples have been taken while executing the benchmark.
  FLAG_predictable = true;

  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();

  // Nothing to test if the semispace cannot grow beyond its initial size.
  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    return;
  }

  // Make sure we're in a consistent state to start out.
  CcTest::CollectAllGarbage();

  // Explicitly growing should double the space capacity.
  size_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(2 * old_capacity, new_capacity);

  // Filling the space does not change the reported capacity.
  old_capacity = new_space->TotalCapacity();
  {
    v8::HandleScope temporary_scope(CcTest::isolate());
    heap::SimulateFullSpace(new_space);
  }
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);

  // Explicitly shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);

  // Let the scavenger empty the new space.
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);
}
2017 :
// Checks that CollectAllAvailableGarbage shrinks a previously grown new
// space back to its original capacity.
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  // Nothing to test if the semispace cannot grow beyond its initial size.
  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = heap->new_space();
  size_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  // Growing doubles the capacity.
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(2 * old_capacity, new_capacity);
  {
    v8::HandleScope temporary_scope(CcTest::isolate());
    heap::SimulateFullSpace(new_space);
  }
  // The aggressive full GC should shrink the space back.
  CcTest::CollectAllAvailableGarbage();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);
}
2040 :
2041 60 : static int NumberOfGlobalObjects() {
2042 : int count = 0;
2043 60 : HeapIterator iterator(CcTest::heap());
2044 797490 : for (HeapObject obj = iterator.next(); !obj.is_null();
2045 : obj = iterator.next()) {
2046 398685 : if (obj->IsJSGlobalObject()) count++;
2047 : }
2048 60 : return count;
2049 : }
2050 :
2051 :
2052 : // Test that we don't embed maps from foreign contexts into
2053 : // optimized code.
2054 28342 : TEST(LeakNativeContextViaMap) {
2055 5 : FLAG_allow_natives_syntax = true;
2056 5 : v8::Isolate* isolate = CcTest::isolate();
2057 5 : v8::HandleScope outer_scope(isolate);
2058 : v8::Persistent<v8::Context> ctx1p;
2059 : v8::Persistent<v8::Context> ctx2p;
2060 : {
2061 5 : v8::HandleScope scope(isolate);
2062 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
2063 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
2064 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2065 : }
2066 :
2067 5 : CcTest::CollectAllAvailableGarbage();
2068 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2069 :
2070 : {
2071 5 : v8::HandleScope inner_scope(isolate);
2072 : CompileRun("var v = {x: 42}");
2073 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2074 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2075 : v8::Local<v8::Value> v =
2076 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2077 5 : ctx2->Enter();
2078 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2079 : v8::Local<v8::Value> res = CompileRun(
2080 : "function f() { return o.x; }"
2081 : "for (var i = 0; i < 10; ++i) f();"
2082 : "%OptimizeFunctionOnNextCall(f);"
2083 : "f();");
2084 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2085 25 : CHECK(ctx2->Global()
2086 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2087 : .FromJust());
2088 5 : ctx2->Exit();
2089 5 : v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2090 : ctx1p.Reset();
2091 5 : isolate->ContextDisposedNotification();
2092 : }
2093 5 : CcTest::CollectAllAvailableGarbage();
2094 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2095 : ctx2p.Reset();
2096 5 : CcTest::CollectAllAvailableGarbage();
2097 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2098 5 : }
2099 :
2100 :
// Test that we don't embed functions from foreign contexts into
// optimized code. If optimized code in ctx2 kept a function from ctx1
// alive, ctx1's global object would survive the final GCs below.
TEST(LeakNativeContextViaFunction) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Each live context contributes one global object.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = function() { return 42; }");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
        "f(o);");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the explicit cross-context reference so only embedded references
    // in optimized code could still keep ctx1 alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2148 :
2149 :
// Same as LeakNativeContextViaMap but exercising keyed (elements) access,
// which embeds different maps/ICs into the optimized code.
TEST(LeakNativeContextViaMapKeyed) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Each live context contributes one global object.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = [42, 43]");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the explicit cross-context reference so only embedded references
    // in optimized code could still keep ctx1 alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2195 :
2196 :
// Same as LeakNativeContextViaMap but reaching the foreign object through a
// prototype chain set up inside the optimized function.
TEST(LeakNativeContextViaMapProto) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Each live context contributes one global object.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Sever the explicit cross-context reference so only embedded references
    // in optimized code could still keep ctx1 alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2246 :
2247 :
// Runs optimized code containing `instanceof` while incremental marking is
// in progress and the function's code is already marked black, then forces
// a full GC. With VERIFY_HEAP enabled, a missing write barrier in the
// generated code would be caught by heap verification.
TEST(InstanceOfStubWriteBarrier) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  // The test requires optimized code and non-overflowing marking deques.
  if (!CcTest::i_isolate()->use_optimizer()) return;
  if (FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);

  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  IncrementalMarking::MarkingState* marking_state = marking->marking_state();

  // Step the marker until f's code object has been marked black.
  while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
  }

  CHECK(marking->IsMarking());

  {
    // Call g() — and thus the optimized f — while marking is active.
    v8::HandleScope scope(CcTest::isolate());
    v8::Local<v8::Object> global = CcTest::global();
    v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
        global->Get(ctx, v8_str("g")).ToLocalChecked());
    g->Call(ctx, global, 0, nullptr).ToLocalChecked();
  }

  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
2307 :
// Checks that current_gc_flags_ is reset after a full GC, survives a
// scavenge, and is set while an incremental cycle with
// kReduceMemoryFootprintMask is in progress.
HEAP_TEST(GCFlags) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();

  heap->set_current_gc_flags(Heap::kNoGCFlags);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Check whether we appropriately reset flags after GC.
  CcTest::heap()->CollectAllGarbage(Heap::kReduceMemoryFootprintMask,
                                    GarbageCollectionReason::kTesting);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
                                i::GarbageCollectionReason::kTesting);
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  CcTest::CollectGarbage(NEW_SPACE);
  // NewSpace scavenges should not overwrite the flags.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  // Finishing the cycle with a full GC resets the flags.
  CcTest::CollectAllGarbage();
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
2339 :
HEAP_TEST(Regress845060) {
  // Regression test for crbug.com/845060, where a raw pointer to a string's
  // data was kept across an allocation. If the allocation causes GC and
  // moves the string, such raw pointers become invalid.
  FLAG_allow_natives_syntax = true;
  // Disable stress modes so the test controls GC timing itself.
  FLAG_stress_incremental_marking = false;
  FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  LocalContext context;
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();

  // Preparation: create a string in new space.
  Local<Value> str = CompileRun("var str = (new Array(10000)).join('x'); str");
  CHECK(Heap::InNewSpace(*v8::Utils::OpenHandle(*str)));

  // Idle incremental marking sets the "kReduceMemoryFootprint" flag, which
  // causes from_space to be unmapped after scavenging.
  heap->StartIdleIncrementalMarking(GarbageCollectionReason::kTesting);
  CHECK(heap->ShouldReduceMemory());

  // Run the test (which allocates results) until the original string was
  // promoted to old space. Unmapping of from_space causes accesses to any
  // stale raw pointers to crash.
  CompileRun("while (%InNewSpace(str)) { str.split(''); }");
  CHECK(!Heap::InNewSpace(*v8::Utils::OpenHandle(*str)));
}
2367 :
// Checks that an idle notification with a generous deadline finalizes an
// in-progress incremental marking cycle, i.e. triggers exactly one GC.
TEST(IdleNotificationFinishMarking) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  const int initial_gc_count = CcTest::heap()->gc_count();
  heap::SimulateFullSpace(CcTest::heap()->old_space());
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);

  // Starting marking alone must not count as a GC.
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);

  // Step the marker until the marking worklist is drained.
  do {
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
  } while (
      !CcTest::heap()->mark_compact_collector()->marking_worklist()->IsEmpty());

  marking->SetWeakClosureWasOverApproximatedForTesting(true);

  // The next idle notification has to finish incremental marking.
  const double kLongIdleTime = 1000.0;
  CcTest::isolate()->IdleNotificationDeadline(
      (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
       static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
      kLongIdleTime);
  // Exactly one GC must have been performed to finalize marking.
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
}
2398 :
2399 :
// Test that HAllocateObject will always return an object in new-space.
TEST(OptimizedAllocationAlwaysInNewSpace) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Only meaningful with the optimizing compiler and default GC flags.
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Fill new space so allocation pressure is present when the optimized
  // code allocates; AlwaysAllocateScope keeps allocations from failing.
  heap::SimulateFullSpace(CcTest::heap()->new_space());
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      "  this.x = x;"
      "  for (var i = 0; i < 32; i++) {"
      "    this['x' + i] = x;"
      "  }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");

  CHECK_EQ(4, res.As<v8::Object>()
                  ->GetRealNamedProperty(ctx, v8_str("x"))
                  .ToLocalChecked()
                  ->Int32Value(ctx)
                  .FromJust());

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));

  // The object created by the optimized code must live in new space.
  CHECK(Heap::InNewSpace(*o));
}
2435 :
2436 :
// Checks that once the allocation site is tenured, folded allocations made
// by optimized code — the outer array, its nested literal arrays, and their
// elements backing stores — all end up in old space.
TEST(OptimizedPretenuringAllocationFolding) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  // Only meaningful with the optimizing compiler and default GC flags.
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array();"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = [[{}], [1.1]];"
              "  }"
              "  return elements[number_elements-1]"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  // Everything reachable from the folded allocation must be in old space.
  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
2487 :
2488 :
2489 28342 : TEST(OptimizedPretenuringObjectArrayLiterals) {
2490 5 : FLAG_allow_natives_syntax = true;
2491 5 : FLAG_expose_gc = true;
2492 5 : CcTest::InitializeVM();
2493 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2494 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2495 : FLAG_stress_incremental_marking) {
2496 : return;
2497 : }
2498 2 : v8::HandleScope scope(CcTest::isolate());
2499 :
2500 : // Grow new space until maximum capacity reached.
2501 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2502 8 : CcTest::heap()->new_space()->Grow();
2503 : }
2504 :
2505 : i::ScopedVector<char> source(1024);
2506 : i::SNPrintF(source,
2507 : "var number_elements = %d;"
2508 : "var elements = new Array(number_elements);"
2509 : "function f() {"
2510 : " for (var i = 0; i < number_elements; i++) {"
2511 : " elements[i] = [{}, {}, {}];"
2512 : " }"
2513 : " return elements[number_elements - 1];"
2514 : "};"
2515 : "f(); gc();"
2516 : "f(); f();"
2517 : "%%OptimizeFunctionOnNextCall(f);"
2518 : "f();",
2519 2 : kPretenureCreationCount);
2520 :
2521 : v8::Local<v8::Value> res = CompileRun(source.start());
2522 :
2523 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2524 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2525 :
2526 4 : CHECK(CcTest::heap()->InOldSpace(o->elements()));
2527 6 : CHECK(CcTest::heap()->InOldSpace(*o));
2528 : }
2529 :
2530 28342 : TEST(OptimizedPretenuringNestedInObjectProperties) {
2531 5 : FLAG_allow_natives_syntax = true;
2532 5 : FLAG_expose_gc = true;
2533 5 : CcTest::InitializeVM();
2534 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2535 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2536 : FLAG_stress_incremental_marking) {
2537 : return;
2538 : }
2539 2 : v8::HandleScope scope(CcTest::isolate());
2540 :
2541 : // Grow new space until maximum capacity reached.
2542 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2543 8 : CcTest::heap()->new_space()->Grow();
2544 : }
2545 :
2546 : // Keep the nested literal alive while its root is freed
2547 : i::ScopedVector<char> source(1024);
2548 : i::SNPrintF(source,
2549 : "let number_elements = %d;"
2550 : "let elements = new Array(number_elements);"
2551 : "function f() {"
2552 : " for (let i = 0; i < number_elements; i++) {"
2553 : " let l = {a: {c: 2.2, d: {e: 3.3}}, b: 1.1}; "
2554 : " elements[i] = l.a;"
2555 : " }"
2556 : " return elements[number_elements-1];"
2557 : "};"
2558 : "f(); gc(); gc();"
2559 : "f(); f();"
2560 : "%%OptimizeFunctionOnNextCall(f);"
2561 : "f();",
2562 2 : kPretenureCreationCount);
2563 :
2564 : v8::Local<v8::Value> res = CompileRun(source.start());
2565 :
2566 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2567 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2568 :
2569 : // Nested literal sites are only pretenured if the top level
2570 : // literal is pretenured
2571 4 : CHECK(Heap::InNewSpace(*o));
2572 : }
2573 :
2574 28342 : TEST(OptimizedPretenuringMixedInObjectProperties) {
2575 5 : FLAG_allow_natives_syntax = true;
2576 5 : FLAG_expose_gc = true;
2577 5 : CcTest::InitializeVM();
2578 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2579 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2580 : FLAG_stress_incremental_marking)
2581 : return;
2582 2 : v8::HandleScope scope(CcTest::isolate());
2583 :
2584 : // Grow new space until maximum capacity reached.
2585 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2586 8 : CcTest::heap()->new_space()->Grow();
2587 : }
2588 :
2589 :
2590 : i::ScopedVector<char> source(1024);
2591 : i::SNPrintF(source,
2592 : "var number_elements = %d;"
2593 : "var elements = new Array(number_elements);"
2594 : "function f() {"
2595 : " for (var i = 0; i < number_elements; i++) {"
2596 : " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2597 : " }"
2598 : " return elements[number_elements - 1];"
2599 : "};"
2600 : "f(); gc();"
2601 : "f(); f();"
2602 : "%%OptimizeFunctionOnNextCall(f);"
2603 : "f();",
2604 2 : kPretenureCreationCount);
2605 :
2606 : v8::Local<v8::Value> res = CompileRun(source.start());
2607 :
2608 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2609 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2610 :
2611 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2612 2 : FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2613 2 : FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2614 4 : CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
2615 2 : if (!o->IsUnboxedDoubleField(idx2)) {
2616 0 : CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
2617 : } else {
2618 2 : CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
2619 : }
2620 :
2621 4 : JSObject inner_object = JSObject::cast(o->RawFastPropertyAt(idx1));
2622 4 : CHECK(CcTest::heap()->InOldSpace(inner_object));
2623 2 : if (!inner_object->IsUnboxedDoubleField(idx1)) {
2624 0 : CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
2625 : } else {
2626 2 : CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
2627 : }
2628 6 : CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
2629 : }
2630 :
2631 :
2632 28342 : TEST(OptimizedPretenuringDoubleArrayProperties) {
2633 5 : FLAG_allow_natives_syntax = true;
2634 5 : FLAG_expose_gc = true;
2635 5 : CcTest::InitializeVM();
2636 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2637 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2638 : FLAG_stress_incremental_marking)
2639 : return;
2640 2 : v8::HandleScope scope(CcTest::isolate());
2641 :
2642 : // Grow new space until maximum capacity reached.
2643 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2644 8 : CcTest::heap()->new_space()->Grow();
2645 : }
2646 :
2647 : i::ScopedVector<char> source(1024);
2648 : i::SNPrintF(source,
2649 : "var number_elements = %d;"
2650 : "var elements = new Array(number_elements);"
2651 : "function f() {"
2652 : " for (var i = 0; i < number_elements; i++) {"
2653 : " elements[i] = {a: 1.1, b: 2.2};"
2654 : " }"
2655 : " return elements[i - 1];"
2656 : "};"
2657 : "f(); gc();"
2658 : "f(); f();"
2659 : "%%OptimizeFunctionOnNextCall(f);"
2660 : "f();",
2661 2 : kPretenureCreationCount);
2662 :
2663 : v8::Local<v8::Value> res = CompileRun(source.start());
2664 :
2665 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2666 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2667 :
2668 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2669 8 : CHECK_EQ(o->property_array(),
2670 2 : ReadOnlyRoots(CcTest::heap()).empty_property_array());
2671 : }
2672 :
2673 :
2674 28342 : TEST(OptimizedPretenuringdoubleArrayLiterals) {
2675 5 : FLAG_allow_natives_syntax = true;
2676 5 : FLAG_expose_gc = true;
2677 5 : CcTest::InitializeVM();
2678 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2679 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2680 : FLAG_stress_incremental_marking)
2681 : return;
2682 2 : v8::HandleScope scope(CcTest::isolate());
2683 :
2684 : // Grow new space until maximum capacity reached.
2685 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2686 8 : CcTest::heap()->new_space()->Grow();
2687 : }
2688 :
2689 : i::ScopedVector<char> source(1024);
2690 : i::SNPrintF(source,
2691 : "var number_elements = %d;"
2692 : "var elements = new Array(number_elements);"
2693 : "function f() {"
2694 : " for (var i = 0; i < number_elements; i++) {"
2695 : " elements[i] = [1.1, 2.2, 3.3];"
2696 : " }"
2697 : " return elements[number_elements - 1];"
2698 : "};"
2699 : "f(); gc();"
2700 : "f(); f();"
2701 : "%%OptimizeFunctionOnNextCall(f);"
2702 : "f();",
2703 2 : kPretenureCreationCount);
2704 :
2705 : v8::Local<v8::Value> res = CompileRun(source.start());
2706 :
2707 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2708 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2709 :
2710 4 : CHECK(CcTest::heap()->InOldSpace(o->elements()));
2711 6 : CHECK(CcTest::heap()->InOldSpace(*o));
2712 : }
2713 :
2714 :
2715 28342 : TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2716 5 : FLAG_allow_natives_syntax = true;
2717 5 : FLAG_expose_gc = true;
2718 5 : CcTest::InitializeVM();
2719 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2720 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2721 : FLAG_stress_incremental_marking)
2722 : return;
2723 2 : v8::HandleScope scope(CcTest::isolate());
2724 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2725 : // Grow new space until maximum capacity reached.
2726 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2727 8 : CcTest::heap()->new_space()->Grow();
2728 : }
2729 :
2730 : i::ScopedVector<char> source(1024);
2731 : i::SNPrintF(source,
2732 : "var number_elements = %d;"
2733 : "var elements = new Array(number_elements);"
2734 : "function f() {"
2735 : " for (var i = 0; i < number_elements; i++) {"
2736 : " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2737 : " }"
2738 : " return elements[number_elements - 1];"
2739 : "};"
2740 : "f(); gc();"
2741 : "f(); f();"
2742 : "%%OptimizeFunctionOnNextCall(f);"
2743 : "f();",
2744 2 : kPretenureCreationCount);
2745 :
2746 : v8::Local<v8::Value> res = CompileRun(source.start());
2747 :
2748 : v8::Local<v8::Value> int_array =
2749 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2750 : i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2751 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2752 : v8::Local<v8::Value> double_array =
2753 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2754 : i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2755 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2756 :
2757 : Handle<JSObject> o = Handle<JSObject>::cast(
2758 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2759 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2760 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2761 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2762 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2763 6 : CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2764 : }
2765 :
2766 :
2767 28342 : TEST(OptimizedPretenuringNestedObjectLiterals) {
2768 5 : FLAG_allow_natives_syntax = true;
2769 5 : FLAG_expose_gc = true;
2770 5 : CcTest::InitializeVM();
2771 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2772 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2773 : FLAG_stress_incremental_marking)
2774 : return;
2775 2 : v8::HandleScope scope(CcTest::isolate());
2776 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2777 : // Grow new space until maximum capacity reached.
2778 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2779 8 : CcTest::heap()->new_space()->Grow();
2780 : }
2781 :
2782 : i::ScopedVector<char> source(1024);
2783 : i::SNPrintF(source,
2784 : "var number_elements = %d;"
2785 : "var elements = new Array(number_elements);"
2786 : "function f() {"
2787 : " for (var i = 0; i < number_elements; i++) {"
2788 : " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2789 : " }"
2790 : " return elements[number_elements - 1];"
2791 : "};"
2792 : "f(); gc();"
2793 : "f(); f();"
2794 : "%%OptimizeFunctionOnNextCall(f);"
2795 : "f();",
2796 2 : kPretenureCreationCount);
2797 :
2798 : v8::Local<v8::Value> res = CompileRun(source.start());
2799 :
2800 : v8::Local<v8::Value> int_array_1 =
2801 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2802 : Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
2803 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
2804 : v8::Local<v8::Value> int_array_2 =
2805 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2806 : Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
2807 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));
2808 :
2809 : Handle<JSObject> o = Handle<JSObject>::cast(
2810 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2811 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2812 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
2813 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
2814 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
2815 6 : CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
2816 : }
2817 :
2818 :
2819 28342 : TEST(OptimizedPretenuringNestedDoubleLiterals) {
2820 5 : FLAG_allow_natives_syntax = true;
2821 5 : FLAG_expose_gc = true;
2822 5 : CcTest::InitializeVM();
2823 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2824 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2825 : FLAG_stress_incremental_marking)
2826 : return;
2827 2 : v8::HandleScope scope(CcTest::isolate());
2828 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2829 : // Grow new space until maximum capacity reached.
2830 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2831 8 : CcTest::heap()->new_space()->Grow();
2832 : }
2833 :
2834 : i::ScopedVector<char> source(1024);
2835 : i::SNPrintF(source,
2836 : "var number_elements = %d;"
2837 : "var elements = new Array(number_elements);"
2838 : "function f() {"
2839 : " for (var i = 0; i < number_elements; i++) {"
2840 : " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2841 : " }"
2842 : " return elements[number_elements - 1];"
2843 : "};"
2844 : "f(); gc();"
2845 : "f(); f();"
2846 : "%%OptimizeFunctionOnNextCall(f);"
2847 : "f();",
2848 2 : kPretenureCreationCount);
2849 :
2850 : v8::Local<v8::Value> res = CompileRun(source.start());
2851 :
2852 : v8::Local<v8::Value> double_array_1 =
2853 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2854 : i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
2855 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
2856 : v8::Local<v8::Value> double_array_2 =
2857 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2858 : i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
2859 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));
2860 :
2861 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2862 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2863 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2864 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
2865 4 : CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
2866 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
2867 6 : CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
2868 : }
2869 :
2870 :
2871 : // Test regular array literals allocation.
2872 28342 : TEST(OptimizedAllocationArrayLiterals) {
2873 5 : FLAG_allow_natives_syntax = true;
2874 5 : CcTest::InitializeVM();
2875 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2876 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2877 : FLAG_stress_incremental_marking)
2878 : return;
2879 2 : v8::HandleScope scope(CcTest::isolate());
2880 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2881 : v8::Local<v8::Value> res = CompileRun(
2882 : "function f() {"
2883 : " var numbers = new Array(1, 2, 3);"
2884 : " numbers[0] = 3.14;"
2885 : " return numbers;"
2886 : "};"
2887 : "f(); f(); f();"
2888 : "%OptimizeFunctionOnNextCall(f);"
2889 : "f();");
2890 8 : CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
2891 : ->Get(ctx, v8_str("0"))
2892 : .ToLocalChecked()
2893 : ->Int32Value(ctx)
2894 : .FromJust());
2895 :
2896 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2897 2 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2898 :
2899 4 : CHECK(Heap::InNewSpace(o->elements()));
2900 : }
2901 :
2902 10 : static int CountMapTransitions(i::Isolate* isolate, Map map) {
2903 : DisallowHeapAllocation no_gc;
2904 10 : return TransitionsAccessor(isolate, map, &no_gc).NumberOfTransitions();
2905 : }
2906 :
2907 :
2908 : // Test that map transitions are cleared and maps are collected with
2909 : // incremental marking as well.
2910 28342 : TEST(Regress1465) {
2911 5 : if (!FLAG_incremental_marking) return;
2912 5 : FLAG_stress_compaction = false;
2913 5 : FLAG_stress_incremental_marking = false;
2914 5 : FLAG_allow_natives_syntax = true;
2915 5 : FLAG_trace_incremental_marking = true;
2916 5 : FLAG_retain_maps_for_n_gc = 0;
2917 5 : CcTest::InitializeVM();
2918 5 : v8::Isolate* isolate = CcTest::isolate();
2919 : i::Isolate* i_isolate = CcTest::i_isolate();
2920 5 : v8::HandleScope scope(isolate);
2921 5 : v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
2922 : static const int transitions_count = 256;
2923 :
2924 : CompileRun("function F() {}");
2925 : {
2926 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2927 1285 : for (int i = 0; i < transitions_count; i++) {
2928 : EmbeddedVector<char, 64> buffer;
2929 1280 : SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2930 1280 : CompileRun(buffer.start());
2931 : }
2932 : CompileRun("var root = new F;");
2933 : }
2934 :
2935 : i::Handle<JSReceiver> root =
2936 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2937 20 : CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
2938 :
2939 : // Count number of live transitions before marking.
2940 5 : int transitions_before = CountMapTransitions(i_isolate, root->map());
2941 : CompileRun("%DebugPrint(root);");
2942 5 : CHECK_EQ(transitions_count, transitions_before);
2943 :
2944 5 : heap::SimulateIncrementalMarking(CcTest::heap());
2945 5 : CcTest::CollectAllGarbage();
2946 :
2947 : // Count number of live transitions after marking. Note that one transition
2948 : // is left, because 'o' still holds an instance of one transition target.
2949 5 : int transitions_after = CountMapTransitions(i_isolate, root->map());
2950 : CompileRun("%DebugPrint(root);");
2951 5 : CHECK_EQ(1, transitions_after);
2952 : }
2953 :
2954 :
2955 : #ifdef DEBUG
2956 : static void AddTransitions(int transitions_count) {
2957 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2958 : for (int i = 0; i < transitions_count; i++) {
2959 : EmbeddedVector<char, 64> buffer;
2960 : SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2961 : CompileRun(buffer.start());
2962 : }
2963 : }
2964 :
2965 :
2966 : static i::Handle<JSObject> GetByName(const char* name) {
2967 : return i::Handle<JSObject>::cast(
2968 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2969 : CcTest::global()
2970 : ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
2971 : .ToLocalChecked())));
2972 : }
2973 :
2974 :
2975 : static void AddPropertyTo(
2976 : int gc_count, Handle<JSObject> object, const char* property_name) {
2977 : Isolate* isolate = CcTest::i_isolate();
2978 : Factory* factory = isolate->factory();
2979 : Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
2980 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
2981 : FLAG_gc_interval = gc_count;
2982 : FLAG_gc_global = true;
2983 : FLAG_retain_maps_for_n_gc = 0;
2984 : CcTest::heap()->set_allocation_timeout(gc_count);
2985 : Object::SetProperty(isolate, object, prop_name, twenty_three,
2986 : LanguageMode::kSloppy)
2987 : .Check();
2988 : }
2989 :
2990 :
2991 : TEST(TransitionArrayShrinksDuringAllocToZero) {
2992 : FLAG_stress_compaction = false;
2993 : FLAG_stress_incremental_marking = false;
2994 : FLAG_allow_natives_syntax = true;
2995 : CcTest::InitializeVM();
2996 : i::Isolate* i_isolate = CcTest::i_isolate();
2997 : v8::HandleScope scope(CcTest::isolate());
2998 : static const int transitions_count = 10;
2999 : CompileRun("function F() { }");
3000 : AddTransitions(transitions_count);
3001 : CompileRun("var root = new F;");
3002 : Handle<JSObject> root = GetByName("root");
3003 :
3004 : // Count number of live transitions before marking.
3005 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3006 : CHECK_EQ(transitions_count, transitions_before);
3007 :
3008 : // Get rid of o
3009 : CompileRun("o = new F;"
3010 : "root = new F");
3011 : root = GetByName("root");
3012 : AddPropertyTo(2, root, "funny");
3013 : CcTest::CollectGarbage(NEW_SPACE);
3014 :
3015 : // Count number of live transitions after marking. Note that one transition
3016 : // is left, because 'o' still holds an instance of one transition target.
3017 : int transitions_after =
3018 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3019 : CHECK_EQ(1, transitions_after);
3020 : }
3021 :
3022 :
3023 : TEST(TransitionArrayShrinksDuringAllocToOne) {
3024 : FLAG_stress_compaction = false;
3025 : FLAG_stress_incremental_marking = false;
3026 : FLAG_allow_natives_syntax = true;
3027 : CcTest::InitializeVM();
3028 : i::Isolate* i_isolate = CcTest::i_isolate();
3029 : v8::HandleScope scope(CcTest::isolate());
3030 : static const int transitions_count = 10;
3031 : CompileRun("function F() {}");
3032 : AddTransitions(transitions_count);
3033 : CompileRun("var root = new F;");
3034 : Handle<JSObject> root = GetByName("root");
3035 :
3036 : // Count number of live transitions before marking.
3037 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3038 : CHECK_EQ(transitions_count, transitions_before);
3039 :
3040 : root = GetByName("root");
3041 : AddPropertyTo(2, root, "funny");
3042 : CcTest::CollectGarbage(NEW_SPACE);
3043 :
3044 : // Count number of live transitions after marking. Note that one transition
3045 : // is left, because 'o' still holds an instance of one transition target.
3046 : int transitions_after =
3047 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3048 : CHECK_EQ(2, transitions_after);
3049 : }
3050 :
3051 :
3052 : TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3053 : FLAG_stress_compaction = false;
3054 : FLAG_stress_incremental_marking = false;
3055 : FLAG_allow_natives_syntax = true;
3056 : CcTest::InitializeVM();
3057 : i::Isolate* i_isolate = CcTest::i_isolate();
3058 : v8::HandleScope scope(CcTest::isolate());
3059 : static const int transitions_count = 10;
3060 : CompileRun("function F() {}");
3061 : AddTransitions(transitions_count);
3062 : CompileRun("var root = new F;");
3063 : Handle<JSObject> root = GetByName("root");
3064 :
3065 : // Count number of live transitions before marking.
3066 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3067 : CHECK_EQ(transitions_count, transitions_before);
3068 :
3069 : root = GetByName("root");
3070 : AddPropertyTo(0, root, "prop9");
3071 : CcTest::CollectGarbage(OLD_SPACE);
3072 :
3073 : // Count number of live transitions after marking. Note that one transition
3074 : // is left, because 'o' still holds an instance of one transition target.
3075 : int transitions_after =
3076 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3077 : CHECK_EQ(1, transitions_after);
3078 : }
3079 : #endif // DEBUG
3080 :
3081 :
3082 28342 : TEST(ReleaseOverReservedPages) {
3083 5 : if (FLAG_never_compact) return;
3084 5 : FLAG_trace_gc = true;
3085 : // The optimizer can allocate stuff, messing up the test.
3086 : #ifndef V8_LITE_MODE
3087 5 : FLAG_opt = false;
3088 5 : FLAG_always_opt = false;
3089 : #endif // V8_LITE_MODE
3090 : // - Parallel compaction increases fragmentation, depending on how existing
3091 : // memory is distributed. Since this is non-deterministic because of
3092 : // concurrent sweeping, we disable it for this test.
3093 : // - Concurrent sweeping adds non determinism, depending on when memory is
3094 : // available for further reuse.
3095 : // - Fast evacuation of pages may result in a different page count in old
3096 : // space.
3097 : ManualGCScope manual_gc_scope;
3098 5 : FLAG_page_promotion = false;
3099 5 : FLAG_parallel_compaction = false;
3100 5 : CcTest::InitializeVM();
3101 5 : Isolate* isolate = CcTest::i_isolate();
3102 : // If there's snapshot available, we don't know whether 20 small arrays will
3103 : // fit on the initial pages.
3104 5 : if (!isolate->snapshot_available()) return;
3105 : Factory* factory = isolate->factory();
3106 5 : Heap* heap = isolate->heap();
3107 10 : v8::HandleScope scope(CcTest::isolate());
3108 : static const int number_of_test_pages = 20;
3109 :
3110 : // Prepare many pages with low live-bytes count.
3111 : PagedSpace* old_space = heap->old_space();
3112 5 : const int initial_page_count = old_space->CountTotalPages();
3113 5 : const int overall_page_count = number_of_test_pages + initial_page_count;
3114 105 : for (int i = 0; i < number_of_test_pages; i++) {
3115 : AlwaysAllocateScope always_allocate(isolate);
3116 100 : heap::SimulateFullSpace(old_space);
3117 100 : factory->NewFixedArray(1, TENURED);
3118 : }
3119 5 : CHECK_EQ(overall_page_count, old_space->CountTotalPages());
3120 :
3121 : // Triggering one GC will cause a lot of garbage to be discovered but
3122 : // even spread across all allocated pages.
3123 5 : CcTest::CollectAllGarbage();
3124 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages());
3125 :
3126 : // Triggering subsequent GCs should cause at least half of the pages
3127 : // to be released to the OS after at most two cycles.
3128 5 : CcTest::CollectAllGarbage();
3129 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages());
3130 5 : CcTest::CollectAllGarbage();
3131 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
3132 :
3133 : // Triggering a last-resort GC should cause all pages to be released to the
3134 : // OS so that other processes can seize the memory. If we get a failure here
3135 : // where there are 2 pages left instead of 1, then we should increase the
3136 : // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3137 : // first page should be small in order to reduce memory used when the VM
3138 : // boots, but if the 20 small arrays don't fit on the first page then that's
3139 : // an indication that it is too small.
3140 5 : CcTest::CollectAllAvailableGarbage();
3141 5 : CHECK_EQ(initial_page_count, old_space->CountTotalPages());
3142 : }
3143 :
3144 : static int forced_gc_counter = 0;
3145 :
3146 6 : void MockUseCounterCallback(v8::Isolate* isolate,
3147 : v8::Isolate::UseCounterFeature feature) {
3148 6 : isolate->GetCurrentContext();
3149 6 : if (feature == v8::Isolate::kForcedGC) {
3150 5 : forced_gc_counter++;
3151 : }
3152 6 : }
3153 :
3154 :
3155 28342 : TEST(CountForcedGC) {
3156 5 : FLAG_expose_gc = true;
3157 5 : CcTest::InitializeVM();
3158 : Isolate* isolate = CcTest::i_isolate();
3159 5 : v8::HandleScope scope(CcTest::isolate());
3160 :
3161 5 : isolate->SetUseCounterCallback(MockUseCounterCallback);
3162 :
3163 5 : forced_gc_counter = 0;
3164 : const char* source = "gc();";
3165 : CompileRun(source);
3166 5 : CHECK_GT(forced_gc_counter, 0);
3167 5 : }
3168 :
3169 :
3170 : #ifdef OBJECT_PRINT
3171 : TEST(PrintSharedFunctionInfo) {
3172 : CcTest::InitializeVM();
3173 : v8::HandleScope scope(CcTest::isolate());
3174 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3175 : const char* source = "f = function() { return 987654321; }\n"
3176 : "g = function() { return 123456789; }\n";
3177 : CompileRun(source);
3178 : i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
3179 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3180 : CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
3181 :
3182 : StdoutStream os;
3183 : g->shared()->Print(os);
3184 : os << std::endl;
3185 : }
3186 : #endif // OBJECT_PRINT
3187 :
3188 :
3189 28342 : TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3190 6 : if (!FLAG_use_ic) return;
3191 5 : if (!FLAG_incremental_marking) return;
3192 5 : if (FLAG_always_opt) return;
3193 4 : CcTest::InitializeVM();
3194 4 : v8::HandleScope scope(CcTest::isolate());
3195 : v8::Local<v8::Value> fun1, fun2;
3196 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3197 : {
3198 : CompileRun("function fun() {};");
3199 16 : fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3200 : }
3201 :
3202 : {
3203 : CompileRun("function fun() {};");
3204 16 : fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3205 : }
3206 :
3207 : // Prepare function f that contains type feedback for the two closures.
3208 16 : CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3209 16 : CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
3210 : CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3211 :
3212 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3213 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3214 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3215 :
3216 8 : Handle<FeedbackVector> feedback_vector(f->feedback_vector(), f->GetIsolate());
3217 4 : FeedbackVectorHelper feedback_helper(feedback_vector);
3218 :
3219 : int expected_slots = 2;
3220 4 : CHECK_EQ(expected_slots, feedback_helper.slot_count());
3221 : int slot1 = 0;
3222 : int slot2 = 1;
3223 8 : CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3224 8 : CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3225 :
3226 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3227 4 : CcTest::CollectAllGarbage();
3228 :
3229 8 : CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3230 12 : CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3231 : }
3232 :
3233 :
3234 24 : static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3235 : InlineCacheState desired_state) {
3236 : Handle<FeedbackVector> vector =
3237 48 : Handle<FeedbackVector>(f->feedback_vector(), f->GetIsolate());
3238 24 : FeedbackVectorHelper helper(vector);
3239 24 : FeedbackSlot slot = helper.slot(slot_index);
3240 24 : FeedbackNexus nexus(vector, slot);
3241 24 : CHECK(nexus.StateFromFeedback() == desired_state);
3242 24 : }
3243 :
// Verifies that an incremental-marking cycle followed by a full GC does not
// clear the weak constructor feedback recorded for a monomorphic `new o()`
// call site whose target is still alive.
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
  if (FLAG_lite_mode) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun); f(fun);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  // Slot 0 holds the constructor feedback; it must be a weak (or cleared)
  // reference before GC...
  Handle<FeedbackVector> vector(f->feedback_vector(), f->GetIsolate());
  CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // ...and must still be weak (not dropped) after marking + full GC.
  CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());
}
3268 :
// Verifies that incremental marking plus a full GC keeps a monomorphic
// load IC (`o.x`) in its MONOMORPHIC state while the receiver map is alive.
TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // Feedback must not have been cleared back to UNINITIALIZED by the GC.
  CheckVectorIC(f, 0, MONOMORPHIC);
}
3291 :
// Verifies that incremental marking plus a full GC keeps a polymorphic
// load IC polymorphic when its feedback spans maps from two different
// native contexts that are both still reachable.
TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Run an incremental marking cycle and a full GC. (Unlike the
  // ContextDisposeDoesntClearPolymorphicIC test below, no context dispose
  // notification is fired here.)
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}
3330 :
// Same setup as IncrementalMarkingPreservesPolymorphicIC, but additionally
// fires a context-dispose notification before GC: the polymorphic feedback
// must survive that notification as well.
TEST(ContextDisposeDoesntClearPolymorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}
3370 :
3371 :
3372 48 : class SourceResource : public v8::String::ExternalOneByteStringResource {
3373 : public:
3374 : explicit SourceResource(const char* data)
3375 24 : : data_(data), length_(strlen(data)) { }
3376 :
3377 24 : void Dispose() override {
3378 24 : i::DeleteArray(data_);
3379 24 : data_ = nullptr;
3380 24 : }
3381 :
3382 288 : const char* data() const override { return data_; }
3383 :
3384 96 : size_t length() const override { return length_; }
3385 :
3386 48 : bool IsDisposed() { return data_ == nullptr; }
3387 :
3388 : private:
3389 : const char* data_;
3390 : size_t length_;
3391 : };
3392 :
3393 :
// Runs |source| (which stores an error into `error`), then runs |accessor|
// (which touches `error.stack`), and checks that the external source string
// is released by GC afterwards — i.e. the stack-trace machinery does not
// retain the script source once the accessor has fired.
void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
                               const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use external string
  // to check whether the data is being released since the external string
  // resource's callback is fired when the external string is GC'ed.
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  v8::HandleScope scope(isolate);
  // Ownership of the StrDup'ed buffer passes to the resource; it is freed
  // in SourceResource::Dispose when GC collects the external string.
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(isolate);
    v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
    v8::Local<v8::String> source_string =
        v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
    i_isolate->heap()->CollectAllAvailableGarbage(
        i::GarbageCollectionReason::kTesting);
    v8::Script::Compile(ctx, source_string)
        .ToLocalChecked()
        ->Run(ctx)
        .ToLocalChecked();
    CHECK(!resource->IsDisposed());
  }
  // i_isolate->heap()->CollectAllAvailableGarbage();
  // The thrown error still references the source via its stack trace data,
  // so the resource must not have been disposed yet.
  CHECK(!resource->IsDisposed());

  CompileRun(accessor);
  i_isolate->heap()->CollectAllAvailableGarbage(
      i::GarbageCollectionReason::kTesting);

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}
3427 :
3428 :
// Exercises ReleaseStackTraceDataTest with several error shapes (plain
// error, stack overflow, and errors reachable only via __proto__) and both
// the `error.stack` getter and setter, in a fresh isolate.
UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
#ifndef V8_LITE_MODE
  // ICs retain objects.
  FLAG_use_ic = false;
#endif  // V8_LITE_MODE
  FLAG_concurrent_recompilation = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 = "var error = null; "
    /* Normal Error */           "try { "
                                 "  throw new Error(); "
                                 "} catch (e) { "
                                 "  error = e; "
                                 "} ";
    static const char* source2 = "var error = null; "
    /* Stack overflow */         "try { "
                                 "  (function f() { f(); })(); "
                                 "} catch (e) { "
                                 "  error = e; "
                                 "} ";
    static const char* source3 = "var error = null; "
    /* Normal Error */           "try { "
    /* as prototype */           "  throw new Error(); "
                                 "} catch (e) { "
                                 "  error = {}; "
                                 "  error.__proto__ = e; "
                                 "} ";
    static const char* source4 = "var error = null; "
    /* Stack overflow */         "try { "
    /* as prototype */           "  (function f() { f(); })(); "
                                 "} catch (e) { "
                                 "  error = {}; "
                                 "  error.__proto__ = e; "
                                 "} ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}
3489 :
// Regression test for crbug.com-style issue 169928: carefully lays out new
// space so that an array literal's expected AllocationMemento slot contains
// a filler, then runs code that inspects the memento. The exact allocation
// order below is load-bearing — do not reorder.
TEST(Regress169928) {
  FLAG_allow_natives_syntax = true;
#ifndef V8_LITE_MODE
  FLAG_opt = false;
#endif  // V8_LITE_MODE
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             "    literal[0] = value;"
             "    return literal;"
             "}"
             "function get_standard_literal() {"
             "    var literal = [1, 2, 3];"
             "    return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CHECK(CcTest::global()
            ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
            .FromJust());

  // First make sure we flip spaces
  CcTest::CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Fill new space so the JSArray below lands at the very end of it,
  // leaving exactly memento-sized room after it.
  heap::AllocateAllButNBytes(
      CcTest::heap()->new_space(),
      JSArray::kSize + AllocationMemento::kSize + kTaggedSize);

  Handle<JSArray> array =
      factory->NewJSArrayWithElements(array_data, PACKED_SMI_ELEMENTS);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasSmiOrObjectElements());

  // We need filler the size of AllocationMemento object, plus an extra
  // fill pointer value.
  HeapObject obj;
  AllocationResult allocation =
      CcTest::heap()->new_space()->AllocateRawUnaligned(
          AllocationMemento::kSize + kTaggedSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(addr_obj,
                                       AllocationMemento::kSize + kTaggedSize,
                                       ClearRecordedSlots::kNo);

  // Give the array a name, making sure not to allocate strings.
  v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(env.local(), mote_code_string)
      .ToLocalChecked()
      ->Run(env.local())
      .ToLocalChecked();
}
3571 :
// Checks that the write barrier records slots inside a large object during
// incremental marking, so pointers into an evacuated page get updated when
// the referenced object moves during compaction.
TEST(LargeObjectSlotRecording) {
  if (!FLAG_incremental_marking) return;
  if (FLAG_never_compact) return;
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create an object on an evacuation candidate.
  heap::SimulateFullSpace(heap->old_space());
  Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
  Page* evac_page = Page::FromHeapObject(*lit);
  heap::ForceEvacuationCandidate(evac_page);
  FixedArray old_location = *lit;

  // Allocate a large object.
  int size = Max(1000000, kMaxRegularHeapObjectSize + KB);
  CHECK_LT(kMaxRegularHeapObjectSize, size);
  Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
  CHECK(heap->lo_space()->Contains(*lo));

  // Start incremental marking to active write barrier.
  heap::SimulateIncrementalMarking(heap, false);
  heap->incremental_marking()->AdvanceIncrementalMarking(
      10000000, IncrementalMarking::NO_GC_VIA_STACK_GUARD, StepOrigin::kV8);

  // Create references from the large object to the object on the evacuation
  // candidate.
  const int kStep = size / 10;
  for (int i = 0; i < size; i += kStep) {
    lo->set(i, *lit);
    CHECK(lo->get(i) == old_location);
  }

  // Move the evaucation candidate object.
  CcTest::CollectAllGarbage();

  // Verify that the pointers in the large object got updated.
  for (int i = 0; i < size; i += kStep) {
    CHECK_EQ(lo->get(i), *lit);
    CHECK(lo->get(i) != old_location);
  }
}
3617 :
3618 5 : class DummyVisitor : public RootVisitor {
3619 : public:
3620 20 : void VisitRootPointers(Root root, const char* description,
3621 20 : FullObjectSlot start, FullObjectSlot end) override {}
3622 : };
3623 :
3624 :
// Fills the current handle block to its limit, then creates a
// DeferredHandleScope and iterates the handle-scope implementer, checking
// that deferred handles can be detached and destroyed cleanly at a block
// boundary.
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(ReadOnlyRoots(heap).empty_string(), isolate);
  // Allocate handles until the current block is exactly full.
  while (data->next < data->limit) {
    Handle<Object> obj(ReadOnlyRoots(heap).empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  CHECK(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  delete deferred.Detach();
}
3644 :
3645 :
// Checks that a single large incremental-marking step (100 MB budget) is
// enough to finish (or nearly finish) marking a heap containing a ~10M
// element array.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking(
        i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
  }
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                StepOrigin::kV8);
  CHECK(marking->IsComplete() ||
        marking->IsReadyToOverApproximateWeakClosure());
}
3667 :
3668 :
// Runs optimized code that allocates heavily with inline allocation
// enabled, disabled, and re-enabled, checking that toggling the feature
// does not break allocation in generated code.
TEST(DisableInlineAllocation) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
3696 :
3697 :
3698 266 : static int AllocationSitesCount(Heap* heap) {
3699 : int count = 0;
3700 1825 : for (Object site = heap->allocation_sites_list(); site->IsAllocationSite();) {
3701 1293 : AllocationSite cur = AllocationSite::cast(site);
3702 1293 : CHECK(cur->HasWeakNext());
3703 1293 : site = cur->weak_next();
3704 1293 : count++;
3705 : }
3706 266 : return count;
3707 : }
3708 :
3709 260 : static int SlimAllocationSiteCount(Heap* heap) {
3710 : int count = 0;
3711 1810 : for (Object weak_list = heap->allocation_sites_list();
3712 : weak_list->IsAllocationSite();) {
3713 1290 : AllocationSite weak_cur = AllocationSite::cast(weak_list);
3714 3645 : for (Object site = weak_cur->nested_site(); site->IsAllocationSite();) {
3715 1065 : AllocationSite cur = AllocationSite::cast(site);
3716 1065 : CHECK(!cur->HasWeakNext());
3717 1065 : site = cur->nested_site();
3718 1065 : count++;
3719 : }
3720 1290 : weak_list = weak_cur->weak_next();
3721 : }
3722 260 : return count;
3723 : }
3724 :
// Checks that optimized code registered as dependent on an AllocationSite
// is held weakly: after the function dies, GC clears the weak reference in
// the site's dependent_code() even though the site itself (kept alive via a
// global handle) survives.
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_optimizer()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    // Pin the site with a global handle so it outlives the context below.
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()
                ->Get(context.local(), v8_str("bar"))
                .ToLocalChecked())));

    // Walk dependent_code(): each entry must weakly reference bar's code
    // under one of the two allocation-site dependency groups.
    int dependency_group_count = 0;
    DependentCode dependency = site->dependent_code();
    while (dependency != ReadOnlyRoots(heap).empty_weak_fixed_array()) {
      CHECK(dependency->group() ==
                DependentCode::kAllocationSiteTransitionChangedGroup ||
            dependency->group() ==
                DependentCode::kAllocationSiteTenuringChangedGroup);
      CHECK_EQ(1, dependency->count());
      CHECK(dependency->object_at(0)->IsWeak());
      Code function_bar =
          Code::cast(dependency->object_at(0)->GetHeapObjectAssumeWeak());
      CHECK_EQ(bar_handle->code(), function_bar);
      dependency = dependency->next_link();
      dependency_group_count++;
    }
    // Expect a dependent code object for transitioning and pretenuring.
    CHECK_EQ(2, dependency_group_count);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  CHECK(site->dependent_code()->object_at(0)->IsCleared());
}
3791 :
3792 130 : void CheckNumberOfAllocations(Heap* heap, const char* source,
3793 : int expected_full_alloc,
3794 : int expected_slim_alloc) {
3795 130 : int prev_fat_alloc_count = AllocationSitesCount(heap);
3796 130 : int prev_slim_alloc_count = SlimAllocationSiteCount(heap);
3797 :
3798 : CompileRun(source);
3799 :
3800 130 : int fat_alloc_sites = AllocationSitesCount(heap) - prev_fat_alloc_count;
3801 130 : int slim_alloc_sites = SlimAllocationSiteCount(heap) - prev_slim_alloc_count;
3802 :
3803 130 : CHECK_EQ(expected_full_alloc, fat_alloc_sites);
3804 130 : CHECK_EQ(expected_slim_alloc, slim_alloc_sites);
3805 130 : }
3806 :
// Checks when full and slim AllocationSites are created: eagerly for array
// literals in regular functions, lazily for object literals, and not at all
// for one-shot (IIFE/top-level) code unless the literal sits inside a loop.
TEST(AllocationSiteCreation) {
  // No feedback vectors and hence no allocation sites.
  if (FLAG_lite_mode) return;
  FLAG_always_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  i::FLAG_enable_one_shot_optimization = true;

  // Array literals.
  CheckNumberOfAllocations(heap, "function f1() { return []; }; f1()", 1, 0);
  CheckNumberOfAllocations(heap, "function f2() { return [1, 2]; }; f2()", 1,
                           0);
  CheckNumberOfAllocations(heap, "function f3() { return [[1], [2]]; }; f3()",
                           1, 2);

  CheckNumberOfAllocations(heap,
                           "function f4() { "
                           "return [0, [1, 1.1, 1.2, "
                           "], 1.5, [2.1, 2.2], 3];"
                           "}; f4();",
                           1, 2);

  // No allocation sites within IIFE/top-level
  CheckNumberOfAllocations(heap,
                           R"(
                            (function f4() {
                              return [ 0, [ 1, 1.1, 1.2,], 1.5, [2.1, 2.2], 3 ];
                            })();
                            )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                            l = [ 1, 2, 3, 4];
                            )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                            a = [];
                            )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                            (function f4() {
                              return [];
                            })();
                            )",
                           0, 0);

  // Object literals have lazy AllocationSites
  CheckNumberOfAllocations(heap, "function f5() { return {}; }; f5(); ", 0, 0);

  // No AllocationSites are created for the empty object literal.
  for (int i = 0; i < 5; i++) {
    CheckNumberOfAllocations(heap, "f5(); ", 0, 0);
  }

  CheckNumberOfAllocations(heap, "function f6() { return {a:1}; }; f6(); ", 0,
                           0);

  // Second call installs the (lazy) site.
  CheckNumberOfAllocations(heap, "f6(); ", 1, 0);

  CheckNumberOfAllocations(heap, "function f7() { return {a:1, b:2}; }; f7(); ",
                           0, 0);
  CheckNumberOfAllocations(heap, "f7(); ", 1, 0);

  // No Allocation sites are created for object subliterals
  CheckNumberOfAllocations(heap,
                           "function f8() {"
                           "return {a:{}, b:{ a:2, c:{ d:{f:{}}} } }; "
                           "}; f8(); ",
                           0, 0);
  CheckNumberOfAllocations(heap, "f8(); ", 1, 0);

  // We currently eagerly create allocation sites if there are sub-arrays.
  // Allocation sites are created only for array subliterals
  CheckNumberOfAllocations(heap,
                           "function f9() {"
                           "return {a:[1, 2, 3], b:{ a:2, c:{ d:{f:[]} } }}; "
                           "}; f9(); ",
                           1, 2);

  // No new AllocationSites created on the second invocation.
  CheckNumberOfAllocations(heap, "f9(); ", 0, 0);

  // No allocation sites for literals in an iife/top level code even if it has
  // array subliterals
  CheckNumberOfAllocations(heap,
                           R"(
                            (function f10() {
                              return {a: [1], b: [2]};
                            })();
                            )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                            l = {
                              a: 1,
                              b: {
                                c: [5],
                              }
                            };
                            )",
                           0, 0);

  // Eagerly create allocation sites for literals within a loop of iife or
  // top-level code
  CheckNumberOfAllocations(heap,
                           R"(
                            (function f11() {
                              while(true) {
                                return {a: [1], b: [2]};
                              }
                            })();
                            )",
                           1, 2);

  CheckNumberOfAllocations(heap,
                           R"(
                            for (i = 0; i < 1; ++i) {
                              l = {
                                a: 1,
                                b: {
                                  c: [5],
                                }
                              };
                            }
                            )",
                           1, 1);
}
3942 :
// Checks that cells embedded in optimized code are weak: once the function
// (created inside an IIFE so its context can die) becomes unreachable, GC
// marks its code for deoptimization and clears the embedded objects.
TEST(CellsInOptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "bar = (function() {"
        "  function bar() {"
        "    return foo(1);"
        "  };"
        "  var foo = function(x) { with (x) { return 1 + x; } };"
        "  %NeverOptimizeFunction(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  %OptimizeFunctionOnNextCall(bar);"
        "  bar(foo);"
        "  return bar;})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive past this scope, not the function.
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
3986 :
3987 :
// Checks that objects embedded in optimized code are weak: after the
// optimized function dies, GC marks its code for deoptimization and clears
// the embedded object references.
TEST(ObjectsInOptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "function bar() {"
        "  return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive past this scope, not the function.
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4029 :
// Same weakness check as above, but the embedded function starts in new
// space and is first promoted to old space by two scavenges before the
// full GCs run; the optimized code must survive the promotion un-deopted
// and then be cleared once the function dies.
TEST(NewSpaceObjectsInOptimizedCode) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(isolate);
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(isolate);

    CompileRun(
        "var foo;"
        "var bar;"
        "(function() {"
        "  function foo_func(x) { with (x) { return 1 + x; } };"
        "  %NeverOptimizeFunction(foo_func);"
        "  function bar_func() {"
        "    return foo(1);"
        "  };"
        "  bar = bar_func;"
        "  foo = foo_func;"
        "  bar_func();"
        "  bar_func();"
        "  bar_func();"
        "  %OptimizeFunctionOnNextCall(bar_func);"
        "  bar_func();"
        "})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));

    Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("foo"))
                                           .ToLocalChecked())));

    // Two scavenges promote foo out of new space.
    CHECK(Heap::InNewSpace(*foo));
    CcTest::CollectGarbage(NEW_SPACE);
    CcTest::CollectGarbage(NEW_SPACE);
    CHECK(!Heap::InNewSpace(*foo));
#ifdef VERIFY_HEAP
    CcTest::heap()->Verify();
#endif
    CHECK(!bar->code()->marked_for_deoptimization());
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4090 :
// Variant of ObjectsInOptimizedCodeAreWeak where the code is eagerly
// deoptimized (via %DeoptimizeFunction) before the GCs run; its embedded
// objects must still be cleared once the function dies.
TEST(ObjectsInEagerlyDeoptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "function bar() {"
        "  return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();"
        "%DeoptimizeFunction(bar);");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Already marked by the eager deopt above, before any GC.
  CHECK(code->marked_for_deoptimization());

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4135 :
4136 16 : static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
4137 : const char* name) {
4138 : EmbeddedVector<char, 256> source;
4139 : SNPrintF(source,
4140 : "function %s() { return 0; }"
4141 : "%s(); %s();"
4142 : "%%OptimizeFunctionOnNextCall(%s);"
4143 16 : "%s();", name, name, name, name, name);
4144 16 : CompileRun(source.start());
4145 : i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
4146 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4147 : CcTest::global()
4148 48 : ->Get(isolate->GetCurrentContext(), v8_str(name))
4149 32 : .ToLocalChecked())));
4150 16 : return fun;
4151 : }
4152 :
4153 8 : static int GetCodeChainLength(Code code) {
4154 : int result = 0;
4155 32 : while (code->next_code_link()->IsCode()) {
4156 4 : result++;
4157 4 : code = Code::cast(code->next_code_link());
4158 : }
4159 8 : return result;
4160 : }
4161 :
4162 :
// Checks that the next_code_link chain holds code weakly: after the function
// owning a link in the chain dies, a full GC shortens the chain by one.
TEST(NextCodeLinkIsWeak) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  CcTest::CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    // "mortal" is optimized first, so "immortal"'s code links to it.
    Handle<JSFunction> mortal =
        OptimizeDummyFunction(CcTest::isolate(), "mortal");
    Handle<JSFunction> immortal =
        OptimizeDummyFunction(CcTest::isolate(), "immortal");
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code(), isolate));
    CompileRun("mortal = null; immortal = null;");
  }
  CcTest::CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}
4192 :
// Checks that when optimized code dies, the next_code_link slot in its
// surviving CodeDataContainer is cleared (reset to undefined) by the GC.
TEST(NextCodeLinkInCodeDataContainerIsCleared) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<CodeDataContainer> code_data_container;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal1 =
        OptimizeDummyFunction(CcTest::isolate(), "mortal1");
    Handle<JSFunction> mortal2 =
        OptimizeDummyFunction(CcTest::isolate(), "mortal2");
    CHECK_EQ(mortal2->code()->next_code_link(), mortal1->code());
    // Escape only the CodeDataContainer; both functions (and their code)
    // become unreachable once the inner scope closes and the globals are
    // nulled out below.
    code_data_container = scope.CloseAndEscape(Handle<CodeDataContainer>(
        mortal2->code()->code_data_container(), isolate));
    CompileRun("mortal1 = null; mortal2 = null;");
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK(code_data_container->next_code_link()->IsUndefined(isolate));
}
4217 :
// Assembles a tiny stack-neutral code object (push undefined twice, drop
// both) and wraps it as an OPTIMIZED_FUNCTION Code, so tests can splice it
// into the optimized-code list without going through the compiler.
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                      ExternalAssemblerBuffer(buffer, sizeof(buffer)));
  CodeDesc desc;
  masm.Push(isolate->factory()->undefined_value());
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(2);
  masm.GetCode(isolate, &desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::OPTIMIZED_FUNCTION, masm.CodeObject());
  CHECK(code->IsCode());
  return code;
}
4232 :
4233 :
// Checks weakness of next_code_link using hand-built code objects: a dead
// link in the native context's OPTIMIZED_CODE_LIST is skipped after GC, so
// the surviving head links straight to the old list head.
TEST(NextCodeLinkIsWeak2) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  CcTest::CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    // Build the chain immortal -> mortal -> old_head and install immortal
    // as the new list head. Only immortal escapes the scope.
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    new_head = scope.CloseAndEscape(immortal);
  }
  CcTest::CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}
4259 :
4260 :
// Set by ClearWeakIC when the GC runs the weak callback; each weakness test
// resets it to false before forcing a GC and asserts it afterwards.
static bool weak_ic_cleared = false;
4262 :
// Weak callback installed via Persistent::SetWeak in the weakness tests.
// Records that it ran and resets the persistent handle (required by the
// SetWeak contract before the callback returns).
static void ClearWeakIC(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  printf("clear weak is called\n");
  weak_ic_cleared = true;
  data.GetParameter()->Reset();
}
4269 :
4270 :
// Checks that the constructor recorded in createObj's call feedback is held
// weakly: once the only strong reference (the persistent handle) is dropped,
// GC clears the feedback slot, and a later call with a fresh constructor
// makes the slot weak (monomorphic) again.
TEST(WeakFunctionInConstructor) {
  if (FLAG_lite_mode) return;
  if (FLAG_always_opt) return;
  // Disable stress modes that would trigger extra GCs and perturb the
  // feedback-slot state the test inspects.
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  CompileRun(
      "function createObj(obj) {"
      " return new obj();"
      "}");
  i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(env.local(), v8_str("createObj"))
              .ToLocalChecked())));

  v8::Persistent<v8::Object> garbage;
  {
    // The IIFE returns 'hat', so the persistent handle is the only strong
    // reference keeping the constructor alive after this scope closes.
    v8::HandleScope scope(isolate);
    const char* source =
        " (function() {"
        " function hat() { this.x = 5; }"
        " createObj(hat);"
        " createObj(hat);"
        " return hat;"
        " })();";
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  CcTest::CollectAllGarbage();
  CHECK(weak_ic_cleared);

  // We've determined the constructor in createObj has had its weak cell
  // cleared. Now, verify that one additional call with a new function
  // allows monomorphicity.
  Handle<FeedbackVector> feedback_vector =
      Handle<FeedbackVector>(createObj->feedback_vector(), CcTest::i_isolate());
  // The slot may take a few GC cycles to transition from weak to cleared.
  for (int i = 0; i < 20; i++) {
    MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
    CHECK(slot_value->IsWeakOrCleared());
    if (slot_value->IsCleared()) break;
    CcTest::CollectAllGarbage();
  }

  MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
  CHECK(slot_value->IsCleared());
  CompileRun(
      "function coat() { this.x = 6; }"
      "createObj(coat);");
  slot_value = feedback_vector->Get(FeedbackSlot(0));
  CHECK(slot_value->IsWeak());
}
4330 :
4331 :
// Checks that the value returned by execution of the source is weak:
// the result is stashed in a persistent handle (the only strong reference
// once the inner scope closes), made weak, and a full GC must invoke the
// weak callback (observed through the weak_ic_cleared flag).
void CheckWeakness(const char* source) {
  // Disable stress modes so extra GCs don't fire before the SetWeak call.
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    // Inner scope: no local handle to the result survives past this block.
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  CcTest::CollectAllGarbage();
  CHECK(weak_ic_cleared);
}
4353 :
4354 :
4355 : // Each of the following "weak IC" tests creates an IC that embeds a map with
4356 : // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// Monomorphic load IC: the prototype embedded in the IC's map must die on GC.
TEST(WeakMapInMonomorphicLoadIC) {
  CheckWeakness("function loadIC(obj) {"
                " return obj.name;"
                "}"
                " (function() {"
                " var proto = {'name' : 'weak'};"
                " var obj = Object.create(proto);"
                " loadIC(obj);"
                " loadIC(obj);"
                " loadIC(obj);"
                " return proto;"
                " })();");
}
4370 :
4371 :
// Polymorphic load IC (two receiver maps): proto must still die on GC.
TEST(WeakMapInPolymorphicLoadIC) {
  CheckWeakness(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " loadIC(poly);"
      " return proto;"
      " })();");
}
4389 :
4390 :
// Monomorphic keyed-load IC: proto must die on GC.
TEST(WeakMapInMonomorphicKeyedLoadIC) {
  CheckWeakness("function keyedLoadIC(obj, field) {"
                " return obj[field];"
                "}"
                " (function() {"
                " var proto = {'name' : 'weak'};"
                " var obj = Object.create(proto);"
                " keyedLoadIC(obj, 'name');"
                " keyedLoadIC(obj, 'name');"
                " keyedLoadIC(obj, 'name');"
                " return proto;"
                " })();");
}
4404 :
4405 :
// Polymorphic keyed-load IC (two receiver maps): proto must die on GC.
TEST(WeakMapInPolymorphicKeyedLoadIC) {
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      " return obj[field];"
      "}"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " keyedLoadIC(obj, 'name');"
      " keyedLoadIC(obj, 'name');"
      " keyedLoadIC(obj, 'name');"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " keyedLoadIC(poly, 'name');"
      " return proto;"
      " })();");
}
4423 :
4424 :
// Monomorphic store IC: proto must die on GC.
TEST(WeakMapInMonomorphicStoreIC) {
  CheckWeakness("function storeIC(obj, value) {"
                " obj.name = value;"
                "}"
                " (function() {"
                " var proto = {'name' : 'weak'};"
                " var obj = Object.create(proto);"
                " storeIC(obj, 'x');"
                " storeIC(obj, 'x');"
                " storeIC(obj, 'x');"
                " return proto;"
                " })();");
}
4438 :
4439 :
// Polymorphic store IC (two receiver maps): proto must die on GC.
TEST(WeakMapInPolymorphicStoreIC) {
  CheckWeakness(
      "function storeIC(obj, value) {"
      " obj.name = value;"
      "}"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " storeIC(obj, 'x');"
      " storeIC(obj, 'x');"
      " storeIC(obj, 'x');"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " storeIC(poly, 'x');"
      " return proto;"
      " })();");
}
4457 :
4458 :
// Monomorphic keyed-store IC: proto must die on GC.
// NOTE(review): keyedStoreIC declares three parameters but every call site
// passes only two, so |value| is undefined and obj['x'] = undefined is
// stored. The keyed-store IC is still exercised; confirm whether a real
// value was intended (compare WeakMapInMonomorphicStoreIC).
TEST(WeakMapInMonomorphicKeyedStoreIC) {
  CheckWeakness("function keyedStoreIC(obj, field, value) {"
                " obj[field] = value;"
                "}"
                " (function() {"
                " var proto = {'name' : 'weak'};"
                " var obj = Object.create(proto);"
                " keyedStoreIC(obj, 'x');"
                " keyedStoreIC(obj, 'x');"
                " keyedStoreIC(obj, 'x');"
                " return proto;"
                " })();");
}
4472 :
4473 :
// Polymorphic keyed-store IC (two receiver maps): proto must die on GC.
// NOTE(review): as in the monomorphic variant, keyedStoreIC is invoked with
// two arguments, so |value| is undefined at every call site — confirm this
// is intentional.
TEST(WeakMapInPolymorphicKeyedStoreIC) {
  CheckWeakness(
      "function keyedStoreIC(obj, field, value) {"
      " obj[field] = value;"
      "}"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " keyedStoreIC(obj, 'x');"
      " keyedStoreIC(obj, 'x');"
      " keyedStoreIC(obj, 'x');"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " keyedStoreIC(poly, 'x');"
      " return proto;"
      " })();");
}
4491 :
4492 :
// Monomorphic compare-nil IC (obj == null): proto must die on GC.
TEST(WeakMapInMonomorphicCompareNilIC) {
  CheckWeakness("function compareNilIC(obj) {"
                " return obj == null;"
                "}"
                " (function() {"
                " var proto = {'name' : 'weak'};"
                " var obj = Object.create(proto);"
                " compareNilIC(obj);"
                " compareNilIC(obj);"
                " compareNilIC(obj);"
                " return proto;"
                " })();");
}
4506 :
4507 :
4508 8 : Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4509 8 : Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4510 : Handle<Object> obj =
4511 16 : Object::GetProperty(isolate, isolate->global_object(), str)
4512 16 : .ToHandleChecked();
4513 8 : return Handle<JSFunction>::cast(obj);
4514 : }
4515 :
4516 16 : void CheckIC(Handle<JSFunction> function, int slot_index,
4517 : InlineCacheState state) {
4518 16 : FeedbackVector vector = function->feedback_vector();
4519 : FeedbackSlot slot(slot_index);
4520 : FeedbackNexus nexus(vector, slot);
4521 16 : CHECK_EQ(nexus.StateFromFeedback(), state);
4522 16 : }
4523 :
// Checks that a monomorphic load IC survives a full GC in the monomorphic
// state: GC may clear the weak feedback, but re-running the same code must
// bring the IC back to MONOMORPHIC, not POLYMORPHIC or MEGAMORPHIC.
TEST(MonomorphicStaysMonomorphicAfterGC) {
  if (!FLAG_use_ic) return;
  if (FLAG_always_opt) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      "function testIC() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope: the objects created by testIC become garbage afterwards.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CcTest::CollectAllGarbage();
  CheckIC(loadIC, 0, MONOMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC, 0, MONOMORPHIC);
}
4556 :
4557 :
// Like MonomorphicStaysMonomorphicAfterGC, but the load IC sees two receiver
// maps and must return to POLYMORPHIC after a full GC plus a re-run.
TEST(PolymorphicStaysPolymorphicAfterGC) {
  if (!FLAG_use_ic) return;
  if (FLAG_always_opt) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      "function testIC() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " loadIC(poly);"
      " return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CcTest::CollectAllGarbage();
  CheckIC(loadIC, 0, POLYMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC, 0, POLYMORPHIC);
}
4593 :
4594 : #ifdef DEBUG
// Regression test (DEBUG-only): calls an optimized function with an
// allocation timeout of 1 so a GC fires during its allocations, checking
// that the add-instruction result handling survives new-space promotion.
TEST(AddInstructionChangesNewSpacePromotion) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  FLAG_stress_compaction = true;
  // Trigger a GC every 1000 allocations while warming up.
  FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  if (!FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  LocalContext env;
  CompileRun(
      "function add(a, b) {"
      " return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      " var object = {a: null, b: null};"
      " var result = add(1.5, x | 0);"
      " object.a = result;"
      " oldSpaceObject = object;"
      " return object;"
      "}"
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Local<v8::Object> global = CcTest::global();
  v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
      global->Get(env.local(), v8_str("crash")).ToLocalChecked());
  v8::Local<v8::Value> args1[] = {v8_num(1)};
  // Force the very next allocation inside crash() to trigger a GC.
  heap->DisableInlineAllocation();
  heap->set_allocation_timeout(1);
  g->Call(env.local(), global, 1, args1).ToLocalChecked();
  CcTest::CollectAllGarbage();
}
4635 :
4636 :
// Fatal-error handler for CEntryStubOOM: the test passes (exit code 0) only
// if the fatal error came from the expected OOM location.
void OnFatalErrorExpectOOM(const char* location, const char* message) {
  // Exit with 0 if the location matches our expectation.
  exit(strcmp(location, "CALL_AND_RETRY_LAST"));
}
4641 :
4642 :
// Regression test (DEBUG-only): an allocation timeout of 1 during a runtime
// call (Array.prototype.unshift through the CEntry stub) must either
// complete (result is a number) or fail at the expected OOM location,
// handled by OnFatalErrorExpectOOM.
TEST(CEntryStubOOM) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Local<v8::Value> result = CompileRun(
      "%SetAllocationTimeout(1, 1);"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  CHECK(result->IsNumber());
}
4657 :
4658 : #endif // DEBUG
4659 :
4660 :
// No-op interrupt callback; Regress357137 only needs an interrupt request to
// be pending when the test script runs.
static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
4662 :
4663 :
// Native function installed as "interrupt" on the global template in
// Regress357137; queues the no-op interrupt on the test isolate.
static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, nullptr);
}
4667 :
// Regression test for crbug.com/538257: compaction must abort gracefully
// (instead of OOMing) on a tightly limited heap where every page was forced
// to be an evacuation candidate.
HEAP_TEST(Regress538257) {
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  v8::Isolate::CreateParams create_params;
  // Set heap limits.
  create_params.constraints.set_max_semi_space_size_in_kb(1024);
#ifdef DEBUG
  create_params.constraints.set_max_old_space_size(20);
#else
  create_params.constraints.set_max_old_space_size(6);
#endif
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  isolate->Enter();
  {
    i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
    Heap* heap = i_isolate->heap();
    HandleScope handle_scope(i_isolate);
    PagedSpace* old_space = heap->old_space();
    const int kMaxObjects = 10000;
    const int kFixedArrayLen = 512;
    Handle<FixedArray> objects[kMaxObjects];
    // Fill old space until it can no longer expand, marking every page an
    // evacuation candidate as we go.
    for (int i = 0; (i < kMaxObjects) &&
                    heap->CanExpandOldGeneration(old_space->AreaSize());
         i++) {
      objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
      heap::ForceEvacuationCandidate(Page::FromHeapObject(*objects[i]));
    }
    heap::SimulateFullSpace(old_space);
    CcTest::CollectAllGarbage();
    // If we get this far, we've successfully aborted compaction. Any further
    // allocations might trigger OOM.
  }
  isolate->Exit();
  isolate->Dispose();
}
4704 :
4705 :
// Regression test for crbug.com/357137: an interrupt pending while a large
// eval'ed closure is created must not corrupt context variable access; the
// returned closure still reads v0 == 42.
TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // Expose RequestInterrupt to script as "interrupt".
  global->Set(
      v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
  CHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
}
4727 :
4728 :
// Regression test for crbug.com/507979: shrinking an object while a
// filtering HeapIterator is live creates a filler that shares mark bits with
// the following object; iteration must still visit every object safely.
TEST(Regress507979) {
  const int kFixedArrayLen = 10;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  CHECK(Heap::InNewSpace(*o1));
  CHECK(Heap::InNewSpace(*o2));

  // The iterator is created BEFORE the shrink on purpose — the bug involved
  // the filter seeing the filler left behind by Shrink().
  HeapIterator it(isolate->heap(), i::HeapIterator::kFilterUnreachable);

  // Replace parts of an object placed before a live object with a filler. This
  // way the filler object shares the mark bits with the following live object.
  o1->Shrink(isolate, kFixedArrayLen - 1);

  for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
    // Let's not optimize the loop away.
    CHECK_NE(obj->address(), kNullAddress);
  }
}
4751 :
// Regression test for crbug.com/388880: migrating an object that sits flush
// against the end of a page to a larger map, while incremental marking is
// active, must not crash in the live-bytes accounting.
TEST(Regress388880) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_incremental_marking = false;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  // map2 adds one field to map1, so instances of map2 are larger.
  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<String> name = factory->NewStringFromStaticChars("foo");
  name = factory->InternalizeString(name);
  Handle<Map> map2 =
      Map::CopyWithField(isolate, map1, name, FieldType::Any(isolate), NONE,
                         PropertyConstness::kMutable, Representation::Tagged(),
                         OMIT_TRANSITION)
          .ToHandleChecked();

  size_t desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate padding objects in old pointer space so that the object
  // allocated afterwards ends exactly at the end of the page.
  heap::SimulateFullSpace(heap->old_space());
  size_t padding_size =
      desired_offset - MemoryChunkLayout::ObjectStartOffsetInDataPage();
  heap::CreatePadding(heap, static_cast<int>(padding_size), TENURED);

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
  o->set_raw_properties_or_hash(*factory->empty_fixed_array());

  // Ensure that the object allocated where we need it.
  Page* page = Page::FromHeapObject(*o);
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}
4801 :
4802 :
// Regression test for v8:3631: growing a WeakMap's backing store after the
// old store was incrementally marked black must not lose/corrupt entries
// when the old-space GC completes.
TEST(Regress3631) {
  if (!FLAG_incremental_marking) return;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  v8::Local<v8::Value> result = CompileRun(
      "var weak_map = new WeakMap();"
      "var future_keys = [];"
      "for (var i = 0; i < 50; i++) {"
      " var key = {'k' : i + 0.1};"
      " weak_map.set(key, 1);"
      " future_keys.push({'x' : i + 0.2});"
      "}"
      "weak_map");
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking(
        i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
  }
  // Incrementally mark the backing store.
  Handle<JSReceiver> obj =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Handle<JSWeakCollection> weak_map(JSWeakCollection::cast(*obj), isolate);
  HeapObject weak_map_table = HeapObject::cast(weak_map->table());
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  // Step the marker until the current backing store is black (or marking
  // finishes without reaching it).
  while (!marking_state->IsBlack(weak_map_table) && !marking->IsStopped()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
  }
  // Stash the backing store in a handle.
  Handle<Object> save(weak_map->table(), isolate);
  // The following line will update the backing store.
  CompileRun(
      "for (var i = 0; i < 50; i++) {"
      " weak_map.set(future_keys[i], i);"
      "}");
  heap->incremental_marking()->set_should_hurry(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
4844 :
4845 :
// Regression test for crbug.com/442710: Array.prototype.shift() leaves a
// filler at the start of the backing store; a subsequent old-space GC must
// handle it without crashing.
TEST(Regress442710) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  Handle<JSArray> array = factory->NewJSArray(2);

  // Expose the array to script as a global so the shift() below mutates it.
  Handle<String> name = factory->InternalizeUtf8String("testArray");
  Object::SetProperty(isolate, global, name, array, LanguageMode::kSloppy)
      .Check();
  CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
  CcTest::CollectGarbage(OLD_SPACE);
}
4862 :
4863 :
// Test that the number-string cache has not been resized in the snapshot.
// The cache stores (number, string) pairs, hence the factor of two.
HEAP_TEST(NumberStringCacheSize) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  // Only meaningful when running from a snapshot.
  if (!isolate->snapshot_available()) return;
  Heap* heap = isolate->heap();
  CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
           heap->number_string_cache()->length());
}
4873 :
4874 :
// Regression test for v8:3877: a prototype must be kept alive while any
// live map still points at it, and become collectable once the last such
// map is replaced.
TEST(Regress3877) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  CompileRun("function cls() { this.x = 10; }");
  // Holds a weak reference to cls.prototype so we can observe its liveness.
  Handle<WeakFixedArray> weak_prototype_holder = factory->NewWeakFixedArray(1);
  {
    HandleScope inner_scope(isolate);
    v8::Local<v8::Value> result = CompileRun("cls.prototype");
    Handle<JSReceiver> proto =
        v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
    weak_prototype_holder->Set(0, HeapObjectReference::Weak(*proto));
  }
  CHECK(!weak_prototype_holder->Get(0)->IsCleared());
  CompileRun(
      "var a = { };"
      "a.x = new cls();"
      "cls.prototype = null;");
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }
  // The map of a.x keeps prototype alive
  CHECK(!weak_prototype_holder->Get(0)->IsCleared());
  // Change the map of a.x and make the previous map garbage collectable.
  CompileRun("a.x.__proto__ = {};");
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }
  CHECK(weak_prototype_holder->Get(0)->IsCleared());
}
4906 :
// Creates a fresh map with a script-allocated prototype, registers it in the
// heap's retained-maps list, and returns a WeakFixedArray whose slot 0 holds
// a weak reference to the map — the caller uses it to observe when the map
// is collected.
Handle<WeakFixedArray> AddRetainedMap(Isolate* isolate, Heap* heap) {
  HandleScope inner_scope(isolate);
  Handle<Map> map = Map::Create(isolate, 1);
  v8::Local<v8::Value> result =
      CompileRun("(function () { return {x : 10}; })();");
  Handle<JSReceiver> proto =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Map::SetPrototype(isolate, map, proto);
  heap->AddRetainedMap(map);
  Handle<WeakFixedArray> array = isolate->factory()->NewWeakFixedArray(1);
  array->Set(0, HeapObjectReference::Weak(*map));
  // Only the weak holder escapes; the map itself has no strong reference.
  return inner_scope.CloseAndEscape(array);
}
4920 :
4921 :
// Verifies the --retain-maps-for-n-gc policy: an otherwise-unreferenced
// retained map survives exactly |n| mark-compact GCs (with incremental
// marking) and is cleared on the (n+1)-th.
void CheckMapRetainingFor(int n) {
  FLAG_retain_maps_for_n_gc = n;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Handle<WeakFixedArray> array_with_map = AddRetainedMap(isolate, heap);
  CHECK(array_with_map->Get(0)->IsWeak());
  // The map must survive the first n GCs...
  for (int i = 0; i < n; i++) {
    heap::SimulateIncrementalMarking(heap);
    CcTest::CollectGarbage(OLD_SPACE);
  }
  CHECK(array_with_map->Get(0)->IsWeak());
  // ...and die on the next one.
  heap::SimulateIncrementalMarking(heap);
  CcTest::CollectGarbage(OLD_SPACE);
  CHECK(array_with_map->Get(0)->IsCleared());
}
4937 :
4938 :
// Exercises CheckMapRetainingFor with the default retention count and a few
// boundary values (0, 1, 7).
TEST(MapRetaining) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
  CheckMapRetainingFor(0);
  CheckMapRetainingFor(1);
  CheckMapRetainingFor(7);
}
4949 :
// Checks that GC preprocesses a captured stack trace: the raw Code reference
// stored in the trace (element 3) is replaced by a Smi (code offset) and no
// Code objects remain anywhere in the trace afterwards.
TEST(PreprocessStackTrace) {
  // Do not automatically trigger early GC.
  FLAG_gc_interval = -1;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun("throw new Error();");
  CHECK(try_catch.HasCaught());
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
  // The raw trace is stored on the error object under a private symbol.
  Handle<Name> key = isolate->factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      Object::GetProperty(isolate, exception, key).ToHandleChecked();
  Handle<Object> code =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(code->IsCode());

  CcTest::CollectAllAvailableGarbage();

  // After GC the same element must have been converted to a Smi.
  Handle<Object> pos =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(pos->IsSmi());

  Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
  int array_length = Smi::ToInt(stack_trace_array->length());
  for (int i = 0; i < array_length; i++) {
    Handle<Object> element =
        Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
    CHECK(!element->IsCode());
  }
}
4981 :
4982 :
4983 215 : void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
4984 215 : CHECK_LE(FixedArray::kHeaderSize, bytes);
4985 215 : CHECK(IsAligned(bytes, kTaggedSize));
4986 : Factory* factory = isolate->factory();
4987 : HandleScope scope(isolate);
4988 : AlwaysAllocateScope always_allocate(isolate);
4989 : int elements =
4990 215 : static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
4991 : Handle<FixedArray> array = factory->NewFixedArray(
4992 215 : elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
4993 430 : CHECK((space == NEW_SPACE) == Heap::InNewSpace(*array));
4994 215 : CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
4995 215 : }
4996 :
4997 :
// Checks Heap::NewSpaceAllocationCounter(): it advances by exactly the bytes
// allocated in new space, is unaffected by scavenges, and wraps correctly
// near the size_t maximum (differences remain valid modulo 2^bits).
TEST(NewSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->NewSpaceAllocationCounter();
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);  // Ensure new space is empty.
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, NEW_SPACE);
  size_t counter2 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(kSize, counter2 - counter1);
  // A scavenge must not change the counter.
  CcTest::CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(0U, counter3 - counter2);
  // Test counter overflow.
  size_t max_counter = static_cast<size_t>(-1);
  heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->NewSpaceAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, NEW_SPACE);
    size_t counter = heap->NewSpaceAllocationCounter();
    // Unsigned subtraction stays correct across the wrap-around.
    CHECK_EQ(kSize, counter - start);
    start = counter;
  }
}
5024 :
5025 :
5026 28342 : TEST(OldSpaceAllocationCounter) {
5027 5 : CcTest::InitializeVM();
5028 5 : v8::HandleScope scope(CcTest::isolate());
5029 : Isolate* isolate = CcTest::i_isolate();
5030 5 : Heap* heap = isolate->heap();
5031 : size_t counter1 = heap->OldGenerationAllocationCounter();
5032 5 : CcTest::CollectGarbage(NEW_SPACE);
5033 5 : CcTest::CollectGarbage(NEW_SPACE);
5034 : const size_t kSize = 1024;
5035 5 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5036 : size_t counter2 = heap->OldGenerationAllocationCounter();
5037 : // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
5038 5 : CHECK_LE(kSize, counter2 - counter1);
5039 5 : CcTest::CollectGarbage(NEW_SPACE);
5040 : size_t counter3 = heap->OldGenerationAllocationCounter();
5041 5 : CHECK_EQ(0u, counter3 - counter2);
5042 5 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5043 5 : CcTest::CollectGarbage(OLD_SPACE);
5044 : size_t counter4 = heap->OldGenerationAllocationCounter();
5045 5 : CHECK_LE(kSize, counter4 - counter3);
5046 : // Test counter overflow.
5047 : size_t max_counter = static_cast<size_t>(-1);
5048 : heap->set_old_generation_allocation_counter_at_last_gc(max_counter -
5049 : 10 * kSize);
5050 : size_t start = heap->OldGenerationAllocationCounter();
5051 105 : for (int i = 0; i < 20; i++) {
5052 100 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5053 : size_t counter = heap->OldGenerationAllocationCounter();
5054 100 : CHECK_LE(kSize, counter - start);
5055 : start = counter;
5056 5 : }
5057 5 : }
5058 :
5059 :
5060 20 : static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
5061 : Isolate* isolate = CcTest::i_isolate();
5062 : Object message(
5063 20 : *reinterpret_cast<Address*>(isolate->pending_message_obj_address()));
5064 20 : CHECK(message->IsTheHole(isolate));
5065 20 : }
5066 :
5067 :
5068 28342 : TEST(MessageObjectLeak) {
5069 5 : CcTest::InitializeVM();
5070 5 : v8::Isolate* isolate = CcTest::isolate();
5071 5 : v8::HandleScope scope(isolate);
5072 5 : v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5073 : global->Set(
5074 : v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
5075 : .ToLocalChecked(),
5076 15 : v8::FunctionTemplate::New(isolate, CheckLeak));
5077 5 : v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
5078 : v8::Context::Scope cscope(context);
5079 :
5080 : const char* test =
5081 : "try {"
5082 : " throw 'message 1';"
5083 : "} catch (e) {"
5084 : "}"
5085 : "check();"
5086 : "L: try {"
5087 : " throw 'message 2';"
5088 : "} finally {"
5089 : " break L;"
5090 : "}"
5091 : "check();";
5092 : CompileRun(test);
5093 :
5094 : const char* flag = "--turbo-filter=*";
5095 5 : FlagList::SetFlagsFromString(flag, StrLength(flag));
5096 5 : FLAG_always_opt = true;
5097 :
5098 5 : CompileRun(test);
5099 5 : }
5100 :
5101 :
5102 10 : static void CheckEqualSharedFunctionInfos(
5103 : const v8::FunctionCallbackInfo<v8::Value>& args) {
5104 10 : Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
5105 10 : Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
5106 10 : Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
5107 10 : Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
5108 30 : CHECK(fun1->shared() == fun2->shared());
5109 10 : }
5110 :
5111 :
5112 10 : static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
5113 : Isolate* isolate = CcTest::i_isolate();
5114 10 : Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
5115 10 : Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
5116 : // Bytecode is code too.
5117 20 : SharedFunctionInfo::DiscardCompiled(isolate, handle(fun->shared(), isolate));
5118 20 : fun->set_code(*BUILTIN_CODE(isolate, CompileLazy));
5119 10 : CcTest::CollectAllAvailableGarbage();
5120 10 : }
5121 :
5122 :
5123 28342 : TEST(CanonicalSharedFunctionInfo) {
5124 5 : CcTest::InitializeVM();
5125 5 : v8::Isolate* isolate = CcTest::isolate();
5126 5 : v8::HandleScope scope(isolate);
5127 5 : v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5128 : global->Set(isolate, "check", v8::FunctionTemplate::New(
5129 10 : isolate, CheckEqualSharedFunctionInfos));
5130 : global->Set(isolate, "remove",
5131 10 : v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
5132 5 : v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
5133 : v8::Context::Scope cscope(context);
5134 : CompileRun(
5135 : "function f() { return function g() {}; }"
5136 : "var g1 = f();"
5137 : "remove(f);"
5138 : "var g2 = f();"
5139 : "check(g1, g2);");
5140 :
5141 : CompileRun(
5142 : "function f() { return (function() { return function g() {}; })(); }"
5143 : "var g1 = f();"
5144 : "remove(f);"
5145 : "var g2 = f();"
5146 5 : "check(g1, g2);");
5147 5 : }
5148 :
5149 :
5150 28342 : TEST(ScriptIterator) {
5151 5 : CcTest::InitializeVM();
5152 5 : v8::HandleScope scope(CcTest::isolate());
5153 : Isolate* isolate = CcTest::i_isolate();
5154 5 : Heap* heap = CcTest::heap();
5155 10 : LocalContext context;
5156 :
5157 5 : CcTest::CollectAllGarbage();
5158 :
5159 : int script_count = 0;
5160 : {
5161 5 : HeapIterator it(heap);
5162 83896 : for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
5163 41943 : if (obj->IsScript()) script_count++;
5164 5 : }
5165 : }
5166 :
5167 : {
5168 5 : Script::Iterator iterator(isolate);
5169 40 : for (Script script = iterator.Next(); !script.is_null();
5170 : script = iterator.Next()) {
5171 15 : script_count--;
5172 : }
5173 : }
5174 :
5175 10 : CHECK_EQ(0, script_count);
5176 5 : }
5177 :
5178 :
5179 28342 : TEST(SharedFunctionInfoIterator) {
5180 5 : CcTest::InitializeVM();
5181 5 : v8::HandleScope scope(CcTest::isolate());
5182 : Isolate* isolate = CcTest::i_isolate();
5183 5 : Heap* heap = CcTest::heap();
5184 10 : LocalContext context;
5185 :
5186 5 : CcTest::CollectAllGarbage();
5187 5 : CcTest::CollectAllGarbage();
5188 :
5189 : int sfi_count = 0;
5190 : {
5191 5 : HeapIterator it(heap);
5192 83000 : for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
5193 41495 : if (!obj->IsSharedFunctionInfo()) continue;
5194 3465 : sfi_count++;
5195 5 : }
5196 : }
5197 :
5198 : {
5199 5 : SharedFunctionInfo::GlobalIterator iterator(isolate);
5200 3475 : while (!iterator.Next().is_null()) sfi_count--;
5201 : }
5202 :
5203 10 : CHECK_EQ(0, sfi_count);
5204 5 : }
5205 :
5206 : // This is the same as Factory::NewByteArray, except it doesn't retry on
5207 : // allocation failure.
5208 80650 : AllocationResult HeapTester::AllocateByteArrayForTest(Heap* heap, int length,
5209 : PretenureFlag pretenure) {
5210 : DCHECK(length >= 0 && length <= ByteArray::kMaxLength);
5211 : int size = ByteArray::SizeFor(length);
5212 80650 : AllocationSpace space = heap->SelectSpace(pretenure);
5213 80650 : HeapObject result;
5214 : {
5215 80650 : AllocationResult allocation = heap->AllocateRaw(size, space);
5216 80650 : if (!allocation.To(&result)) return allocation;
5217 : }
5218 :
5219 : result->set_map_after_allocation(ReadOnlyRoots(heap).byte_array_map(),
5220 80640 : SKIP_WRITE_BARRIER);
5221 : ByteArray::cast(result)->set_length(length);
5222 80640 : ByteArray::cast(result)->clear_padding();
5223 80640 : return result;
5224 : }
5225 :
5226 28342 : HEAP_TEST(Regress587004) {
5227 : ManualGCScope manual_gc_scope;
5228 : #ifdef VERIFY_HEAP
5229 : FLAG_verify_heap = false;
5230 : #endif
5231 5 : CcTest::InitializeVM();
5232 10 : v8::HandleScope scope(CcTest::isolate());
5233 20 : Heap* heap = CcTest::heap();
5234 : Isolate* isolate = CcTest::i_isolate();
5235 : Factory* factory = isolate->factory();
5236 : const int N =
5237 : (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kTaggedSize;
5238 5 : Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
5239 10 : CHECK(heap->old_space()->Contains(*array));
5240 5 : Handle<Object> number = factory->NewHeapNumber(1.0);
5241 5 : CHECK(Heap::InNewSpace(*number));
5242 316950 : for (int i = 0; i < N; i++) {
5243 316950 : array->set(i, *number);
5244 : }
5245 5 : CcTest::CollectGarbage(OLD_SPACE);
5246 5 : heap::SimulateFullSpace(heap->old_space());
5247 5 : heap->RightTrimFixedArray(*array, N - 1);
5248 5 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5249 5 : ByteArray byte_array;
5250 : const int M = 256;
5251 : // Don't allow old space expansion. The test works without this flag too,
5252 : // but becomes very slow.
5253 : heap->set_force_oom(true);
5254 5 : while (AllocateByteArrayForTest(heap, M, TENURED).To(&byte_array)) {
5255 0 : for (int j = 0; j < M; j++) {
5256 : byte_array->set(j, 0x31);
5257 : }
5258 : }
5259 : // Re-enable old space expansion to avoid OOM crash.
5260 : heap->set_force_oom(false);
5261 5 : CcTest::CollectGarbage(NEW_SPACE);
5262 5 : }
5263 :
5264 28342 : HEAP_TEST(Regress589413) {
5265 5 : if (!FLAG_incremental_marking) return;
5266 5 : FLAG_stress_compaction = true;
5267 5 : FLAG_manual_evacuation_candidates_selection = true;
5268 5 : FLAG_parallel_compaction = false;
5269 : ManualGCScope manual_gc_scope;
5270 5 : CcTest::InitializeVM();
5271 10 : v8::HandleScope scope(CcTest::isolate());
5272 5 : Heap* heap = CcTest::heap();
5273 : // Get the heap in clean state.
5274 5 : CcTest::CollectGarbage(OLD_SPACE);
5275 5 : CcTest::CollectGarbage(OLD_SPACE);
5276 : Isolate* isolate = CcTest::i_isolate();
5277 : Factory* factory = isolate->factory();
5278 : // Fill the new space with byte arrays with elements looking like pointers.
5279 : const int M = 256;
5280 5 : ByteArray byte_array;
5281 19270 : while (AllocateByteArrayForTest(heap, M, NOT_TENURED).To(&byte_array)) {
5282 4930560 : for (int j = 0; j < M; j++) {
5283 : byte_array->set(j, 0x31);
5284 : }
5285 : // Add the array in root set.
5286 : handle(byte_array, isolate);
5287 : }
5288 : // Make sure the byte arrays will be promoted on the next GC.
5289 5 : CcTest::CollectGarbage(NEW_SPACE);
5290 : // This number is close to large free list category threshold.
5291 : const int N = 0x3EEE;
5292 : {
5293 : std::vector<FixedArray> arrays;
5294 : std::set<Page*> pages;
5295 5 : FixedArray array;
5296 : // Fill all pages with fixed arrays.
5297 : heap->set_force_oom(true);
5298 35 : while (AllocateFixedArrayForTest(heap, N, TENURED).To(&array)) {
5299 30 : arrays.push_back(array);
5300 60 : pages.insert(Page::FromHeapObject(array));
5301 : // Add the array in root set.
5302 : handle(array, isolate);
5303 : }
5304 : // Expand and full one complete page with fixed arrays.
5305 : heap->set_force_oom(false);
5306 25 : while (AllocateFixedArrayForTest(heap, N, TENURED).To(&array)) {
5307 20 : arrays.push_back(array);
5308 40 : pages.insert(Page::FromHeapObject(array));
5309 : // Add the array in root set.
5310 : handle(array, isolate);
5311 : // Do not expand anymore.
5312 : heap->set_force_oom(true);
5313 : }
5314 : // Expand and mark the new page as evacuation candidate.
5315 : heap->set_force_oom(false);
5316 : {
5317 : AlwaysAllocateScope always_allocate(isolate);
5318 5 : Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
5319 : Page* ec_page = Page::FromHeapObject(*ec_obj);
5320 5 : heap::ForceEvacuationCandidate(ec_page);
5321 : // Make all arrays point to evacuation candidate so that
5322 : // slots are recorded for them.
5323 110 : for (size_t j = 0; j < arrays.size(); j++) {
5324 50 : array = arrays[j];
5325 805550 : for (int i = 0; i < N; i++) {
5326 805500 : array->set(i, *ec_obj);
5327 : }
5328 : }
5329 : }
5330 5 : heap::SimulateIncrementalMarking(heap);
5331 110 : for (size_t j = 0; j < arrays.size(); j++) {
5332 50 : heap->RightTrimFixedArray(arrays[j], N - 1);
5333 : }
5334 : }
5335 : // Force allocation from the free list.
5336 : heap->set_force_oom(true);
5337 5 : CcTest::CollectGarbage(OLD_SPACE);
5338 : }
5339 :
5340 28342 : TEST(Regress598319) {
5341 5 : if (!FLAG_incremental_marking) return;
5342 : ManualGCScope manual_gc_scope;
5343 : // This test ensures that no white objects can cross the progress bar of large
5344 : // objects during incremental marking. It checks this by using Shift() during
5345 : // incremental marking.
5346 5 : CcTest::InitializeVM();
5347 10 : v8::HandleScope scope(CcTest::isolate());
5348 30 : Heap* heap = CcTest::heap();
5349 : Isolate* isolate = heap->isolate();
5350 :
5351 : const int kNumberOfObjects = kMaxRegularHeapObjectSize / kTaggedSize;
5352 :
5353 : struct Arr {
5354 5 : Arr(Isolate* isolate, int number_of_objects) {
5355 5 : root = isolate->factory()->NewFixedArray(1, TENURED);
5356 : {
5357 : // Temporary scope to avoid getting any other objects into the root set.
5358 5 : v8::HandleScope scope(CcTest::isolate());
5359 : Handle<FixedArray> tmp =
5360 5 : isolate->factory()->NewFixedArray(number_of_objects, TENURED);
5361 10 : root->set(0, *tmp);
5362 633930 : for (int i = 0; i < get()->length(); i++) {
5363 316960 : tmp = isolate->factory()->NewFixedArray(100, TENURED);
5364 316960 : get()->set(i, *tmp);
5365 5 : }
5366 : }
5367 5 : }
5368 :
5369 5071470 : FixedArray get() { return FixedArray::cast(root->get(0)); }
5370 :
5371 : Handle<FixedArray> root;
5372 5 : } arr(isolate, kNumberOfObjects);
5373 :
5374 10 : CHECK_EQ(arr.get()->length(), kNumberOfObjects);
5375 10 : CHECK(heap->lo_space()->Contains(arr.get()));
5376 10 : LargePage* page = heap->lo_space()->FindPage(arr.get()->address());
5377 5 : CHECK_NOT_NULL(page);
5378 :
5379 : // GC to cleanup state
5380 5 : CcTest::CollectGarbage(OLD_SPACE);
5381 5 : MarkCompactCollector* collector = heap->mark_compact_collector();
5382 5 : if (collector->sweeping_in_progress()) {
5383 5 : collector->EnsureSweepingCompleted();
5384 : }
5385 :
5386 10 : CHECK(heap->lo_space()->Contains(arr.get()));
5387 35 : IncrementalMarking* marking = heap->incremental_marking();
5388 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5389 10 : CHECK(marking_state->IsWhite(arr.get()));
5390 633925 : for (int i = 0; i < arr.get()->length(); i++) {
5391 633920 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5392 316960 : CHECK(marking_state->IsWhite(arr_value));
5393 : }
5394 :
5395 : // Start incremental marking.
5396 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5397 5 : if (marking->IsStopped()) {
5398 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5399 5 : i::GarbageCollectionReason::kTesting);
5400 : }
5401 5 : CHECK(marking->IsMarking());
5402 :
5403 : // Check that we have not marked the interesting array during root scanning.
5404 633925 : for (int i = 0; i < arr.get()->length(); i++) {
5405 633920 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5406 316960 : CHECK(marking_state->IsWhite(arr_value));
5407 : }
5408 :
5409 : // Now we search for a state where we are in incremental marking and have
5410 : // only partially marked the large object.
5411 1395 : while (!marking->IsComplete()) {
5412 : marking->Step(i::KB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5413 1395 : StepOrigin::kV8);
5414 2790 : if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->progress_bar() > 0) {
5415 10 : CHECK_NE(page->progress_bar(), arr.get()->Size());
5416 : {
5417 : // Shift by 1, effectively moving one white object across the progress
5418 : // bar, meaning that we will miss marking it.
5419 5 : v8::HandleScope scope(CcTest::isolate());
5420 : Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(
5421 10 : Handle<FixedArray>(arr.get(), isolate));
5422 5 : js_array->GetElementsAccessor()->Shift(js_array);
5423 : }
5424 5 : break;
5425 : }
5426 : }
5427 :
5428 : // Finish marking with bigger steps to speed up test.
5429 40 : while (!marking->IsComplete()) {
5430 : marking->Step(10 * i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5431 35 : StepOrigin::kV8);
5432 35 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5433 5 : marking->FinalizeIncrementally();
5434 : }
5435 : }
5436 5 : CHECK(marking->IsComplete());
5437 :
5438 : // All objects need to be black after marking. If a white object crossed the
5439 : // progress bar, we would fail here.
5440 633925 : for (int i = 0; i < arr.get()->length(); i++) {
5441 633920 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5442 316960 : CHECK(marking_state->IsBlack(arr_value));
5443 : }
5444 : }
5445 :
5446 30 : Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
5447 : // Make sure there is no garbage and the compilation cache is empty.
5448 60 : for (int i = 0; i < 5; i++) {
5449 50 : CcTest::CollectAllGarbage();
5450 : }
5451 10 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5452 10 : size_t size_before_allocation = heap->SizeOfObjects();
5453 : Handle<FixedArray> array =
5454 10 : heap->isolate()->factory()->NewFixedArray(length, TENURED);
5455 10 : size_t size_after_allocation = heap->SizeOfObjects();
5456 10 : CHECK_EQ(size_after_allocation, size_before_allocation + array->Size());
5457 10 : array->Shrink(heap->isolate(), 1);
5458 10 : size_t size_after_shrinking = heap->SizeOfObjects();
5459 : // Shrinking does not change the space size immediately.
5460 10 : CHECK_EQ(size_after_allocation, size_after_shrinking);
5461 : // GC and sweeping updates the size to acccount for shrinking.
5462 10 : CcTest::CollectAllGarbage();
5463 10 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5464 10 : intptr_t size_after_gc = heap->SizeOfObjects();
5465 20 : CHECK_EQ(size_after_gc, size_before_allocation + array->Size());
5466 10 : return array;
5467 : }
5468 :
5469 28342 : TEST(Regress609761) {
5470 5 : CcTest::InitializeVM();
5471 5 : v8::HandleScope scope(CcTest::isolate());
5472 10 : Heap* heap = CcTest::heap();
5473 : int length = kMaxRegularHeapObjectSize / kTaggedSize + 1;
5474 5 : Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, length);
5475 10 : CHECK(heap->lo_space()->Contains(*array));
5476 5 : }
5477 :
5478 28342 : TEST(LiveBytes) {
5479 5 : CcTest::InitializeVM();
5480 5 : v8::HandleScope scope(CcTest::isolate());
5481 10 : Heap* heap = CcTest::heap();
5482 5 : Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, 2000);
5483 10 : CHECK(heap->old_space()->Contains(*array));
5484 5 : }
5485 :
5486 28342 : TEST(Regress615489) {
5487 5 : if (!FLAG_incremental_marking) return;
5488 5 : FLAG_black_allocation = true;
5489 5 : CcTest::InitializeVM();
5490 5 : v8::HandleScope scope(CcTest::isolate());
5491 10 : Heap* heap = CcTest::heap();
5492 : Isolate* isolate = heap->isolate();
5493 5 : CcTest::CollectAllGarbage();
5494 :
5495 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5496 10 : i::IncrementalMarking* marking = heap->incremental_marking();
5497 5 : if (collector->sweeping_in_progress()) {
5498 5 : collector->EnsureSweepingCompleted();
5499 : }
5500 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5501 5 : if (marking->IsStopped()) {
5502 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5503 5 : i::GarbageCollectionReason::kTesting);
5504 : }
5505 5 : CHECK(marking->IsMarking());
5506 : marking->StartBlackAllocationForTesting();
5507 : {
5508 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
5509 10 : v8::HandleScope inner(CcTest::isolate());
5510 10 : isolate->factory()->NewFixedArray(500, TENURED)->Size();
5511 : }
5512 20 : while (!marking->IsComplete()) {
5513 : marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5514 10 : StepOrigin::kV8);
5515 10 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5516 5 : marking->FinalizeIncrementally();
5517 : }
5518 : }
5519 5 : CHECK(marking->IsComplete());
5520 5 : intptr_t size_before = heap->SizeOfObjects();
5521 5 : CcTest::CollectAllGarbage();
5522 5 : intptr_t size_after = heap->SizeOfObjects();
5523 : // Live size does not increase after garbage collection.
5524 5 : CHECK_LE(size_after, size_before);
5525 : }
5526 :
5527 : class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
5528 : public:
5529 5 : explicit StaticOneByteResource(const char* data) : data_(data) {}
5530 :
5531 5 : ~StaticOneByteResource() override = default;
5532 :
5533 5 : const char* data() const override { return data_; }
5534 :
5535 5 : size_t length() const override { return strlen(data_); }
5536 :
5537 : private:
5538 : const char* data_;
5539 : };
5540 :
5541 28342 : TEST(Regress631969) {
5542 5 : if (!FLAG_incremental_marking) return;
5543 5 : FLAG_manual_evacuation_candidates_selection = true;
5544 5 : FLAG_parallel_compaction = false;
5545 : ManualGCScope manual_gc_scope;
5546 5 : CcTest::InitializeVM();
5547 10 : v8::HandleScope scope(CcTest::isolate());
5548 20 : Heap* heap = CcTest::heap();
5549 : // Get the heap in clean state.
5550 5 : CcTest::CollectGarbage(OLD_SPACE);
5551 5 : CcTest::CollectGarbage(OLD_SPACE);
5552 : Isolate* isolate = CcTest::i_isolate();
5553 : Factory* factory = isolate->factory();
5554 : // Allocate two strings in a fresh page and mark the page as evacuation
5555 : // candidate.
5556 5 : heap::SimulateFullSpace(heap->old_space());
5557 5 : Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED);
5558 5 : Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED);
5559 5 : heap::ForceEvacuationCandidate(Page::FromHeapObject(*s1));
5560 :
5561 5 : heap::SimulateIncrementalMarking(heap, false);
5562 :
5563 : // Allocate a cons string and promote it to a fresh page in the old space.
5564 5 : heap::SimulateFullSpace(heap->old_space());
5565 : Handle<String> s3;
5566 10 : factory->NewConsString(s1, s2).ToHandle(&s3);
5567 5 : CcTest::CollectGarbage(NEW_SPACE);
5568 5 : CcTest::CollectGarbage(NEW_SPACE);
5569 :
5570 : // Finish incremental marking.
5571 10 : IncrementalMarking* marking = heap->incremental_marking();
5572 20 : while (!marking->IsComplete()) {
5573 : marking->Step(MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5574 10 : StepOrigin::kV8);
5575 10 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5576 5 : marking->FinalizeIncrementally();
5577 : }
5578 : }
5579 :
5580 : {
5581 : StaticOneByteResource external_string("12345678901234");
5582 5 : s3->MakeExternal(&external_string);
5583 5 : CcTest::CollectGarbage(OLD_SPACE);
5584 : // This avoids the GC from trying to free stack allocated resources.
5585 10 : i::Handle<i::ExternalOneByteString>::cast(s3)->SetResource(isolate,
5586 5 : nullptr);
5587 : }
5588 : }
5589 :
5590 28342 : TEST(LeftTrimFixedArrayInBlackArea) {
5591 5 : if (!FLAG_incremental_marking) return;
5592 5 : FLAG_black_allocation = true;
5593 5 : CcTest::InitializeVM();
5594 5 : v8::HandleScope scope(CcTest::isolate());
5595 20 : Heap* heap = CcTest::heap();
5596 : Isolate* isolate = heap->isolate();
5597 5 : CcTest::CollectAllGarbage();
5598 :
5599 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5600 : i::IncrementalMarking* marking = heap->incremental_marking();
5601 5 : if (collector->sweeping_in_progress()) {
5602 5 : collector->EnsureSweepingCompleted();
5603 : }
5604 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5605 5 : if (marking->IsStopped()) {
5606 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5607 5 : i::GarbageCollectionReason::kTesting);
5608 : }
5609 5 : CHECK(marking->IsMarking());
5610 : marking->StartBlackAllocationForTesting();
5611 :
5612 : // Ensure that we allocate a new page, set up a bump pointer area, and
5613 : // perform the allocation in a black area.
5614 5 : heap::SimulateFullSpace(heap->old_space());
5615 5 : isolate->factory()->NewFixedArray(4, TENURED);
5616 5 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
5617 10 : CHECK(heap->old_space()->Contains(*array));
5618 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5619 5 : CHECK(marking_state->IsBlack(*array));
5620 :
5621 : // Now left trim the allocated black area. A filler has to be installed
5622 : // for the trimmed area and all mark bits of the trimmed area have to be
5623 : // cleared.
5624 5 : FixedArrayBase trimmed = heap->LeftTrimFixedArray(*array, 10);
5625 5 : CHECK(marking_state->IsBlack(trimmed));
5626 :
5627 5 : heap::GcAndSweep(heap, OLD_SPACE);
5628 : }
5629 :
5630 28342 : TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
5631 5 : if (!FLAG_incremental_marking) return;
5632 5 : FLAG_black_allocation = true;
5633 5 : CcTest::InitializeVM();
5634 5 : v8::HandleScope scope(CcTest::isolate());
5635 20 : Heap* heap = CcTest::heap();
5636 : Isolate* isolate = heap->isolate();
5637 5 : CcTest::CollectAllGarbage();
5638 :
5639 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5640 : i::IncrementalMarking* marking = heap->incremental_marking();
5641 5 : if (collector->sweeping_in_progress()) {
5642 5 : collector->EnsureSweepingCompleted();
5643 : }
5644 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5645 5 : if (marking->IsStopped()) {
5646 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5647 5 : i::GarbageCollectionReason::kTesting);
5648 : }
5649 5 : CHECK(marking->IsMarking());
5650 : marking->StartBlackAllocationForTesting();
5651 :
5652 : // Ensure that we allocate a new page, set up a bump pointer area, and
5653 : // perform the allocation in a black area.
5654 5 : heap::SimulateFullSpace(heap->old_space());
5655 5 : isolate->factory()->NewFixedArray(10, TENURED);
5656 :
5657 : // Allocate the fixed array that will be trimmed later.
5658 5 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(100, TENURED);
5659 : Address start_address = array->address();
5660 5 : Address end_address = start_address + array->Size();
5661 5 : Page* page = Page::FromAddress(start_address);
5662 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5663 5 : CHECK(marking_state->IsBlack(*array));
5664 10 : CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
5665 : page->AddressToMarkbitIndex(start_address),
5666 : page->AddressToMarkbitIndex(end_address)));
5667 10 : CHECK(heap->old_space()->Contains(*array));
5668 :
5669 : FixedArrayBase previous = *array;
5670 : FixedArrayBase trimmed;
5671 :
5672 : // First trim in one word steps.
5673 55 : for (int i = 0; i < 10; i++) {
5674 50 : trimmed = heap->LeftTrimFixedArray(previous, 1);
5675 50 : HeapObject filler = HeapObject::FromAddress(previous->address());
5676 50 : CHECK(filler->IsFiller());
5677 50 : CHECK(marking_state->IsBlack(trimmed));
5678 50 : CHECK(marking_state->IsBlack(previous));
5679 : previous = trimmed;
5680 : }
5681 :
5682 : // Then trim in two and three word steps.
5683 10 : for (int i = 2; i <= 3; i++) {
5684 100 : for (int j = 0; j < 10; j++) {
5685 100 : trimmed = heap->LeftTrimFixedArray(previous, i);
5686 100 : HeapObject filler = HeapObject::FromAddress(previous->address());
5687 100 : CHECK(filler->IsFiller());
5688 100 : CHECK(marking_state->IsBlack(trimmed));
5689 100 : CHECK(marking_state->IsBlack(previous));
5690 : previous = trimmed;
5691 : }
5692 : }
5693 :
5694 5 : heap::GcAndSweep(heap, OLD_SPACE);
5695 : }
5696 :
5697 28342 : TEST(ContinuousRightTrimFixedArrayInBlackArea) {
5698 5 : if (!FLAG_incremental_marking) return;
5699 5 : FLAG_black_allocation = true;
5700 5 : CcTest::InitializeVM();
5701 5 : v8::HandleScope scope(CcTest::isolate());
5702 20 : Heap* heap = CcTest::heap();
5703 : Isolate* isolate = CcTest::i_isolate();
5704 5 : CcTest::CollectAllGarbage();
5705 :
5706 5 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5707 : i::IncrementalMarking* marking = heap->incremental_marking();
5708 5 : if (collector->sweeping_in_progress()) {
5709 5 : collector->EnsureSweepingCompleted();
5710 : }
5711 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5712 5 : if (marking->IsStopped()) {
5713 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5714 5 : i::GarbageCollectionReason::kTesting);
5715 : }
5716 5 : CHECK(marking->IsMarking());
5717 : marking->StartBlackAllocationForTesting();
5718 :
5719 : // Ensure that we allocate a new page, set up a bump pointer area, and
5720 : // perform the allocation in a black area.
5721 5 : heap::SimulateFullSpace(heap->old_space());
5722 5 : isolate->factory()->NewFixedArray(10, TENURED);
5723 :
5724 : // Allocate the fixed array that will be trimmed later.
5725 : Handle<FixedArray> array =
5726 5 : CcTest::i_isolate()->factory()->NewFixedArray(100, TENURED);
5727 : Address start_address = array->address();
5728 5 : Address end_address = start_address + array->Size();
5729 5 : Page* page = Page::FromAddress(start_address);
5730 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5731 5 : CHECK(marking_state->IsBlack(*array));
5732 :
5733 10 : CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
5734 : page->AddressToMarkbitIndex(start_address),
5735 : page->AddressToMarkbitIndex(end_address)));
5736 10 : CHECK(heap->old_space()->Contains(*array));
5737 :
5738 : // Trim it once by one word to make checking for white marking color uniform.
5739 5 : Address previous = end_address - kTaggedSize;
5740 5 : isolate->heap()->RightTrimFixedArray(*array, 1);
5741 :
5742 5 : HeapObject filler = HeapObject::FromAddress(previous);
5743 5 : CHECK(filler->IsFiller());
5744 5 : CHECK(marking_state->IsImpossible(filler));
5745 :
5746 : // Trim 10 times by one, two, and three word.
5747 15 : for (int i = 1; i <= 3; i++) {
5748 150 : for (int j = 0; j < 10; j++) {
5749 150 : previous -= kTaggedSize * i;
5750 150 : isolate->heap()->RightTrimFixedArray(*array, i);
5751 150 : HeapObject filler = HeapObject::FromAddress(previous);
5752 150 : CHECK(filler->IsFiller());
5753 150 : CHECK(marking_state->IsWhite(filler));
5754 : }
5755 : }
5756 :
5757 5 : heap::GcAndSweep(heap, OLD_SPACE);
5758 : }
5759 :
5760 28342 : TEST(Regress618958) {
5761 5 : if (!FLAG_incremental_marking) return;
5762 5 : CcTest::InitializeVM();
5763 5 : v8::HandleScope scope(CcTest::isolate());
5764 15 : Heap* heap = CcTest::heap();
5765 : bool isolate_is_locked = true;
5766 5 : CcTest::isolate()->AdjustAmountOfExternalAllocatedMemory(100 * MB);
5767 : int mark_sweep_count_before = heap->ms_count();
5768 : heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
5769 5 : isolate_is_locked);
5770 : int mark_sweep_count_after = heap->ms_count();
5771 5 : int mark_sweeps_performed = mark_sweep_count_after - mark_sweep_count_before;
5772 : // The memory pressuer handler either performed two GCs or performed one and
5773 : // started incremental marking.
5774 5 : CHECK(mark_sweeps_performed == 2 ||
5775 : (mark_sweeps_performed == 1 &&
5776 5 : !heap->incremental_marking()->IsStopped()));
5777 : }
5778 :
5779 28342 : TEST(YoungGenerationLargeObjectAllocationScavenge) {
5780 10 : if (FLAG_minor_mc) return;
5781 5 : FLAG_young_generation_large_objects = true;
5782 5 : CcTest::InitializeVM();
5783 5 : v8::HandleScope scope(CcTest::isolate());
5784 5 : Heap* heap = CcTest::heap();
5785 5 : Isolate* isolate = heap->isolate();
5786 10 : if (!isolate->serializer_enabled()) return;
5787 :
5788 : // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
5789 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
5790 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
5791 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
5792 0 : CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
5793 :
5794 0 : Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
5795 0 : array_small->set(0, *number);
5796 :
5797 0 : CcTest::CollectGarbage(NEW_SPACE);
5798 :
5799 : // After the first young generation GC array_small will be in the old
5800 : // generation large object space.
5801 : chunk = MemoryChunk::FromHeapObject(*array_small);
5802 0 : CHECK_EQ(LO_SPACE, chunk->owner()->identity());
5803 0 : CHECK(!chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
5804 :
5805 0 : CcTest::CollectAllAvailableGarbage();
5806 : }
5807 :
5808 28342 : TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
5809 10 : if (FLAG_minor_mc) return;
5810 5 : FLAG_young_generation_large_objects = true;
5811 5 : CcTest::InitializeVM();
5812 5 : v8::HandleScope scope(CcTest::isolate());
5813 5 : Heap* heap = CcTest::heap();
5814 5 : Isolate* isolate = heap->isolate();
5815 10 : if (!isolate->serializer_enabled()) return;
5816 :
5817 : // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
5818 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
5819 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
5820 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
5821 0 : CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
5822 :
5823 0 : Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
5824 0 : array_small->set(0, *number);
5825 :
5826 0 : CcTest::CollectGarbage(OLD_SPACE);
5827 :
5828 : // After the first full GC array_small will be in the old generation
5829 : // large object space.
5830 : chunk = MemoryChunk::FromHeapObject(*array_small);
5831 0 : CHECK_EQ(LO_SPACE, chunk->owner()->identity());
5832 0 : CHECK(!chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
5833 :
5834 0 : CcTest::CollectAllAvailableGarbage();
5835 : }
5836 :
5837 28342 : TEST(YoungGenerationLargeObjectAllocationReleaseScavenger) {
5838 10 : if (FLAG_minor_mc) return;
5839 5 : FLAG_young_generation_large_objects = true;
5840 5 : CcTest::InitializeVM();
5841 5 : v8::HandleScope scope(CcTest::isolate());
5842 5 : Heap* heap = CcTest::heap();
5843 5 : Isolate* isolate = heap->isolate();
5844 10 : if (!isolate->serializer_enabled()) return;
5845 :
5846 : {
5847 : HandleScope scope(isolate);
5848 0 : for (int i = 0; i < 10; i++) {
5849 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(20000);
5850 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
5851 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
5852 0 : CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
5853 : }
5854 : }
5855 :
5856 0 : CcTest::CollectGarbage(NEW_SPACE);
5857 0 : CHECK(isolate->heap()->new_lo_space()->IsEmpty());
5858 0 : CHECK_EQ(0, isolate->heap()->new_lo_space()->Size());
5859 0 : CHECK_EQ(0, isolate->heap()->new_lo_space()->SizeOfObjects());
5860 0 : CHECK(isolate->heap()->lo_space()->IsEmpty());
5861 0 : CHECK_EQ(0, isolate->heap()->lo_space()->Size());
5862 0 : CHECK_EQ(0, isolate->heap()->lo_space()->SizeOfObjects());
5863 : }
5864 :
5865 28342 : TEST(UncommitUnusedLargeObjectMemory) {
5866 5 : CcTest::InitializeVM();
5867 5 : v8::HandleScope scope(CcTest::isolate());
5868 5 : Heap* heap = CcTest::heap();
5869 : Isolate* isolate = heap->isolate();
5870 :
5871 5 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000, TENURED);
5872 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
5873 5 : CHECK(chunk->owner()->identity() == LO_SPACE);
5874 :
5875 5 : intptr_t size_before = array->Size();
5876 5 : size_t committed_memory_before = chunk->CommittedPhysicalMemory();
5877 :
5878 5 : array->Shrink(isolate, 1);
5879 5 : CHECK(array->Size() < size_before);
5880 :
5881 5 : CcTest::CollectAllGarbage();
5882 5 : CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
5883 : size_t shrinked_size = RoundUp(
5884 20 : (array->address() - chunk->address()) + array->Size(), CommitPageSize());
5885 5 : CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory());
5886 5 : }
5887 :
5888 28342 : TEST(RememberedSetRemoveRange) {
5889 5 : CcTest::InitializeVM();
5890 5 : v8::HandleScope scope(CcTest::isolate());
5891 5 : Heap* heap = CcTest::heap();
5892 : Isolate* isolate = heap->isolate();
5893 :
5894 : Handle<FixedArray> array =
5895 5 : isolate->factory()->NewFixedArray(Page::kPageSize / kTaggedSize, TENURED);
5896 15 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
5897 5 : CHECK(chunk->owner()->identity() == LO_SPACE);
5898 5 : Address start = array->address();
5899 : // Maps slot to boolean indicator of whether the slot should be in the set.
5900 : std::map<Address, bool> slots;
5901 5 : slots[start + 0] = true;
5902 5 : slots[start + kTaggedSize] = true;
5903 5 : slots[start + Page::kPageSize - kTaggedSize] = true;
5904 5 : slots[start + Page::kPageSize] = true;
5905 5 : slots[start + Page::kPageSize + kTaggedSize] = true;
5906 5 : slots[chunk->area_end() - kTaggedSize] = true;
5907 :
5908 35 : for (auto x : slots) {
5909 25 : RememberedSet<OLD_TO_NEW>::Insert(chunk, x.first);
5910 : }
5911 :
5912 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
5913 25 : [&slots](MaybeObjectSlot slot) {
5914 25 : CHECK(slots[slot.address()]);
5915 25 : return KEEP_SLOT;
5916 : },
5917 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
5918 :
5919 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kTaggedSize,
5920 5 : SlotSet::FREE_EMPTY_BUCKETS);
5921 5 : slots[start] = false;
5922 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
5923 20 : [&slots](MaybeObjectSlot slot) {
5924 20 : CHECK(slots[slot.address()]);
5925 20 : return KEEP_SLOT;
5926 : },
5927 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
5928 :
5929 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kTaggedSize,
5930 : start + Page::kPageSize,
5931 5 : SlotSet::FREE_EMPTY_BUCKETS);
5932 5 : slots[start + kTaggedSize] = false;
5933 5 : slots[start + Page::kPageSize - kTaggedSize] = false;
5934 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
5935 10 : [&slots](MaybeObjectSlot slot) {
5936 10 : CHECK(slots[slot.address()]);
5937 10 : return KEEP_SLOT;
5938 : },
5939 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
5940 :
5941 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start,
5942 : start + Page::kPageSize + kTaggedSize,
5943 5 : SlotSet::FREE_EMPTY_BUCKETS);
5944 5 : slots[start + Page::kPageSize] = false;
5945 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
5946 5 : [&slots](MaybeObjectSlot slot) {
5947 5 : CHECK(slots[slot.address()]);
5948 5 : return KEEP_SLOT;
5949 : },
5950 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
5951 :
5952 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, chunk->area_end() - kTaggedSize,
5953 : chunk->area_end(),
5954 5 : SlotSet::FREE_EMPTY_BUCKETS);
5955 5 : slots[chunk->area_end() - kTaggedSize] = false;
5956 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
5957 0 : [&slots](MaybeObjectSlot slot) {
5958 0 : CHECK(slots[slot.address()]);
5959 0 : return KEEP_SLOT;
5960 : },
5961 10 : SlotSet::PREFREE_EMPTY_BUCKETS);
5962 5 : }
5963 :
// Regression test: advancing incremental marking in small deadline-bounded
// steps while the mutator keeps allocating tenured arrays must eventually
// finish marking instead of looping forever.
HEAP_TEST(Regress670675) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  CcTest::CollectAllGarbage();

  // Start from a clean state: no sweeping in flight, marking freshly started.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    marking->Start(i::GarbageCollectionReason::kTesting);
  }
  // Allocate roughly enough arrays to exhaust the old generation, plus slack,
  // interleaving 1ms marking steps until marking completes.
  size_t array_length = Page::kPageSize / kTaggedSize + 100;
  size_t n = heap->OldGenerationSpaceAvailable() / array_length;
  for (size_t i = 0; i < n + 40; i++) {
    {
      HandleScope inner_scope(isolate);
      isolate->factory()->NewFixedArray(static_cast<int>(array_length),
                                        TENURED);
    }
    if (marking->IsStopped()) break;
    double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
    marking->AdvanceIncrementalMarking(
        deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
  }
  DCHECK(marking->IsStopped());
}
5996 :
5997 : namespace {
5998 10 : Handle<Code> GenerateDummyImmovableCode(Isolate* isolate) {
5999 30 : Assembler assm(AssemblerOptions{});
6000 :
6001 : const int kNumberOfNops = 1 << 10;
6002 10250 : for (int i = 0; i < kNumberOfNops; i++) {
6003 10240 : assm.nop(); // supported on all architectures
6004 : }
6005 :
6006 10 : CodeDesc desc;
6007 10 : assm.GetCode(isolate, &desc);
6008 : Handle<Code> code = isolate->factory()->NewCode(
6009 : desc, Code::STUB, Handle<Code>(), Builtins::kNoBuiltinId,
6010 30 : MaybeHandle<ByteArray>(), DeoptimizationData::Empty(isolate), kImmovable);
6011 20 : CHECK(code->IsCode());
6012 :
6013 10 : return code;
6014 : }
6015 : } // namespace
6016 :
// Regression test: after the code space overflows into LO_SPACE, requesting
// immovable code with the serializer enabled must still place it on a
// never-evacuate regular page rather than in large object space.
HEAP_TEST(Regress5831) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  // Used to ensure that the generated code is not collected.
  const int kInitialSize = 32;
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(kInitialSize);

  // Ensure that all immovable code space pages are full and we overflow into
  // LO_SPACE.
  const int kMaxIterations = 1 << 16;
  bool overflowed_into_lospace = false;
  for (int i = 0; i < kMaxIterations; i++) {
    Handle<Code> code = GenerateDummyImmovableCode(isolate);
    array = FixedArray::SetAndGrow(isolate, array, i, code);
    CHECK(heap->code_space()->Contains(code->address()) ||
          heap->code_lo_space()->Contains(*code));
    if (heap->code_lo_space()->Contains(*code)) {
      overflowed_into_lospace = true;
      break;
    }
  }

  CHECK(overflowed_into_lospace);

  // Fake a serializer run.
  isolate->serializer_enabled_ = true;

  // Generate the code.
  Handle<Code> code = GenerateDummyImmovableCode(isolate);
  CHECK_GE(i::kMaxRegularHeapObjectSize, code->Size());
  CHECK(!heap->code_space()->first_page()->Contains(code->address()));

  // Ensure it's not in large object space.
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(*code);
  CHECK(chunk->owner()->identity() != LO_SPACE);
  CHECK(chunk->NeverEvacuate());
}
6057 :
6058 28342 : TEST(Regress6800) {
6059 5 : CcTest::InitializeVM();
6060 : Isolate* isolate = CcTest::i_isolate();
6061 : HandleScope handle_scope(isolate);
6062 :
6063 : const int kRootLength = 1000;
6064 : Handle<FixedArray> root =
6065 5 : isolate->factory()->NewFixedArray(kRootLength, TENURED);
6066 : {
6067 : HandleScope inner_scope(isolate);
6068 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
6069 5005 : for (int i = 0; i < kRootLength; i++) {
6070 10000 : root->set(i, *new_space_array);
6071 : }
6072 5000 : for (int i = 0; i < kRootLength; i++) {
6073 15000 : root->set(i, ReadOnlyRoots(CcTest::heap()).undefined_value());
6074 : }
6075 : }
6076 5 : CcTest::CollectGarbage(NEW_SPACE);
6077 5 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
6078 : MemoryChunk::FromHeapObject(*root)));
6079 5 : }
6080 :
6081 28342 : TEST(Regress6800LargeObject) {
6082 5 : CcTest::InitializeVM();
6083 : Isolate* isolate = CcTest::i_isolate();
6084 : HandleScope handle_scope(isolate);
6085 :
6086 : const int kRootLength = i::kMaxRegularHeapObjectSize / kTaggedSize;
6087 : Handle<FixedArray> root =
6088 5 : isolate->factory()->NewFixedArray(kRootLength, TENURED);
6089 5 : CcTest::heap()->lo_space()->Contains(*root);
6090 : {
6091 : HandleScope inner_scope(isolate);
6092 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
6093 316965 : for (int i = 0; i < kRootLength; i++) {
6094 633920 : root->set(i, *new_space_array);
6095 : }
6096 316960 : for (int i = 0; i < kRootLength; i++) {
6097 950880 : root->set(i, ReadOnlyRoots(CcTest::heap()).undefined_value());
6098 : }
6099 : }
6100 5 : CcTest::CollectGarbage(OLD_SPACE);
6101 5 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
6102 : MemoryChunk::FromHeapObject(*root)));
6103 5 : }
6104 :
// Regression test: Factory::New must emit a write barrier when installing the
// map on a black-allocated object, otherwise the (white) map can be swept.
HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_black_allocation = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();
  heap::SimulateIncrementalMarking(heap, false);
  // Allocate the map while marking is on but before black allocation starts,
  // so the map itself is not marked.
  Handle<Map> map;
  {
    AlwaysAllocateScope always_allocate(isolate);
    map = isolate->factory()->NewMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
  }
  heap->incremental_marking()->StartBlackAllocationForTesting();
  Handle<HeapObject> object;
  {
    AlwaysAllocateScope always_allocate(isolate);
    object = handle(isolate->factory()->NewForTest(map, TENURED), isolate);
  }
  // The object is black. If Factory::New sets the map without write-barrier,
  // then the map is white and will be freed prematurely.
  heap::SimulateIncrementalMarking(heap, true);
  CcTest::CollectAllGarbage();
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  // If the map survived, reading it back still yields a valid Map.
  CHECK(object->map()->IsMap());
}
6136 :
6137 28342 : HEAP_TEST(MarkCompactEpochCounter) {
6138 : ManualGCScope manual_gc_scope;
6139 5 : CcTest::InitializeVM();
6140 10 : v8::HandleScope scope(CcTest::isolate());
6141 20 : Heap* heap = CcTest::heap();
6142 5 : unsigned epoch0 = heap->mark_compact_collector()->epoch();
6143 5 : CcTest::CollectGarbage(OLD_SPACE);
6144 5 : unsigned epoch1 = heap->mark_compact_collector()->epoch();
6145 5 : CHECK_EQ(epoch0 + 1, epoch1);
6146 5 : heap::SimulateIncrementalMarking(heap, true);
6147 5 : CcTest::CollectGarbage(OLD_SPACE);
6148 5 : unsigned epoch2 = heap->mark_compact_collector()->epoch();
6149 5 : CHECK_EQ(epoch1 + 1, epoch2);
6150 5 : CcTest::CollectGarbage(NEW_SPACE);
6151 5 : unsigned epoch3 = heap->mark_compact_collector()->epoch();
6152 5 : CHECK_EQ(epoch2, epoch3);
6153 5 : }
6154 :
// Checks that with --rehash-snapshot each new isolate adopts the hash seed
// from --hash-seed instead of the seed baked into the snapshot.
UNINITIALIZED_TEST(ReinitializeStringHashSeed) {
  // Enable rehashing and create an isolate and context.
  i::FLAG_rehash_snapshot = true;
  for (int i = 1; i < 3; i++) {
    i::FLAG_hash_seed = 1337 * i;
    v8::Isolate::CreateParams create_params;
    create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
    v8::Isolate* isolate = v8::Isolate::New(create_params);
    {
      v8::Isolate::Scope isolate_scope(isolate);
      // The heap must report the flag-provided seed, not the snapshot's.
      CHECK_EQ(static_cast<uint64_t>(1337 * i),
               reinterpret_cast<i::Isolate*>(isolate)->heap()->HashSeed());
      v8::HandleScope handle_scope(isolate);
      // Creating a context exercises rehashing of the snapshot's tables.
      v8::Local<v8::Context> context = v8::Context::New(isolate);
      CHECK(!context.IsEmpty());
      v8::Context::Scope context_scope(context);
    }
    isolate->Dispose();
  }
}
6175 :
// Old-generation size limit (in bytes) shared by the OutOfMemory* tests.
const int kHeapLimit = 100 * MB;
// Global handoff to OOMCallback: the OOM handler signature carries no user
// data, so the test isolate is published through this pointer.
Isolate* oom_isolate = nullptr;
6178 :
// OOM handler for the OutOfMemory* tests: verifies that the heap stayed
// within the configured limit (modulo new-space slack) and then exits the
// process with status 0, which is how those tests report success.
void OOMCallback(const char* location, bool is_heap_oom) {
  Heap* heap = oom_isolate->heap();
  size_t kSlack = heap->new_space()->Capacity();
  CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
  CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack);
  base::OS::ExitProcess(0);
}
6186 :
// Allocates until the configured heap limit is hit. The test never returns
// normally: OOMCallback checks the limit invariants and exits the process.
UNINITIALIZED_TEST(OutOfMemory) {
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif
  FLAG_max_old_space_size = kHeapLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  oom_isolate = i_isolate;
  isolate->SetOOMErrorHandler(OOMCallback);
  {
    Factory* factory = i_isolate->factory();
    HandleScope handle_scope(i_isolate);
    // Loop forever; termination happens via ExitProcess(0) in OOMCallback.
    // The isolate is deliberately never disposed for the same reason.
    while (true) {
      factory->NewFixedArray(100);
    }
  }
}
6207 :
// Checks the ineffective-GC detection near the heap limit: after several
// consecutive mark-sweeps with low mutator utilization the heap must either
// recover (utilization >= 0.3) or the OOM machinery takes over.
UNINITIALIZED_TEST(OutOfMemoryIneffectiveGC) {
  if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif

  FLAG_max_old_space_size = kHeapLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  oom_isolate = i_isolate;
  isolate->SetOOMErrorHandler(OOMCallback);
  Factory* factory = i_isolate->factory();
  Heap* heap = i_isolate->heap();
  heap->CollectAllGarbage(Heap::kNoGCFlags, GarbageCollectionReason::kTesting);
  {
    HandleScope scope(i_isolate);
    // Fill the old generation to 90% of its limit with live arrays.
    while (heap->OldGenerationSizeOfObjects() <
           heap->MaxOldGenerationSize() * 0.9) {
      factory->NewFixedArray(100, TENURED);
    }
    {
      int initial_ms_count = heap->ms_count();
      int ineffective_ms_start = initial_ms_count;
      // Keep allocating through 10 mark-sweeps, tracking the start of the
      // latest run of low-utilization ("ineffective") collections.
      while (heap->ms_count() < initial_ms_count + 10) {
        HandleScope inner_scope(i_isolate);
        factory->NewFixedArray(30000, TENURED);
        if (heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3) {
          ineffective_ms_start = heap->ms_count() + 1;
        }
      }
      int consecutive_ineffective_ms = heap->ms_count() - ineffective_ms_start;
      // Four or more consecutive ineffective mark-sweeps would have tripped
      // the fatal ineffective-GC handling, so reaching here implies recovery.
      CHECK_IMPLIES(
          consecutive_ineffective_ms >= 4,
          heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3);
    }
  }
  isolate->Dispose();
}
6249 :
HEAP_TEST(Regress779503) {
  // The following regression test ensures that the Scavenger does not allocate
  // over invalid slots. More specific, the Scavenger should not sweep a page
  // that it currently processes because it might allocate over the currently
  // processed slot.
  const int kArraySize = 2048;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  heap::SealCurrentObjects(heap);
  {
    HandleScope handle_scope(isolate);
    // The byte array filled with kHeapObjectTag ensures that we cannot read
    // from the slot again and interpret it as heap value. Doing so will crash.
    Handle<ByteArray> byte_array = isolate->factory()->NewByteArray(kArraySize);
    CHECK(Heap::InNewSpace(*byte_array));
    for (int i = 0; i < kArraySize; i++) {
      byte_array->set(i, kHeapObjectTag);
    }

    {
      HandleScope handle_scope(isolate);
      // The FixedArray in old space serves as space for slots.
      Handle<FixedArray> fixed_array =
          isolate->factory()->NewFixedArray(kArraySize, TENURED);
      CHECK(!Heap::InNewSpace(*fixed_array));
      for (int i = 0; i < kArraySize; i++) {
        fixed_array->set(i, *byte_array);
      }
    }
    // Delay sweeper tasks to allow the scavenger to sweep the page it is
    // currently scavenging.
    heap->delay_sweeper_tasks_for_testing_ = true;
    // The full GC kills fixed_array (its scope closed above) but keeps
    // byte_array alive in new space, leaving stale OLD_TO_NEW slots behind.
    CcTest::CollectGarbage(OLD_SPACE);
    CHECK(Heap::InNewSpace(*byte_array));
  }
  // Scavenging and sweeping the same page will crash as slots will be
  // overridden.
  CcTest::CollectGarbage(NEW_SPACE);
  heap->delay_sweeper_tasks_for_testing_ = false;
}
6291 :
// Snapshot of heap statistics captured by NearHeapLimitCallback at the moment
// the near-heap-limit event fires; inspected by the OutOfMemory* tests below.
struct OutOfMemoryState {
  Heap* heap;
  bool oom_triggered;  // Set once the callback has run.
  size_t old_generation_capacity_at_oom;
  size_t memory_allocator_size_at_oom;
  size_t new_space_capacity_at_oom;
  size_t current_heap_limit;
  size_t initial_heap_limit;
};
6301 :
6302 12 : size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
6303 : size_t initial_heap_limit) {
6304 : OutOfMemoryState* state = static_cast<OutOfMemoryState*>(raw_state);
6305 36 : Heap* heap = state->heap;
6306 12 : state->oom_triggered = true;
6307 12 : state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
6308 12 : state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
6309 12 : state->new_space_capacity_at_oom = heap->new_space()->Capacity();
6310 12 : state->current_heap_limit = current_heap_limit;
6311 12 : state->initial_heap_limit = initial_heap_limit;
6312 12 : return initial_heap_limit + 100 * MB;
6313 : }
6314 :
6315 0 : size_t MemoryAllocatorSizeFromHeapCapacity(size_t capacity) {
6316 : // Size to capacity factor.
6317 : double factor =
6318 4 : Page::kPageSize * 1.0 / MemoryChunkLayout::AllocatableMemoryInDataPage();
6319 : // Some tables (e.g. deoptimization table) are allocated directly with the
6320 : // memory allocator. Allow some slack to account for them.
6321 : size_t slack = 5 * MB;
6322 4 : return static_cast<size_t>(capacity * factor) + slack;
6323 : }
6324 :
// Allocates small objects until the near-heap-limit callback fires and then
// checks that capacity and allocator size stayed within the expected bounds.
UNINITIALIZED_TEST(OutOfMemorySmallObjects) {
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif
  const size_t kOldGenerationLimit = 300 * MB;
  FLAG_max_old_space_size = kOldGenerationLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  Isolate* isolate =
      reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  OutOfMemoryState state;
  state.heap = heap;
  state.oom_triggered = false;
  heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
  {
    HandleScope handle_scope(isolate);
    while (!state.oom_triggered) {
      factory->NewFixedArray(100);
    }
  }
  // Capacity at OOM may exceed the limit only by the new-space capacity, and
  // must have actually reached the limit (again modulo new-space capacity).
  CHECK_LE(state.old_generation_capacity_at_oom,
           kOldGenerationLimit + state.new_space_capacity_at_oom);
  CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
                                    state.new_space_capacity_at_oom);
  CHECK_LE(
      state.memory_allocator_size_at_oom,
      MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
                                          2 * state.new_space_capacity_at_oom));
  reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
6358 :
// Same as OutOfMemorySmallObjects but driving the heap to its limit with
// large fixed arrays, so overshoot is bounded by one array's size.
UNINITIALIZED_TEST(OutOfMemoryLargeObjects) {
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif
  const size_t kOldGenerationLimit = 300 * MB;
  FLAG_max_old_space_size = kOldGenerationLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  Isolate* isolate =
      reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  OutOfMemoryState state;
  state.heap = heap;
  state.oom_triggered = false;
  heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
  const int kFixedArrayLength = 1000000;
  {
    HandleScope handle_scope(isolate);
    while (!state.oom_triggered) {
      factory->NewFixedArray(kFixedArrayLength);
    }
  }
  // Capacity must not exceed the limit, and the final allocation can
  // overshoot it by at most one large array.
  CHECK_LE(state.old_generation_capacity_at_oom, kOldGenerationLimit);
  CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
                                    FixedArray::SizeFor(kFixedArrayLength));
  CHECK_LE(
      state.memory_allocator_size_at_oom,
      MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
                                          2 * state.new_space_capacity_at_oom));
  reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
6392 :
// Checks that AutomaticallyRestoreInitialHeapLimit brings the raised heap
// limit back to its initial value once usage drops below the threshold:
// the second OOM must observe the same (initial) limit as the first.
UNINITIALIZED_TEST(RestoreHeapLimit) {
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif
  ManualGCScope manual_gc_scope;
  const size_t kOldGenerationLimit = 300 * MB;
  FLAG_max_old_space_size = kOldGenerationLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  Isolate* isolate =
      reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  OutOfMemoryState state;
  state.heap = heap;
  state.oom_triggered = false;
  heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
  heap->AutomaticallyRestoreInitialHeapLimit(0.5);
  const int kFixedArrayLength = 1000000;
  {
    HandleScope handle_scope(isolate);
    // First OOM: NearHeapLimitCallback raises the limit by 100MB.
    while (!state.oom_triggered) {
      factory->NewFixedArray(kFixedArrayLength);
    }
  }
  // Free everything; dropping below 50% of the limit restores it.
  heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
  state.oom_triggered = false;
  {
    HandleScope handle_scope(isolate);
    // Second OOM must fire at the restored (initial) limit.
    while (!state.oom_triggered) {
      factory->NewFixedArray(kFixedArrayLength);
    }
  }
  CHECK_EQ(state.current_heap_limit, state.initial_heap_limit);
  reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
6430 :
// Test helper: uncommits unused from-space memory and waits for the unmapper
// to finish, so tests can reliably assert on committed memory afterwards.
void HeapTester::UncommitFromSpace(Heap* heap) {
  heap->UncommitFromSpace();
  heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted();
}
6435 :
// Trivial native object whose Deleter matches the void(void*) finalizer
// signature expected by Managed<T> (see Regress8014 below).
class DeleteNative {
 public:
  static void Deleter(void* arg) {
    // static_cast is the correct named cast for void* -> T*;
    // reinterpret_cast is unnecessary here.
    delete static_cast<DeleteNative*>(arg);
  }
};
6442 :
// Regression test: a critical memory-pressure notification after creating
// many Managed objects with large reported external memory must not trigger
// a GC per object.
TEST(Regress8014) {
  Isolate* isolate = CcTest::InitIsolateOnce();
  Heap* heap = isolate->heap();
  {
    HandleScope scope(isolate);
    // Each Managed reports 1MB of external memory to the heap.
    for (int i = 0; i < 10000; i++) {
      auto handle = Managed<DeleteNative>::FromRawPtr(isolate, 1000000,
                                                      new DeleteNative());
      USE(handle);
    }
  }
  int ms_count = heap->ms_count();
  heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
  // Several GCs can be triggered by the above call.
  // The bad case triggers 10000 GCs.
  CHECK_LE(heap->ms_count(), ms_count + 10);
}
6460 :
// Regression test: the marking barrier on a descriptor array must also mark
// the array itself, otherwise a slot to an evacuated object (foo) in the
// descriptor array goes unrecorded and turns stale after evacuation.
TEST(Regress8617) {
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  LocalContext env;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  heap::SimulateFullSpace(heap->old_space());
  // Step 1. Create a function and ensure that it is in the old space.
  Handle<Object> foo =
      v8::Utils::OpenHandle(*CompileRun("function foo() { return 42; };"
                                        "foo;"));
  if (heap->InNewSpace(*foo)) {
    CcTest::CollectGarbage(NEW_SPACE);
    CcTest::CollectGarbage(NEW_SPACE);
  }
  // Step 2. Create an object with a reference to foo in the descriptor array.
  CompileRun(
      "var obj = {};"
      "obj.method = foo;"
      "obj;");
  // Step 3. Make sure that foo moves during Mark-Compact.
  Page* ec_page = Page::FromAddress(foo->ptr());
  heap::ForceEvacuationCandidate(ec_page);
  // Step 4. Start incremental marking.
  heap::SimulateIncrementalMarking(heap, false);
  CHECK(ec_page->IsEvacuationCandidate());
  // Step 5. Install a new descriptor array on the map of the object.
  // This runs the marking barrier for the descriptor array.
  // In the bad case it sets the number of marked descriptors but does not
  // change the color of the descriptor array.
  CompileRun("obj.bar = 10;");
  // Step 6. Promote the descriptor array to old space. During promotion
  // the Scavenger will not record the slot of foo in the descriptor array.
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  // Step 7. Complete the Mark-Compact.
  CcTest::CollectAllGarbage();
  // Step 8. Use the descriptor for foo, which contains a stale pointer.
  CompileRun("obj.method()");
}
6501 5 : }
6502 :
6503 : } // namespace heap
6504 : } // namespace internal
6505 85011 : } // namespace v8
6506 :
6507 : #undef __
|