Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Redistribution and use in source and binary forms, with or without
3 : // modification, are permitted provided that the following conditions are
4 : // met:
5 : //
6 : // * Redistributions of source code must retain the above copyright
7 : // notice, this list of conditions and the following disclaimer.
8 : // * Redistributions in binary form must reproduce the above
9 : // copyright notice, this list of conditions and the following
10 : // disclaimer in the documentation and/or other materials provided
11 : // with the distribution.
12 : // * Neither the name of Google Inc. nor the names of its
13 : // contributors may be used to endorse or promote products derived
14 : // from this software without specific prior written permission.
15 : //
16 : // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 : // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 : // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 : // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 : // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 : // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 : // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 : // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 : // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 : // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 : // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 :
28 : #include <stdlib.h>
29 : #include <utility>
30 :
31 : #include "src/api-inl.h"
32 : #include "src/assembler-inl.h"
33 : #include "src/compilation-cache.h"
34 : #include "src/debug/debug.h"
35 : #include "src/deoptimizer.h"
36 : #include "src/elements.h"
37 : #include "src/execution.h"
38 : #include "src/field-type.h"
39 : #include "src/global-handles.h"
40 : #include "src/hash-seed-inl.h"
41 : #include "src/heap/factory.h"
42 : #include "src/heap/gc-tracer.h"
43 : #include "src/heap/heap-inl.h"
44 : #include "src/heap/incremental-marking.h"
45 : #include "src/heap/mark-compact.h"
46 : #include "src/heap/memory-reducer.h"
47 : #include "src/heap/remembered-set.h"
48 : #include "src/ic/ic.h"
49 : #include "src/macro-assembler-inl.h"
50 : #include "src/objects-inl.h"
51 : #include "src/objects/frame-array-inl.h"
52 : #include "src/objects/heap-number-inl.h"
53 : #include "src/objects/js-array-inl.h"
54 : #include "src/objects/js-collection-inl.h"
55 : #include "src/objects/managed.h"
56 : #include "src/objects/slots.h"
57 : #include "src/ostreams.h"
58 : #include "src/regexp/jsregexp.h"
59 : #include "src/snapshot/snapshot.h"
60 : #include "src/transitions.h"
61 : #include "test/cctest/cctest.h"
62 : #include "test/cctest/heap/heap-tester.h"
63 : #include "test/cctest/heap/heap-utils.h"
64 : #include "test/cctest/test-feedback-vector.h"
65 : #include "test/cctest/test-transitions.h"
66 :
67 : namespace v8 {
68 : namespace internal {
69 : namespace heap {
70 :
// Number of object instantiations after which pretenuring decisions apply.
// We only start allocation-site tracking with the second instantiation.
static const int kPretenureCreationCount =
    AllocationSite::kPretenureMinimumCreated + 1;
74 :
// Verifies basic invariants of |map|: it is a heap object (contained in the
// test heap in debug builds), its own map is the meta map, and it has the
// expected instance type and instance size.
static void CheckMap(Map map, int type, int instance_size) {
  CHECK(map->IsHeapObject());
#ifdef DEBUG
  CHECK(CcTest::heap()->Contains(map));
#endif
  // Every Map's map is the meta map.
  CHECK_EQ(ReadOnlyRoots(CcTest::heap()).meta_map(), map->map());
  CHECK_EQ(type, map->instance_type());
  CHECK_EQ(instance_size, map->instance_size());
}
84 :
85 :
// Spot-checks a few well-known read-only root maps for the expected instance
// types and sizes (kVariableSizeSentinel marks variable-sized objects).
TEST(HeapMaps) {
  CcTest::InitializeVM();
  ReadOnlyRoots roots(CcTest::heap());
  CheckMap(roots.meta_map(), MAP_TYPE, Map::kSize);
  CheckMap(roots.heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
  CheckMap(roots.fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
  CheckMap(roots.hash_table_map(), HASH_TABLE_TYPE, kVariableSizeSentinel);
  CheckMap(roots.string_map(), STRING_TYPE, kVariableSizeSentinel);
}
95 :
96 10 : static void VerifyStoredPrototypeMap(Isolate* isolate,
97 : int stored_map_context_index,
98 : int stored_ctor_context_index) {
99 10 : Handle<Context> context = isolate->native_context();
100 :
101 : Handle<Map> this_map(Map::cast(context->get(stored_map_context_index)),
102 : isolate);
103 :
104 : Handle<JSFunction> fun(
105 : JSFunction::cast(context->get(stored_ctor_context_index)), isolate);
106 : Handle<JSObject> proto(JSObject::cast(fun->initial_map()->prototype()),
107 : isolate);
108 : Handle<Map> that_map(proto->map(), isolate);
109 :
110 10 : CHECK(proto->HasFastProperties());
111 10 : CHECK_EQ(*this_map, *that_map);
112 10 : }
113 :
// Checks that critical maps stored on the context (mostly used for fast-path
// checks) are unchanged after initialization.
TEST(ContextMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  // String.prototype's map and RegExp.prototype's map are cached directly on
  // the native context.
  VerifyStoredPrototypeMap(isolate,
                           Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX,
                           Context::STRING_FUNCTION_INDEX);
  VerifyStoredPrototypeMap(isolate, Context::REGEXP_PROTOTYPE_MAP_INDEX,
                           Context::REGEXP_FUNCTION_INDEX);
}
127 :
// Checks that the prototype objects cached on the native context match what
// is observable from JavaScript right after initialization.
TEST(InitialObjects) {
  LocalContext env;
  HandleScope scope(CcTest::i_isolate());
  Handle<Context> context = v8::Utils::OpenHandle(*env);
  // Initial ArrayIterator prototype.
  CHECK_EQ(
      context->initial_array_iterator_prototype(),
      *v8::Utils::OpenHandle(*CompileRun("[][Symbol.iterator]().__proto__")));
  // Initial Array prototype.
  CHECK_EQ(context->initial_array_prototype(),
           *v8::Utils::OpenHandle(*CompileRun("Array.prototype")));
  // Initial Generator prototype.
  CHECK_EQ(context->initial_generator_prototype(),
           *v8::Utils::OpenHandle(
               *CompileRun("(function*(){}).__proto__.prototype")));
  // Initial Iterator prototype.
  CHECK_EQ(context->initial_iterator_prototype(),
           *v8::Utils::OpenHandle(
               *CompileRun("[][Symbol.iterator]().__proto__.__proto__")));
  // Initial Object prototype.
  CHECK_EQ(context->initial_object_prototype(),
           *v8::Utils::OpenHandle(*CompileRun("Object.prototype")));
}
151 :
152 20 : static void CheckOddball(Isolate* isolate, Object obj, const char* string) {
153 20 : CHECK(obj->IsOddball());
154 : Handle<Object> handle(obj, isolate);
155 40 : Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
156 20 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
157 20 : }
158 :
159 15 : static void CheckSmi(Isolate* isolate, int value, const char* string) {
160 : Handle<Object> handle(Smi::FromInt(value), isolate);
161 30 : Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
162 15 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
163 15 : }
164 :
165 :
166 5 : static void CheckNumber(Isolate* isolate, double value, const char* string) {
167 5 : Handle<Object> number = isolate->factory()->NewNumber(value);
168 5 : CHECK(number->IsNumber());
169 : Handle<Object> print_string =
170 10 : Object::ToString(isolate, number).ToHandleChecked();
171 5 : CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
172 5 : }
173 :
174 10 : void CheckEmbeddedObjectsAreEqual(Handle<Code> lhs, Handle<Code> rhs) {
175 : int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
176 10 : RelocIterator lhs_it(*lhs, mode_mask);
177 10 : RelocIterator rhs_it(*rhs, mode_mask);
178 30 : while (!lhs_it.done() && !rhs_it.done()) {
179 10 : CHECK(lhs_it.rinfo()->target_object() == rhs_it.rinfo()->target_object());
180 :
181 10 : lhs_it.next();
182 10 : rhs_it.next();
183 : }
184 10 : CHECK(lhs_it.done() == rhs_it.done());
185 10 : }
186 :
// Tests that a Code object with an embedded new-space reference can be
// copied and that both copies keep equal embedded objects, also after a
// full GC.
HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope sc(isolate);

  Handle<HeapNumber> value = factory->NewHeapNumber(1.000123);
  CHECK(Heap::InYoungGeneration(*value));

  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                      ExternalAssemblerBuffer(buffer, sizeof(buffer)));
  // Add a new-space reference to the code.
  masm.Push(value);

  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());

  Handle<Code> copy;
  {
    // Copying code requires the code space to be writable.
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    copy = factory->CopyCode(code);
  }

  CheckEmbeddedObjectsAreEqual(code, copy);
  CcTest::CollectAllAvailableGarbage();
  CheckEmbeddedObjectsAreEqual(code, copy);
}
217 :
// Verifies Isolate::FindCodeObject: every tagged-aligned inner address of a
// Code object must map back to that object, while addresses inside a
// different Code object must not.
static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(AssemblerOptions{});

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  CHECK(code->IsCode());

  HeapObject obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();

  // Every inner pointer of the object resolves back to it.
  for (int i = 0; i < obj->Size(); i += kTaggedSize) {
    Object found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  }

  // An inner pointer of a second, distinct Code object must not resolve to
  // the first one.
  Handle<Code> copy =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  HeapObject obj_copy = HeapObject::cast(*copy);
  Object not_right =
      isolate->FindCodeObject(obj_copy->address() + obj_copy->Size() / 2);
  CHECK(not_right != *code);
}
247 :
248 :
// Tests that a handle to an Object with payload zero is still a non-null
// handle: handle null-ness refers to the handle location, not the value.
TEST(HandleNull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope outer_scope(isolate);
  LocalContext context;
  Handle<Object> n(Object(0), isolate);
  CHECK(!n.is_null());
}
257 :
258 :
// Exercises basic heap object allocation via the Factory: Smi vs. HeapNumber
// representation boundaries, oddball/Smi/Number ToString, and FindCodeObject.
TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  // A non-integral value must be represented as a HeapNumber.
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  // Small integral doubles are canonicalized to Smis.
  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  // The extreme Smi values still fit in the Smi representation.
  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  // Values just outside the Smi range become HeapNumbers.
  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs
  ReadOnlyRoots roots(heap);
  CheckOddball(isolate, roots.true_value(), "true");
  CheckOddball(isolate, roots.false_value(), "false");
  CheckOddball(isolate, roots.null_value(), "null");
  CheckOddball(isolate, roots.undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}
341 :
// Smoke-tests pointer tagging: object-pointer alignment is idempotent for an
// already-aligned size, and the extreme Smi values pass the Smi check.
TEST(Tagging) {
  CcTest::InitializeVM();
  int request = 24;
  CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
  CHECK(Smi::FromInt(42)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
  CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
}
350 :
351 :
// Tests that objects reachable from the global object (directly or through
// properties) survive new-space GCs, while handle scopes control rooting of
// intermediates.
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  CcTest::CollectGarbage(NEW_SPACE);

  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunctionForTest(name);
    Object::SetProperty(isolate, global, name, function).Check();
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
    Object::SetProperty(isolate, obj, prop_namex, twenty_four).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(isolate, obj, prop_namex).ToHandleChecked());
  }

  CcTest::CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(isolate, global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    Object::SetProperty(isolate, global, obj_name, obj).Check();
    Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
  }

  // After gc, it should survive.
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(isolate, global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
}
414 :
415 :
416 25 : static void VerifyStringAllocation(Isolate* isolate, const char* string) {
417 : HandleScope scope(isolate);
418 50 : Handle<String> s = isolate->factory()->NewStringFromUtf8(
419 : CStrVector(string)).ToHandleChecked();
420 25 : CHECK_EQ(StrLength(string), s->length());
421 385 : for (int index = 0; index < s->length(); index++) {
422 180 : CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
423 : }
424 25 : }
425 :
426 :
427 26661 : TEST(String) {
428 5 : CcTest::InitializeVM();
429 5 : Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
430 :
431 5 : VerifyStringAllocation(isolate, "a");
432 5 : VerifyStringAllocation(isolate, "ab");
433 5 : VerifyStringAllocation(isolate, "abc");
434 5 : VerifyStringAllocation(isolate, "abcd");
435 5 : VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
436 5 : }
437 :
438 :
// Tests that a string created inside a local handle scope is usable and has
// the expected length.
TEST(LocalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope scope(CcTest::isolate());
  const char* name = "Kasper the spunky";
  Handle<String> string = factory->NewStringFromAsciiChecked(name);
  CHECK_EQ(StrLength(name), string->length());
}
449 :
450 :
// Tests that global handles keep their targets alive across a scavenge and
// that aliasing handles (h1/h3 and h2/h4) still refer to the same objects
// afterwards.
TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    // h1/h3 alias the string, h2/h4 alias the number.
    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}
490 :
491 :
// Set by TestWeakGlobalHandleCallback when a weak handle whose parameter id
// is 1234 has been cleared by the GC; reset by each test before use.
static bool WeakPointerCleared = false;
493 :
494 15 : static void TestWeakGlobalHandleCallback(
495 : const v8::WeakCallbackInfo<void>& data) {
496 : std::pair<v8::Persistent<v8::Value>*, int>* p =
497 : reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
498 : data.GetParameter());
499 15 : if (p->second == 1234) WeakPointerCleared = true;
500 15 : p->first->Reset();
501 15 : }
502 :
503 :
// Tests that a scavenge (new-space GC) treats weak global handles like
// strong roots: the weak handle must not be cleared by it.
TEST(WeakGlobalHandlesScavenge) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  // Scavenge treats weak pointers as normal roots.
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK(!WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}
540 :
// Tests that a weak handle to an unmodified API object IS cleared by a
// scavenge when the object is otherwise unreachable.
TEST(WeakGlobalUnmodifiedApiHandlesScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    // Create an Api object that is unmodified.
    Local<v8::Function> function = FunctionTemplate::New(context->GetIsolate())
                                       ->GetFunction(context.local())
                                       .ToLocalChecked();
    Local<v8::Object> i =
        function->NewInstance(context.local()).ToLocalChecked();
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*u);
    h2 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);
  CHECK((*h1)->IsHeapNumber());
  CHECK(WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
}
578 :
// Tests that a weak handle to an API object whose map differs from the
// constructor's (extra property on the instance template) is NOT cleared by
// a scavenge.
TEST(WeakGlobalApiHandleModifiedMapScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;

  {
    HandleScope scope(isolate);

    // Create an API object which does not have the same map as constructor.
    auto function_template = FunctionTemplate::New(context->GetIsolate());
    auto instance_t = function_template->InstanceTemplate();
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "a",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    auto function =
        function_template->GetFunction(context.local()).ToLocalChecked();
    auto i = function->NewInstance(context.local()).ToLocalChecked();

    h1 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
  GlobalHandles::MakeWeak(
      h1.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(!WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
}
615 :
// Tests that a weak handle to an API object that has elements (the indexed
// properties "1" and "2") is NOT cleared by a scavenge.
TEST(WeakGlobalApiHandleWithElementsScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;

  {
    HandleScope scope(isolate);

    // Create an API object which has elements.
    auto function_template = FunctionTemplate::New(context->GetIsolate());
    auto instance_t = function_template->InstanceTemplate();
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "1",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "2",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    auto function =
        function_template->GetFunction(context.local()).ToLocalChecked();
    auto i = function->NewInstance(context.local()).ToLocalChecked();

    h1 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
  GlobalHandles::MakeWeak(
      h1.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(!WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
}
656 :
// Tests that a full GC clears a weak global handle whose target (first
// promoted to old space) is otherwise unreachable.
TEST(WeakGlobalHandlesMark) {
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  CcTest::CollectGarbage(OLD_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(!Heap::InYoungGeneration(*h1) && !Heap::InYoungGeneration(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  // Incremental marking potentially marked handles before they turned weak.
  CcTest::CollectAllGarbage();
  CHECK((*h1)->IsString());
  CHECK(WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
}
695 :
696 :
// Tests that a weak global handle survives a scavenge but is cleared by a
// mark-compact GC once the target is otherwise unreachable.
TEST(DeleteWeakGlobalHandle) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback,
                          v8::WeakCallbackType::kParameter);

  // Scavenge does not recognize weak reference.
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak reference properly.
  CcTest::CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}
731 :
732 26661 : TEST(BytecodeArray) {
733 5 : if (FLAG_never_compact) return;
734 : static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
735 : static const int kRawBytesSize = sizeof(kRawBytes);
736 : static const int kFrameSize = 32;
737 : static const int kParameterCount = 2;
738 :
739 : ManualGCScope manual_gc_scope;
740 5 : FLAG_manual_evacuation_candidates_selection = true;
741 5 : CcTest::InitializeVM();
742 : Isolate* isolate = CcTest::i_isolate();
743 : Heap* heap = isolate->heap();
744 : Factory* factory = isolate->factory();
745 : HandleScope scope(isolate);
746 :
747 5 : heap::SimulateFullSpace(heap->old_space());
748 : Handle<FixedArray> constant_pool =
749 5 : factory->NewFixedArray(5, AllocationType::kOld);
750 55 : for (int i = 0; i < 5; i++) {
751 25 : Handle<Object> number = factory->NewHeapNumber(i);
752 25 : constant_pool->set(i, *number);
753 : }
754 :
755 : // Allocate and initialize BytecodeArray
756 : Handle<BytecodeArray> array = factory->NewBytecodeArray(
757 5 : kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
758 :
759 5 : CHECK(array->IsBytecodeArray());
760 5 : CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
761 5 : CHECK_EQ(array->frame_size(), kFrameSize);
762 5 : CHECK_EQ(array->parameter_count(), kParameterCount);
763 5 : CHECK_EQ(array->constant_pool(), *constant_pool);
764 5 : CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
765 10 : CHECK_GE(array->address() + array->BytecodeArraySize(),
766 : array->GetFirstBytecodeAddress() + array->length());
767 45 : for (int i = 0; i < kRawBytesSize; i++) {
768 40 : CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
769 : kRawBytes[i]);
770 20 : CHECK_EQ(array->get(i), kRawBytes[i]);
771 : }
772 :
773 : FixedArray old_constant_pool_address = *constant_pool;
774 :
775 : // Perform a full garbage collection and force the constant pool to be on an
776 : // evacuation candidate.
777 : Page* evac_page = Page::FromHeapObject(*constant_pool);
778 5 : heap::ForceEvacuationCandidate(evac_page);
779 5 : CcTest::CollectAllGarbage();
780 :
781 : // BytecodeArray should survive.
782 5 : CHECK_EQ(array->length(), kRawBytesSize);
783 5 : CHECK_EQ(array->frame_size(), kFrameSize);
784 45 : for (int i = 0; i < kRawBytesSize; i++) {
785 40 : CHECK_EQ(array->get(i), kRawBytes[i]);
786 40 : CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
787 : kRawBytes[i]);
788 : }
789 :
790 : // Constant pool should have been migrated.
791 5 : CHECK_EQ(array->constant_pool(), *constant_pool);
792 5 : CHECK_NE(array->constant_pool(), old_constant_pool_address);
793 : }
794 :
// Tests bytecode age bookkeeping: a fresh BytecodeArray starts at
// kFirstBytecodeAge, MakeOlder() advances the age, and the age saturates at
// kLastBytecodeAge.
TEST(BytecodeArrayAging) {
  static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
  static const int kRawBytesSize = sizeof(kRawBytes);
  static const int kFrameSize = 32;
  static const int kParameterCount = 2;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);

  Handle<BytecodeArray> array =
      factory->NewBytecodeArray(kRawBytesSize, kRawBytes, kFrameSize,
                                kParameterCount, factory->empty_fixed_array());

  CHECK_EQ(BytecodeArray::kFirstBytecodeAge, array->bytecode_age());
  array->MakeOlder();
  CHECK_EQ(BytecodeArray::kQuadragenarianBytecodeAge, array->bytecode_age());
  // Aging past the last age must not advance further.
  array->set_bytecode_age(BytecodeArray::kLastBytecodeAge);
  array->MakeOlder();
  CHECK_EQ(BytecodeArray::kLastBytecodeAge, array->bytecode_age());
}
816 :
// nullptr-terminated table of strings (JavaScript/Java reserved words) used
// to exercise string internalization and the string table.
static const char* not_so_random_string_table[] = {
    "abstract",
    "boolean",
    "break",
    "byte",
    "case",
    "catch",
    "char",
    "class",
    "const",
    "continue",
    "debugger",
    "default",
    "delete",
    "do",
    "double",
    "else",
    "enum",
    "export",
    "extends",
    "false",
    "final",
    "finally",
    "float",
    "for",
    "function",
    "goto",
    "if",
    "implements",
    "import",
    "in",
    "instanceof",
    "int",
    "interface",
    "long",
    "native",
    "new",
    "null",
    "package",
    "private",
    "protected",
    "public",
    "return",
    "short",
    "static",
    "super",
    "switch",
    "synchronized",
    "this",
    "throw",
    "throws",
    "transient",
    "true",
    "try",
    "typeof",
    "var",
    "void",
    "volatile",
    "while",
    "with",
    nullptr
};
879 :
880 10 : static void CheckInternalizedStrings(const char** strings) {
881 : Isolate* isolate = CcTest::i_isolate();
882 : Factory* factory = isolate->factory();
883 600 : for (const char* string = *strings; *strings != nullptr;
884 : string = *strings++) {
885 : HandleScope scope(isolate);
886 : Handle<String> a =
887 590 : isolate->factory()->InternalizeUtf8String(CStrVector(string));
888 : // InternalizeUtf8String may return a failure if a GC is needed.
889 1180 : CHECK(a->IsInternalizedString());
890 590 : Handle<String> b = factory->InternalizeUtf8String(string);
891 590 : CHECK_EQ(*b, *a);
892 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
893 590 : b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
894 590 : CHECK_EQ(*b, *a);
895 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
896 : }
897 10 : }
898 :


// Internalizing the same table of strings twice must be idempotent: the
// second pass has to find the entries created by the first one.
TEST(StringTable) {
  CcTest::InitializeVM();

  v8::HandleScope sc(CcTest::isolate());
  CheckInternalizedStrings(not_so_random_string_table);
  CheckInternalizedStrings(not_so_random_string_table);
}
907 :

// Allocates a function and an instance of it, and checks that named
// properties can be added both to the instance and to the function object
// itself.
TEST(FunctionAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunctionForTest(name);

  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  // Set and read back a property on an instance created from the function.
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
  // Check that we can add properties to function objects.
  Object::SetProperty(isolate, function, prop_name, twenty_four).Check();
  CHECK_EQ(
      Smi::FromInt(24),
      *Object::GetProperty(isolate, function, prop_name).ToHandleChecked());
}
932 :

// Exercises adding and deleting named properties on a plain JS object in
// several orders, and checks that plain strings and internalized strings
// are interchangeable as property keys.
TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(
      String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
  Handle<Object> object =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
                          object_string)
          .ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first
  Object::SetProperty(isolate, obj, first, one).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // delete first
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first and then second
  Object::SetProperty(isolate, obj, first, one).Check();
  Object::SetProperty(isolate, obj, second, two).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete first and then second (insertion order)
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // add first and then second
  Object::SetProperty(isolate, obj, first, one).Check();
  Object::SetProperty(isolate, obj, second, two).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete second and then first (reverse insertion order)
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // check string and internalized string match
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  Object::SetProperty(isolate, obj, s1, one).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  Object::SetProperty(isolate, obj, s2_string, one).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}
1010 :

// Checks that adding a property to a fresh instance transitions the object
// to a new map (the initial map must stay unchanged for future instances).
TEST(JSObjectMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<JSFunction> function = factory->NewFunctionForTest(name);

  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<JSObject> obj = factory->NewJSObject(function);
  Handle<Map> initial_map(function->initial_map(), isolate);

  // Set a property.
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());

  // Check the map has changed
  CHECK(*initial_map != obj->map());
}
1034 :

// Exercises JSArray length handling: fast (Smi) elements for small indices,
// and the transition to dictionary (slow) elements once the length exceeds
// the Smi range.
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  Handle<Object> fun_obj =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(), name)
          .ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::kZero, array->length());
  // Must be in fast mode.
  CHECK(array->HasSmiOrObjectElements());

  // array[length] = name.
  Object::SetElement(isolate, array, 0, name, ShouldThrow::kDontThrow).Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set array length with larger than smi value; this forces the elements
  // into dictionary mode.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name; storing at the high index must bump the length by
  // one and keep element 0 intact.
  Object::SetElement(isolate, array, int_length, name, ShouldThrow::kDontThrow)
      .Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}
1086 :

// Checks Factory::CopyJSObject: the clone must share the original's
// property and element values, and subsequent mutation of the clone must
// not affect the original.
TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(
      String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
  Handle<Object> object =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
                          object_string)
          .ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // Populate both named properties and indexed elements.
  Object::SetProperty(isolate, obj, first, one).Check();
  Object::SetProperty(isolate, obj, second, two).Check();

  Object::SetElement(isolate, obj, 0, first, ShouldThrow::kDontThrow).Check();
  Object::SetElement(isolate, obj, 1, second, ShouldThrow::kDontThrow).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  // Clone must carry the same element and property values as the original.
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values on the clone only.
  Object::SetProperty(isolate, clone, first, two).Check();
  Object::SetProperty(isolate, clone, second, one).Check();

  Object::SetElement(isolate, clone, 0, second, ShouldThrow::kDontThrow)
      .Check();
  Object::SetElement(isolate, clone, 1, first, ShouldThrow::kDontThrow).Check();

  // The original must be unaffected: its values now match the clone's in
  // swapped positions.
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}
1155 :

// Allocates one-byte and multi-byte (UTF-8) strings of lengths 0..99 and
// checks that both the internalized and the plain factory paths report the
// character length (not the byte length) correctly.
TEST(StringAllocation) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  // A single 3-byte UTF-8 sequence (one character per 3 input bytes).
  const unsigned char chars[] = {0xE5, 0xA4, 0xA7};
  for (int length = 0; length < 100; length++) {
    v8::HandleScope scope(CcTest::isolate());
    char* non_one_byte = NewArray<char>(3 * length + 1);
    char* one_byte = NewArray<char>(length + 1);
    non_one_byte[3 * length] = 0;
    one_byte[length] = 0;
    for (int i = 0; i < length; i++) {
      one_byte[i] = 'a';
      non_one_byte[3 * i] = chars[0];
      non_one_byte[3 * i + 1] = chars[1];
      non_one_byte[3 * i + 2] = chars[2];
    }
    // length characters are encoded in 3 * length UTF-8 bytes.
    Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
        Vector<const char>(non_one_byte, 3 * length));
    CHECK_EQ(length, non_one_byte_sym->length());
    Handle<String> one_byte_sym =
        factory->InternalizeOneByteString(OneByteVector(one_byte, length));
    CHECK_EQ(length, one_byte_sym->length());
    Handle<String> non_one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
            .ToHandleChecked();
    non_one_byte_str->Hash();
    CHECK_EQ(length, non_one_byte_str->length());
    Handle<String> one_byte_str =
        factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
            .ToHandleChecked();
    one_byte_str->Hash();
    CHECK_EQ(length, one_byte_str->length());
    DeleteArray(non_one_byte);
    DeleteArray(one_byte);
  }
}
1195 :
1196 :
1197 5 : static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1198 : // Count the number of objects found in the heap.
1199 : int found_count = 0;
1200 10 : HeapIterator iterator(heap);
1201 70514 : for (HeapObject obj = iterator.next(); !obj.is_null();
1202 : obj = iterator.next()) {
1203 458276 : for (int i = 0; i < size; i++) {
1204 423024 : if (*objs[i] == obj) {
1205 30 : found_count++;
1206 : }
1207 : }
1208 : }
1209 5 : return found_count;
1210 : }
1211 :


// Allocates objects in new space, old space and large-object space and
// checks that a full HeapIterator pass visits every one of them.
TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] =
      factory->NewJSArray(10, HOLEY_ELEMENTS, AllocationType::kOld);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", AllocationType::kOld);

  // Allocate a large string (for large object space).
  int large_size = kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] =
      factory->NewStringFromAsciiChecked(str, AllocationType::kOld);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] =
      Handle<Map>(HeapObject::cast(*objs[0])->map(), isolate);

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}
1250 :
// Checks that bytecode of a function that is not called again is flushed by
// the GC after enough full collections, and that the function can then be
// lazily recompiled on the next call.
TEST(TestBytecodeFlushing) {
#ifndef V8_LITE_MODE
  // Disable optimization so the function stays on its bytecode (optimized
  // code would keep it alive / make it ineligible for flushing).
  FLAG_opt = false;
  FLAG_always_opt = false;
  i::FLAG_optimize_for_size = false;
#endif  // V8_LITE_MODE
  i::FLAG_flush_bytecode = true;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  Isolate* i_isolate = CcTest::i_isolate();
  Factory* factory = i_isolate->factory();

  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value =
        Object::GetProperty(i_isolate, i_isolate->global_object(), foo_name)
            .ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    CcTest::CollectAllGarbage();
    CcTest::CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking so the bytecode ages past
    // the flushing threshold.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      CcTest::CollectAllGarbage();
    }

    // foo should no longer be in the compilation cache
    CHECK(!function->shared()->is_compiled());
    CHECK(!function->is_compiled());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
}
1311 :
1312 : #ifndef V8_LITE_MODE
// Checks that a function enqueued as a bytecode-flushing candidate can
// still be optimized while incremental marking is active, and that the
// optimized function is not flushed by the next GC.
TEST(TestOptimizeAfterBytecodeFlushingCandidate) {
  FLAG_opt = true;
  FLAG_always_opt = false;
  i::FLAG_optimize_for_size = false;
  i::FLAG_incremental_marking = true;
  i::FLAG_flush_bytecode = true;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate, isolate->global_object(), foo_name)
          .ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking; after enough aging
  // cycles the unused bytecode gets flushed.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    heap::SimulateIncrementalMarking(CcTest::heap());
    CcTest::CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled());
  CHECK(!function->is_compiled());

  // This compile will compile the function again.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    heap::SimulateIncrementalMarking(CcTest::heap());
    if (function->shared()->GetBytecodeArray()->IsOld()) break;
    CcTest::CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC and make sure the candidate wasn't flushed.
  CcTest::CollectAllGarbage();
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1389 :
1390 : #endif // V8_LITE_MODE

// Checks that installing optimized code into a lazily-compiled closure
// while incremental marking is active goes through the incremental write
// barrier without corrupting the heap.
TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
  if (!FLAG_incremental_marking) return;
  // Turn off always_opt because it interferes with running the built-in for
  // the last call to g().
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());

  // f and g share the same SharedFunctionInfo; only f is called (and thus
  // compiled) up front.
  CompileRun(
      "function make_closure(x) {"
      "  return function() { return x + 3 };"
      "}"
      "var f = make_closure(5); f();"
      "var g = make_closure(5);");

  // Check f is compiled.
  Handle<String> f_name = factory->InternalizeUtf8String("f");
  Handle<Object> f_value =
      Object::GetProperty(isolate, isolate->global_object(), f_name)
          .ToHandleChecked();
  Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
  CHECK(f_function->is_compiled());

  // Check g is not compiled.
  Handle<String> g_name = factory->InternalizeUtf8String("g");
  Handle<Object> g_value =
      Object::GetProperty(isolate, isolate->global_object(), g_name)
          .ToHandleChecked();
  Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
  CHECK(!g_function->is_compiled());

  heap::SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(f); f();");

  // g should now have available an optimized function, unmarked by gc. The
  // CompileLazy built-in will discover it and install it in the closure, and
  // the incremental write barrier should be used.
  CompileRun("g();");
  CHECK(g_function->is_compiled());
}
1436 :
// Checks that a script's compilation-cache entry survives a GC while its
// bytecode is young, and is evicted once the bytecode has aged past the
// flushing threshold.
TEST(CompilationCacheCachingBehavior) {
  // If we do not have the compilation cache turned off, this test is invalid.
  if (!FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo();";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // The script should be in the cache now.
  {
    v8::HandleScope scope(CcTest::isolate());
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(!cached_script.is_null());
  }

  // Check that the code cache entry survives at least one GC.
  {
    CcTest::CollectAllGarbage();
    v8::HandleScope scope(CcTest::isolate());
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(!cached_script.is_null());

    // Progress code age until it's old and ready for GC.
    Handle<SharedFunctionInfo> shared = cached_script.ToHandleChecked();
    CHECK(shared->HasBytecodeArray());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      shared->GetBytecodeArray()->MakeOlder();
    }
  }

  CcTest::CollectAllGarbage();

  {
    v8::HandleScope scope(CcTest::isolate());
    // Ensure code aging cleared the entry from the cache.
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(cached_script.is_null());
  }
}
1505 :

// Defines an empty function named |name| in the current context, warms it
// up with two calls, and forces its optimization on the third call via the
// %OptimizeFunctionOnNextCall intrinsic (requires --allow-natives-syntax).
static void OptimizeEmptyFunction(const char* name) {
  HandleScope scope(CcTest::i_isolate());
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();",
           name, name, name, name, name);
  CompileRun(source.start());
}
1518 :
1519 :
1520 : // Count the number of native contexts in the weak list of native contexts.
1521 366 : int CountNativeContexts() {
1522 : int count = 0;
1523 366 : Object object = CcTest::heap()->native_contexts_list();
1524 4266 : while (!object->IsUndefined(CcTest::i_isolate())) {
1525 1950 : count++;
1526 1950 : object = Context::cast(object)->next_context_link();
1527 : }
1528 366 : return count;
1529 : }
1530 :
// Verifies that the heap's weak list of native contexts grows as contexts
// are created and shrinks again once contexts become unreachable and are
// reclaimed by mark-compact (scavenges must NOT drop them).
TEST(TestInternalWeakLists) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_optimizer()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    CcTest::CollectAllGarbage();

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    OptimizeEmptyFunction("f1");
    OptimizeEmptyFunction("f2");
    OptimizeEmptyFunction("f3");
    OptimizeEmptyFunction("f4");
    OptimizeEmptyFunction("f5");

    // Remove function f1, and
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    CcTest::CollectAllGarbage();

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }
    CcTest::CollectAllGarbage();
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }
    CcTest::CollectAllGarbage();

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::CollectAllGarbage();

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    i::Address* unsafe = reinterpret_cast<i::Address*>(*ctx[i]);
    *unsafe = ReadOnlyRoots(CcTest::heap()).undefined_value()->ptr();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1625 :

// Measures the heap growth caused by compiling a very large regexp versus a
// half-size one: the smaller regexp still qualifies for optimized regexp
// code and is therefore expected to generate more than twice the code.
TEST(TestSizeOfRegExpCode) {
  if (!FLAG_regexp_optimization) return;

  v8::V8::Initialize();

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  LocalContext context;

  // Adjust source below and this check to match
  // RegExpImpl::kRegExpTooLargeToOptimize.
  CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);

  // Compile a regexp that is much larger if we are using regexp optimizations.
  CompileRun(
      "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
      "var half_size_reg_exp;"
      "while (reg_exp_source.length < 20 * 1024) {"
      "  half_size_reg_exp = reg_exp_source;"
      "  reg_exp_source = reg_exp_source + reg_exp_source;"
      "}"
      // Flatten string.
      "reg_exp_source.match(/f/);");

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::CollectAllAvailableGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  CompileRun("'foo'.match(reg_exp_source);");
  CcTest::CollectAllAvailableGarbage();
  int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());

  CompileRun("'foo'.match(half_size_reg_exp);");
  CcTest::CollectAllAvailableGarbage();
  int size_with_optimized_regexp =
      static_cast<int>(CcTest::heap()->SizeOfObjects());

  int size_of_regexp_code = size_with_regexp - initial_size;

  // On some platforms the debug-code flag causes huge amounts of regexp code
  // to be emitted, breaking this test.
  if (!FLAG_debug_code) {
    CHECK_LE(size_of_regexp_code, 1 * MB);
  }

  // Small regexp is half the size, but compiles to more than twice the code
  // due to the optimization steps.
  CHECK_GE(size_with_optimized_regexp,
           size_with_regexp + size_of_regexp_code * 2);
}
1683 :
1684 :
// Verifies that Heap::SizeOfObjects tracks allocations precisely while
// objects are live and returns to the initial size after a full GC, even
// while concurrent sweeping may still be in progress.
HEAP_TEST(TestSizeOfObjects) {
  v8::V8::Initialize();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  MarkCompactCollector* collector = heap->mark_compact_collector();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::CollectAllAvailableGarbage();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(heap->SizeOfObjects());

  {
    HandleScope scope(isolate);
    // Allocate objects on several different old-space pages so that
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      isolate->factory()->NewFixedArray(8192, AllocationType::kOld);
      // SizeOfObjects must reflect each allocation immediately and exactly.
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(heap->SizeOfObjects()));
    }
  }
  // HandleScope closed: the arrays above are now unreachable garbage.

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  CcTest::CollectAllGarbage();
  // Normally sweeping would not be complete here, but no guarantees.
  CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
  // Waiting for sweeper threads should not change heap size.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
}
1724 :
1725 :
1726 26661 : TEST(TestAlignmentCalculations) {
1727 : // Maximum fill amounts are consistent.
1728 : int maximum_double_misalignment = kDoubleSize - kTaggedSize;
1729 5 : int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1730 5 : CHECK_EQ(0, max_word_fill);
1731 5 : int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1732 5 : CHECK_EQ(maximum_double_misalignment, max_double_fill);
1733 5 : int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1734 5 : CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1735 :
1736 : Address base = kNullAddress;
1737 : int fill = 0;
1738 :
1739 : // Word alignment never requires fill.
1740 5 : fill = Heap::GetFillToAlign(base, kWordAligned);
1741 5 : CHECK_EQ(0, fill);
1742 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kWordAligned);
1743 5 : CHECK_EQ(0, fill);
1744 :
1745 : // No fill is required when address is double aligned.
1746 5 : fill = Heap::GetFillToAlign(base, kDoubleAligned);
1747 5 : CHECK_EQ(0, fill);
1748 : // Fill is required if address is not double aligned.
1749 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleAligned);
1750 5 : CHECK_EQ(maximum_double_misalignment, fill);
1751 : // kDoubleUnaligned has the opposite fill amounts.
1752 5 : fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
1753 5 : CHECK_EQ(maximum_double_misalignment, fill);
1754 5 : fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleUnaligned);
1755 5 : CHECK_EQ(0, fill);
1756 5 : }
1757 :
1758 0 : static HeapObject NewSpaceAllocateAligned(int size,
1759 : AllocationAlignment alignment) {
1760 0 : Heap* heap = CcTest::heap();
1761 : AllocationResult allocation =
1762 : heap->new_space()->AllocateRawAligned(size, alignment);
1763 : HeapObject obj;
1764 : allocation.To(&obj);
1765 0 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1766 0 : return obj;
1767 : }
1768 :
1769 : // Get new space allocation into the desired alignment.
1770 10 : static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
1771 10 : Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1772 10 : int fill = Heap::GetFillToAlign(*top_addr, alignment);
1773 10 : int allocation = fill + offset;
1774 10 : if (allocation) {
1775 0 : NewSpaceAllocateAligned(allocation, kWordAligned);
1776 : }
1777 10 : return *top_addr;
1778 : }
1779 :
1780 :
// Checks that aligned new-space allocation places objects at the requested
// alignment and inserts a one-word filler exactly when the allocation top is
// misaligned. Only meaningful when kDoubleSize != kTaggedSize.
TEST(TestAlignedAllocation) {
  // Double misalignment is 4 on 32-bit platforms or when pointer compression
  // is enabled, 0 on 64-bit ones when pointer compression is disabled.
  const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  Address start;
  HeapObject obj;
  HeapObject filler;
  if (double_misalignment) {
    // Allocate a pointer sized object that must be double aligned at an
    // aligned address.
    start = AlignNewSpace(kDoubleAligned, 0);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
    CHECK(IsAligned(obj->address(), kDoubleAlignment));
    // There is no filler.
    CHECK_EQ(kTaggedSize, *top_addr - start);

    // Allocate a second pointer sized object that must be double aligned at an
    // unaligned address.
    start = AlignNewSpace(kDoubleAligned, kTaggedSize);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
    CHECK(IsAligned(obj->address(), kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
    // Total consumption is the object plus the alignment filler.
    CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);

    // Similarly for kDoubleUnaligned (object must sit one tagged word BEFORE
    // a double-aligned address, so the checks are offset by kTaggedSize).
    start = AlignNewSpace(kDoubleUnaligned, 0);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
    CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
    CHECK_EQ(kTaggedSize, *top_addr - start);
    start = AlignNewSpace(kDoubleUnaligned, kTaggedSize);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
    CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
    CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
  }
}
1822 :
1823 0 : static HeapObject OldSpaceAllocateAligned(int size,
1824 : AllocationAlignment alignment) {
1825 0 : Heap* heap = CcTest::heap();
1826 : AllocationResult allocation =
1827 0 : heap->old_space()->AllocateRawAligned(size, alignment);
1828 : HeapObject obj;
1829 : allocation.To(&obj);
1830 0 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1831 0 : return obj;
1832 : }
1833 :
1834 : // Get old space allocation into the desired alignment.
1835 10 : static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
1836 10 : Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
1837 10 : int fill = Heap::GetFillToAlign(*top_addr, alignment);
1838 10 : int allocation = fill + offset;
1839 10 : if (allocation) {
1840 0 : OldSpaceAllocateAligned(allocation, kWordAligned);
1841 : }
1842 10 : Address top = *top_addr;
1843 : // Now force the remaining allocation onto the free list.
1844 10 : CcTest::heap()->old_space()->FreeLinearAllocationArea();
1845 10 : return top;
1846 : }
1847 :
1848 :
1849 : // Test the case where allocation must be done from the free list, so filler
1850 : // may precede or follow the object.
1851 26661 : TEST(TestAlignedOverAllocation) {
1852 5 : Heap* heap = CcTest::heap();
1853 : // Test checks for fillers before and behind objects and requires a fresh
1854 : // page and empty free list.
1855 5 : heap::AbandonCurrentlyFreeMemory(heap->old_space());
1856 : // Allocate a dummy object to properly set up the linear allocation info.
1857 5 : AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
1858 5 : CHECK(!dummy.IsRetry());
1859 : heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kTaggedSize,
1860 5 : ClearRecordedSlots::kNo);
1861 :
1862 : // Double misalignment is 4 on 32-bit platforms or when pointer compression
1863 : // is enabled, 0 on 64-bit ones when pointer compression is disabled.
1864 : const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
1865 : Address start;
1866 : HeapObject obj;
1867 : HeapObject filler;
1868 : if (double_misalignment) {
1869 : start = AlignOldSpace(kDoubleAligned, 0);
1870 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1871 : // The object is aligned.
1872 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1873 : // Try the opposite alignment case.
1874 : start = AlignOldSpace(kDoubleAligned, kTaggedSize);
1875 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1876 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1877 : filler = HeapObject::FromAddress(start);
1878 : CHECK(obj != filler);
1879 : CHECK(filler->IsFiller());
1880 : CHECK_EQ(kTaggedSize, filler->Size());
1881 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1882 :
1883 : // Similarly for kDoubleUnaligned.
1884 : start = AlignOldSpace(kDoubleUnaligned, 0);
1885 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1886 : // The object is aligned.
1887 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1888 : // Try the opposite alignment case.
1889 : start = AlignOldSpace(kDoubleUnaligned, kTaggedSize);
1890 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1891 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1892 : filler = HeapObject::FromAddress(start);
1893 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1894 : }
1895 5 : }
1896 :
// Checks that freshly allocated HeapNumbers satisfy the map's required
// alignment in both new and old space, starting from every possible
// misalignment of the allocation top.
TEST(HeapNumberAlignment) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope sc(isolate);

  const auto required_alignment =
      HeapObject::RequiredAlignment(*factory->heap_number_map());
  const int maximum_misalignment =
      Heap::GetMaximumFillToAlign(required_alignment);

  // Iterate over every possible top misalignment (0 when no extra alignment
  // is required, so the loop body may run only once).
  for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
    AlignNewSpace(required_alignment, offset);
    Handle<Object> number_new = factory->NewNumber(1.000123);
    CHECK(number_new->IsHeapNumber());
    CHECK(Heap::InYoungGeneration(*number_new));
    // Zero fill-to-align means the allocation landed correctly aligned.
    CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
                                     required_alignment));

    AlignOldSpace(required_alignment, offset);
    Handle<Object> number_old =
        factory->NewNumber(1.000321, AllocationType::kOld);
    CHECK(number_old->IsHeapNumber());
    CHECK(heap->InOldSpace(*number_old));
    CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
                                     required_alignment));
  }
}
1926 :
// Same as HeapNumberAlignment above, but for MutableHeapNumbers.
TEST(MutableHeapNumberAlignment) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope sc(isolate);

  const auto required_alignment =
      HeapObject::RequiredAlignment(*factory->mutable_heap_number_map());
  const int maximum_misalignment =
      Heap::GetMaximumFillToAlign(required_alignment);

  // Iterate over every possible top misalignment (0 when no extra alignment
  // is required, so the loop body may run only once).
  for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
    AlignNewSpace(required_alignment, offset);
    Handle<Object> number_new = factory->NewMutableHeapNumber(1.000123);
    CHECK(number_new->IsMutableHeapNumber());
    CHECK(Heap::InYoungGeneration(*number_new));
    // Zero fill-to-align means the allocation landed correctly aligned.
    CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
                                     required_alignment));

    AlignOldSpace(required_alignment, offset);
    Handle<Object> number_old =
        factory->NewMutableHeapNumber(1.000321, AllocationType::kOld);
    CHECK(number_old->IsMutableHeapNumber());
    CHECK(heap->InOldSpace(*number_old));
    CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
                                     required_alignment));
  }
}
1956 :
1957 26661 : TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1958 5 : CcTest::InitializeVM();
1959 10 : HeapIterator iterator(CcTest::heap());
1960 5 : intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1961 : intptr_t size_of_objects_2 = 0;
1962 70444 : for (HeapObject obj = iterator.next(); !obj.is_null();
1963 : obj = iterator.next()) {
1964 35217 : if (!obj->IsFreeSpace()) {
1965 35217 : size_of_objects_2 += obj->Size();
1966 : }
1967 : }
1968 : // Delta must be within 5% of the larger result.
1969 : // TODO(gc): Tighten this up by distinguishing between byte
1970 : // arrays that are real and those that merely mark free space
1971 : // on the heap.
1972 5 : if (size_of_objects_1 > size_of_objects_2) {
1973 5 : intptr_t delta = size_of_objects_1 - size_of_objects_2;
1974 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1975 : ", "
1976 : "Iterator: %" V8PRIdPTR
1977 : ", "
1978 : "delta: %" V8PRIdPTR "\n",
1979 5 : size_of_objects_1, size_of_objects_2, delta);
1980 5 : CHECK_GT(size_of_objects_1 / 20, delta);
1981 : } else {
1982 0 : intptr_t delta = size_of_objects_2 - size_of_objects_1;
1983 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1984 : ", "
1985 : "Iterator: %" V8PRIdPTR
1986 : ", "
1987 : "delta: %" V8PRIdPTR "\n",
1988 0 : size_of_objects_1, size_of_objects_2, delta);
1989 0 : CHECK_GT(size_of_objects_2 / 20, delta);
1990 : }
1991 5 : }
1992 :
// Exercises explicit NewSpace::Grow/Shrink: growing doubles capacity,
// shrinking is a no-op while the space is in use, and shrinking after a
// scavenge halves capacity exactly once.
TEST(GrowAndShrinkNewSpace) {
  // Avoid shrinking new space in GC epilogue. This can happen if allocation
  // throughput samples have been taken while executing the benchmark.
  FLAG_predictable = true;

  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();

  // If new space is already at its maximum, growing is impossible — skip.
  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    return;
  }

  // Make sure we're in a consistent state to start out.
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  new_space->Shrink();

  // Explicitly growing should double the space capacity.
  size_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(2 * old_capacity, new_capacity);

  // Filling up the space must not change its capacity.
  old_capacity = new_space->TotalCapacity();
  {
    v8::HandleScope temporary_scope(CcTest::isolate());
    heap::SimulateFullSpace(new_space);
  }
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);

  // Explicitly shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);

  // Let the scavenger empty the new space.
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);
}
2050 :
// Verifies that CollectAllAvailableGarbage shrinks a previously grown (and
// then emptied) new space back to its original capacity.
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  // If new space is already at its maximum, growing is impossible — skip.
  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = heap->new_space();
  size_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(2 * old_capacity, new_capacity);
  // Fill the space with garbage that dies with the temporary scope.
  {
    v8::HandleScope temporary_scope(CcTest::isolate());
    heap::SimulateFullSpace(new_space);
  }
  CcTest::CollectAllAvailableGarbage();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);
}
2073 :
2074 60 : static int NumberOfGlobalObjects() {
2075 : int count = 0;
2076 120 : HeapIterator iterator(CcTest::heap());
2077 806190 : for (HeapObject obj = iterator.next(); !obj.is_null();
2078 : obj = iterator.next()) {
2079 403035 : if (obj->IsJSGlobalObject()) count++;
2080 : }
2081 60 : return count;
2082 : }
2083 :
2084 :
2085 : // Test that we don't embed maps from foreign contexts into
2086 : // optimized code.
2087 26661 : TEST(LeakNativeContextViaMap) {
2088 5 : FLAG_allow_natives_syntax = true;
2089 5 : v8::Isolate* isolate = CcTest::isolate();
2090 10 : v8::HandleScope outer_scope(isolate);
2091 : v8::Persistent<v8::Context> ctx1p;
2092 : v8::Persistent<v8::Context> ctx2p;
2093 : {
2094 10 : v8::HandleScope scope(isolate);
2095 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
2096 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
2097 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2098 : }
2099 :
2100 5 : CcTest::CollectAllAvailableGarbage();
2101 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2102 :
2103 : {
2104 10 : v8::HandleScope inner_scope(isolate);
2105 : CompileRun("var v = {x: 42}");
2106 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2107 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2108 : v8::Local<v8::Value> v =
2109 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2110 5 : ctx2->Enter();
2111 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2112 : v8::Local<v8::Value> res = CompileRun(
2113 : "function f() { return o.x; }"
2114 : "for (var i = 0; i < 10; ++i) f();"
2115 : "%OptimizeFunctionOnNextCall(f);"
2116 : "f();");
2117 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2118 25 : CHECK(ctx2->Global()
2119 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2120 : .FromJust());
2121 5 : ctx2->Exit();
2122 5 : v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2123 : ctx1p.Reset();
2124 5 : isolate->ContextDisposedNotification();
2125 : }
2126 5 : CcTest::CollectAllAvailableGarbage();
2127 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2128 : ctx2p.Reset();
2129 5 : CcTest::CollectAllAvailableGarbage();
2130 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2131 5 : }
2132 :
2133 :
2134 : // Test that we don't embed functions from foreign contexts into
2135 : // optimized code.
TEST(LeakNativeContextViaFunction) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both contexts are alive: expect their two global objects.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = function() { return 42; }");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    // Leak a function from ctx1 into ctx2 and optimize code that calls it.
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
        "f(o);");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Overwrite the reference so only the optimized code could keep ctx1's
    // function (and thus its native context) alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1 must now be collectible: only ctx2's global remains.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2181 :
2182 :
// Same leak check as above, but for maps embedded via keyed (element) loads.
TEST(LeakNativeContextViaMapKeyed) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both contexts are alive: expect their two global objects.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = [42, 43]");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    // Leak an array from ctx1 into ctx2 and optimize a keyed load on it.
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Overwrite the reference so only the optimized code could keep ctx1's
    // map (and thus its native context) alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1 must now be collectible: only ctx2's global remains.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2228 :
2229 :
// Same leak check as above, but for maps reached through a prototype chain.
TEST(LeakNativeContextViaMapProto) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  // Both contexts are alive: expect their two global objects.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    // Leak an object from ctx1 into ctx2 and optimize code that installs it
    // as a prototype.
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Overwrite the reference so only the optimized code could keep ctx1's
    // map (and thus its native context) alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  // ctx1 must now be collectible: only ctx2's global remains.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2279 :
2280 :
// Regression-style check that running optimized code containing an
// `instanceof` while the containing code object is already black during
// incremental marking does not miss a write barrier (verified via the final
// full GC, plus heap verification when VERIFY_HEAP is on).
TEST(InstanceOfStubWriteBarrier) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer()) return;
  if (FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    // Warm up and optimize f; g later calls f with a fresh hidden class.
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  // Restart incremental marking from a clean state.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);

  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  IncrementalMarking::MarkingState* marking_state = marking->marking_state();

  // Step the marker until f's code object has been marked black.
  const double kStepSizeInMs = 100;
  while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->V8Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    StepOrigin::kV8);
  }

  CHECK(marking->IsMarking());

  {
    v8::HandleScope scope(CcTest::isolate());
    v8::Local<v8::Object> global = CcTest::global();
    // Run g() while marking is active; any missed write barrier in the
    // instanceof path would corrupt the marking state.
    v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
        global->Get(ctx, v8_str("g")).ToLocalChecked());
    g->Call(ctx, global, 0, nullptr).ToLocalChecked();
  }

  // Finish marking and collect; heap verification catches any corruption.
  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
2341 :
// Verifies lifecycle of Heap::current_gc_flags_: reset after a full GC,
// preserved across scavenges while incremental marking is in progress.
HEAP_TEST(GCFlags) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();

  heap->set_current_gc_flags(Heap::kNoGCFlags);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Check whether we appropriately reset flags after GC.
  CcTest::heap()->CollectAllGarbage(Heap::kReduceMemoryFootprintMask,
                                    GarbageCollectionReason::kTesting);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  // Starting incremental marking with a flag should record that flag.
  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
                                i::GarbageCollectionReason::kTesting);
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  CcTest::CollectGarbage(NEW_SPACE);
  // NewSpace scavenges should not overwrite the flags.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  // A full GC (which finishes the marking cycle) resets the flags again.
  CcTest::CollectAllGarbage();
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
2373 :
HEAP_TEST(Regress845060) {
  // Regression test for crbug.com/845060, where a raw pointer to a string's
  // data was kept across an allocation. If the allocation causes GC and
  // moves the string, such raw pointers become invalid.
  FLAG_allow_natives_syntax = true;
  // Disable stress GC modes so the scenario below is deterministic.
  FLAG_stress_incremental_marking = false;
  FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  LocalContext context;
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();

  // Preparation: create a string in new space.
  Local<Value> str = CompileRun("var str = (new Array(10000)).join('x'); str");
  CHECK(Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));

  // Idle incremental marking sets the "kReduceMemoryFootprint" flag, which
  // causes from_space to be unmapped after scavenging.
  heap->StartIdleIncrementalMarking(GarbageCollectionReason::kTesting);
  CHECK(heap->ShouldReduceMemory());

  // Run the test (which allocates results) until the original string was
  // promoted to old space. Unmapping of from_space causes accesses to any
  // stale raw pointers to crash.
  CompileRun("while (%InNewSpace(str)) { str.split(''); }");
  CHECK(!Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
}
2401 :
// Checks that an idle notification with a long deadline finalizes an
// incremental marking cycle whose marking work is already complete,
// resulting in exactly one additional GC.
TEST(IdleNotificationFinishMarking) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  const int initial_gc_count = CcTest::heap()->gc_count();
  heap::SimulateFullSpace(CcTest::heap()->old_space());
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);

  // Starting marking must not itself trigger a GC.
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);

  // Drain the marking worklist so only finalization remains.
  const double kStepSizeInMs = 100;
  do {
    marking->V8Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    StepOrigin::kV8);
  } while (
      !CcTest::heap()->mark_compact_collector()->marking_worklist()->IsEmpty());

  marking->SetWeakClosureWasOverApproximatedForTesting(true);

  // The next idle notification has to finish incremental marking.
  const double kLongIdleTime = 1000.0;
  CcTest::isolate()->IdleNotificationDeadline(
      (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
       static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
      kLongIdleTime);
  // Exactly one GC (the finalizing mark-compact) must have happened.
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
}
2433 :
2434 :
// Test that HAllocateObject will always return an object in new-space,
// even when new space has been filled up beforehand (forcing the optimized
// allocation path to GC/retry rather than fall back to old space).
TEST(OptimizedAllocationAlwaysInNewSpace) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  // Requires real optimization and no GC stress modes that would move or
  // pretenure the allocation.
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  heap::SimulateFullSpace(CcTest::heap()->new_space());
  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
  // The constructor creates an object with 33 in-object properties, large
  // enough to exercise the optimized (folded) allocation path.
  v8::Local<v8::Value> res = CompileRun(
      "function c(x) {"
      "  this.x = x;"
      "  for (var i = 0; i < 32; i++) {"
      "    this['x' + i] = x;"
      "  }"
      "}"
      "function f(x) { return new c(x); };"
      "f(1); f(2); f(3);"
      "%OptimizeFunctionOnNextCall(f);"
      "f(4);");

  CHECK_EQ(4, res.As<v8::Object>()
                  ->GetRealNamedProperty(ctx, v8_str("x"))
                  .ToLocalChecked()
                  ->Int32Value(ctx)
                  .FromJust());

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));

  // The optimized allocation must have landed in the young generation.
  CHECK(Heap::InYoungGeneration(*o));
}
2470 :
2471 :
// Verifies that allocation-folded nested literals created repeatedly in
// optimized code are pretenured: both inner arrays and their backing
// stores must end up in old space.
TEST(OptimizedPretenuringAllocationFolding) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array();"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = [[{}], [1.1]];"
              "  }"
              "  return elements[number_elements-1]"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
2522 :
2523 :
// Verifies that object-array literals allocated many times in optimized
// code are pretenured into old space, including their elements store.
TEST(OptimizedPretenuringObjectArrayLiterals) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking) {
    return;
  }
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = [{}, {}, {}];"
              "  }"
              "  return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
2564 :
// Verifies that a nested literal kept alive through its enclosing (dead)
// literal is NOT pretenured: nested allocation sites only pretenure when
// the top-level literal does.
TEST(OptimizedPretenuringNestedInObjectProperties) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking) {
    return;
  }
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // Keep the nested literal alive while its root is freed
  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "let number_elements = %d;"
              "let elements = new Array(number_elements);"
              "function f() {"
              "  for (let i = 0; i < number_elements; i++) {"
              "    let l = {a: {c: 2.2, d: {e: 3.3}}, b: 1.1}; "
              "    elements[i] = l.a;"
              "  }"
              "  return elements[number_elements-1];"
              "};"
              "f(); gc(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  // Nested literal sites are only pretenured if the top level
  // literal is pretenured
  CHECK(Heap::InYoungGeneration(*o));
}
2608 :
// Verifies pretenuring of object literals with mixed (tagged and double)
// in-object properties, covering both the boxed and the unboxed-double
// field representations.
TEST(OptimizedPretenuringMixedInObjectProperties) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }


  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
              "  }"
              "  return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  // Property 0 is the object {c: ..., d: ...}, property 1 is the double 1.1.
  FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
  FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
  CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
  // Double fields may be stored unboxed in the object; otherwise the boxed
  // HeapNumber must be in old space.
  if (!o->IsUnboxedDoubleField(idx2)) {
    CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
  } else {
    CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
  }

  // Same check one level down for the nested literal's fields.
  JSObject inner_object = JSObject::cast(o->RawFastPropertyAt(idx1));
  CHECK(CcTest::heap()->InOldSpace(inner_object));
  if (!inner_object->IsUnboxedDoubleField(idx1)) {
    CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
  } else {
    CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
  }
  CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
}
2665 :
2666 :
// Verifies pretenuring of literals with only double properties; such
// objects need no out-of-object property array at all.
TEST(OptimizedPretenuringDoubleArrayProperties) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // Note: the script returns elements[i - 1]; |i| is still in scope after
  // the loop because |var| declarations are function-scoped in JavaScript.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = {a: 1.1, b: 2.2};"
              "  }"
              "  return elements[i - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK_EQ(o->property_array(),
           ReadOnlyRoots(CcTest::heap()).empty_property_array());
}
2707 :
2708 :
// Verifies pretenuring of double-array literals: the array and its
// (double) elements backing store must end up in old space.
TEST(OptimizedPretenuringdoubleArrayLiterals) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = [1.1, 2.2, 3.3];"
              "  }"
              "  return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
2748 :
2749 :
// Verifies pretenuring of a nested literal mixing an object array and a
// double array; all arrays and their backing stores must be in old space.
TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
              "  }"
              "  return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
2800 :
2801 :
// Verifies pretenuring of nested object-array literals; both inner arrays
// and their backing stores must be in old space.
TEST(OptimizedPretenuringNestedObjectLiterals) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
              "  }"
              "  return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
  v8::Local<v8::Value> int_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));

  Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
}
2852 :
2853 :
// Verifies pretenuring of nested double-array literals; both inner arrays
// and their backing stores must be in old space.
TEST(OptimizedPretenuringNestedDoubleLiterals) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              "  for (var i = 0; i < number_elements; i++) {"
              "    elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
              "  }"
              "  return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> double_array_1 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
  v8::Local<v8::Value> double_array_2 =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
}
2904 :
2905 :
// Test regular array literals allocation: without pretenuring pressure the
// literal's elements stay in the young generation.
TEST(OptimizedAllocationArrayLiterals) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Storing 3.14 into a Smi array forces a transition to double elements.
  v8::Local<v8::Value> res = CompileRun(
      "function f() {"
      "  var numbers = new Array(1, 2, 3);"
      "  numbers[0] = 3.14;"
      "  return numbers;"
      "};"
      "f(); f(); f();"
      "%OptimizeFunctionOnNextCall(f);"
      "f();");
  CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
                                       ->Get(ctx, v8_str("0"))
                                       .ToLocalChecked()
                                       ->Int32Value(ctx)
                                       .FromJust());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(Heap::InYoungGeneration(o->elements()));
}
2936 :
2937 10 : static int CountMapTransitions(i::Isolate* isolate, Map map) {
2938 : DisallowHeapAllocation no_gc;
2939 10 : return TransitionsAccessor(isolate, map, &no_gc).NumberOfTransitions();
2940 : }
2941 :
2942 :
// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  FLAG_allow_natives_syntax = true;
  FLAG_trace_incremental_marking = true;
  // Do not keep dead maps alive across GCs.
  FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  i::Isolate* i_isolate = CcTest::i_isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
  static const int transitions_count = 256;

  // Create one transition off F's initial map per distinct property name.
  CompileRun("function F() {}");
  {
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new F;");
  }

  i::Handle<JSReceiver> root =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(i_isolate, root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(i_isolate, root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}
2988 :
2989 5 : static i::Handle<JSObject> GetByName(const char* name) {
2990 : return i::Handle<JSObject>::cast(
2991 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2992 10 : CcTest::global()
2993 20 : ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
2994 5 : .ToLocalChecked())));
2995 : }
2996 :
2997 : #ifdef DEBUG
2998 : static void AddTransitions(int transitions_count) {
2999 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3000 : for (int i = 0; i < transitions_count; i++) {
3001 : EmbeddedVector<char, 64> buffer;
3002 : SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3003 : CompileRun(buffer.start());
3004 : }
3005 : }
3006 :
3007 :
// Adds a Smi-valued property |property_name| to |object| while arming the
// heap to trigger a GC after |gc_count| allocations, so the property store
// itself races against a collection.
static void AddPropertyTo(
    int gc_count, Handle<JSObject> object, const char* property_name) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  // Force a full GC after |gc_count| allocations and drop dead maps
  // immediately.
  FLAG_gc_interval = gc_count;
  FLAG_gc_global = true;
  FLAG_retain_maps_for_n_gc = 0;
  CcTest::heap()->set_allocation_timeout(gc_count);
  Object::SetProperty(isolate, object, prop_name, twenty_three).Check();
}
3020 :
3021 :
// Verifies that the transition array shrinks when a GC is forced during an
// allocation while no old transition target is reachable except via 'o'.
TEST(TransitionArrayShrinksDuringAllocToZero) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  i::Isolate* i_isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() { }");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(i_isolate, root->map());
  CHECK_EQ(transitions_count, transitions_before);

  // Get rid of o
  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  // AddPropertyTo arms a GC after 2 allocations, so the store below runs
  // concurrently with a collection.
  AddPropertyTo(2, root, "funny");
  CcTest::CollectGarbage(NEW_SPACE);

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after =
      CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3052 :
3053 :
// Verifies transition array shrinking when one old transition target stays
// reachable through 'o' in addition to the newly added property transition.
TEST(TransitionArrayShrinksDuringAllocToOne) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  i::Isolate* i_isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(i_isolate, root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  AddPropertyTo(2, root, "funny");
  CcTest::CollectGarbage(NEW_SPACE);

  // Count number of live transitions after marking. Two transitions remain:
  // the one 'o' still holds an instance of, and the new "funny" transition.
  int transitions_after =
      CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(2, transitions_after);
}
3081 :
3082 :
// Verifies transition array shrinking when the added property ("prop9")
// already has a transition, so no new transition is created.
TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  i::Isolate* i_isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(i_isolate, root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  AddPropertyTo(0, root, "prop9");
  CcTest::CollectGarbage(OLD_SPACE);

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after =
      CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
3110 : #endif // DEBUG
3111 :
3112 :
// Verifies that sparsely-populated old-space pages are progressively
// released back to the OS across successive full GCs, and that a
// last-resort GC returns old space to its initial page count.
TEST(ReleaseOverReservedPages) {
  if (FLAG_never_compact) return;
  FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
#ifndef V8_LITE_MODE
  FLAG_opt = false;
  FLAG_always_opt = false;
#endif  // V8_LITE_MODE
  // - Parallel compaction increases fragmentation, depending on how existing
  //   memory is distributed. Since this is non-deterministic because of
  //   concurrent sweeping, we disable it for this test.
  // - Concurrent sweeping adds non determinism, depending on when memory is
  //   available for further reuse.
  // - Fast evacuation of pages may result in a different page count in old
  //   space.
  ManualGCScope manual_gc_scope;
  FLAG_page_promotion = false;
  FLAG_parallel_compaction = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  // If there's snapshot available, we don't know whether 20 small arrays will
  // fit on the initial pages.
  if (!isolate->snapshot_available()) return;
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  // Ensure that the young generation is empty.
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count.
  PagedSpace* old_space = heap->old_space();
  const int initial_page_count = old_space->CountTotalPages();
  const int overall_page_count = number_of_test_pages + initial_page_count;
  for (int i = 0; i < number_of_test_pages; i++) {
    AlwaysAllocateScope always_allocate(isolate);
    // Fill the current page, then allocate a tiny live array so the fresh
    // page carries almost no live data.
    heap::SimulateFullSpace(old_space);
    factory->NewFixedArray(1, AllocationType::kOld);
  }
  CHECK_EQ(overall_page_count, old_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered but
  // even spread across all allocated pages.
  CcTest::CollectAllGarbage();
  CHECK_GE(overall_page_count, old_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  CcTest::CollectAllGarbage();
  CHECK_GE(overall_page_count, old_space->CountTotalPages());
  CcTest::CollectAllGarbage();
  CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory. If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc. The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  CcTest::CollectAllAvailableGarbage();
  CHECK_GE(initial_page_count, old_space->CountTotalPages());
}
3177 :
3178 : static int forced_gc_counter = 0;
3179 :
3180 6 : void MockUseCounterCallback(v8::Isolate* isolate,
3181 : v8::Isolate::UseCounterFeature feature) {
3182 6 : isolate->GetCurrentContext();
3183 6 : if (feature == v8::Isolate::kForcedGC) {
3184 5 : forced_gc_counter++;
3185 : }
3186 6 : }
3187 :
3188 :
// Verifies that running the exposed gc() builtin reports the kForcedGC
// use counter to the embedder callback.
TEST(CountForcedGC) {
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  isolate->SetUseCounterCallback(MockUseCounterCallback);

  forced_gc_counter = 0;
  const char* source = "gc();";
  CompileRun(source);
  CHECK_GT(forced_gc_counter, 0);
}
3202 :
3203 :
3204 : #ifdef OBJECT_PRINT
3205 : TEST(PrintSharedFunctionInfo) {
3206 : CcTest::InitializeVM();
3207 : v8::HandleScope scope(CcTest::isolate());
3208 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3209 : const char* source = "f = function() { return 987654321; }\n"
3210 : "g = function() { return 123456789; }\n";
3211 : CompileRun(source);
3212 : i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
3213 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3214 : CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
3215 :
3216 : StdoutStream os;
3217 : g->shared()->Print(os);
3218 : os << std::endl;
3219 : }
3220 : #endif // OBJECT_PRINT
3221 :
3222 :
3223 26661 : TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3224 6 : if (!FLAG_use_ic) return;
3225 5 : if (!FLAG_incremental_marking) return;
3226 5 : if (FLAG_always_opt) return;
3227 4 : FLAG_allow_natives_syntax = true;
3228 4 : CcTest::InitializeVM();
3229 8 : v8::HandleScope scope(CcTest::isolate());
3230 : v8::Local<v8::Value> fun1, fun2;
3231 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3232 : {
3233 : CompileRun("function fun() {};");
3234 16 : fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3235 : }
3236 :
3237 : {
3238 : CompileRun("function fun() {};");
3239 16 : fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3240 : }
3241 :
3242 : // Prepare function f that contains type feedback for the two closures.
3243 16 : CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3244 16 : CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
3245 : CompileRun(
3246 : "function f(a, b) { a(); b(); } %EnsureFeedbackVectorForFunction(f); "
3247 : "f(fun1, fun2);");
3248 :
3249 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3250 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3251 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3252 :
3253 : Handle<FeedbackVector> feedback_vector(f->feedback_vector(), f->GetIsolate());
3254 4 : FeedbackVectorHelper feedback_helper(feedback_vector);
3255 :
3256 : int expected_slots = 2;
3257 4 : CHECK_EQ(expected_slots, feedback_helper.slot_count());
3258 : int slot1 = 0;
3259 : int slot2 = 1;
3260 4 : CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3261 4 : CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3262 :
3263 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3264 4 : CcTest::CollectAllGarbage();
3265 :
3266 4 : CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3267 4 : CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3268 : }
3269 :
3270 :
// Asserts that the IC at |slot_index| of |f|'s feedback vector is currently
// in |desired_state| (e.g. MONOMORPHIC, POLYMORPHIC).
static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
                          InlineCacheState desired_state) {
  Handle<FeedbackVector> vector =
      Handle<FeedbackVector>(f->feedback_vector(), f->GetIsolate());
  FeedbackVectorHelper helper(vector);
  FeedbackSlot slot = helper.slot(slot_index);
  FeedbackNexus nexus(vector, slot);
  CHECK(nexus.ic_state() == desired_state);
}
3280 :
// Verifies that the weak (or cleared) feedback recorded for a monomorphic
// construct site survives incremental marking plus a full GC.
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); }"
      "%EnsureFeedbackVectorForFunction(f);"
      "f(fun); f(fun);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  // Slot 0 holds the construct-site feedback; it must be a weak reference
  // (or cleared) rather than a strong one.
  Handle<FeedbackVector> vector(f->feedback_vector(), f->GetIsolate());
  CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // Still weak-or-cleared after the GC cycle.
  CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());
}
3307 :
// Verifies that a monomorphic load IC stays MONOMORPHIC across incremental
// marking plus a full GC (the feedback is not flushed).
TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  // Note: %EnsureFeedbackVectorForFunction(f) before f's declaration is fine
  // because function declarations are hoisted.
  CompileRun(
      "function fun() { this.x = 1; }; var obj = new fun();"
      "%EnsureFeedbackVectorForFunction(f);"
      "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // The IC state must be unchanged by the GC.
  CheckVectorIC(f, 0, MONOMORPHIC);
}
3333 :
// Verifies that a polymorphic load IC (fed objects from two different native
// contexts) stays POLYMORPHIC across incremental marking plus a full GC.
TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    // Each LocalContext is a fresh native context, so obj1 and obj2 have
    // maps from different contexts.
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun(
      "function f(o) { return o.x; }; "
      "%EnsureFeedbackVectorForFunction(f);"
      "f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // The polymorphic feedback must survive the GC.
  CheckVectorIC(f, 0, POLYMORPHIC);
}
3376 :
// Same setup as IncrementalMarkingPreservesPolymorphicIC, but additionally
// fires a ContextDisposedNotification before the GC: disposing the source
// contexts must not clear the polymorphic IC while the objects are live.
TEST(ContextDisposeDoesntClearPolymorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    // Objects created in two different (temporary) native contexts.
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun(
      "function f(o) { return o.x; }; "
      "%EnsureFeedbackVectorForFunction(f);"
      "f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // The polymorphic feedback must survive despite the dispose notification.
  CheckVectorIC(f, 0, POLYMORPHIC);
}
3420 :
3421 :
// External one-byte string resource over a heap-allocated char buffer.
// Dispose() is invoked by the GC when the external string dies; IsDisposed()
// lets tests observe that the buffer was actually released.
class SourceResource : public v8::String::ExternalOneByteStringResource {
 public:
  // Takes ownership of |data| (must have been allocated with i::StrDup /
  // NewArray, since Dispose() frees it via i::DeleteArray).
  explicit SourceResource(const char* data)
    : data_(data), length_(strlen(data)) { }

  // Called by V8 when the external string is collected; frees the buffer
  // and marks this resource as disposed.
  void Dispose() override {
    i::DeleteArray(data_);
    data_ = nullptr;
  }

  const char* data() const override { return data_; }

  size_t length() const override { return length_; }

  // True once Dispose() has run (i.e. the GC released the string data).
  bool IsDisposed() { return data_ == nullptr; }

 private:
  const char* data_;    // owned buffer; nullptr after Dispose()
  size_t length_;       // length captured at construction
};
3442 :
3443 :
3444 24 : void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3445 : const char* accessor) {
3446 : // Test that the data retained by the Error.stack accessor is released
3447 : // after the first time the accessor is fired. We use external string
3448 : // to check whether the data is being released since the external string
3449 : // resource's callback is fired when the external string is GC'ed.
3450 : i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3451 48 : v8::HandleScope scope(isolate);
3452 24 : SourceResource* resource = new SourceResource(i::StrDup(source));
3453 : {
3454 48 : v8::HandleScope scope(isolate);
3455 24 : v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
3456 : v8::Local<v8::String> source_string =
3457 48 : v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
3458 : i_isolate->heap()->CollectAllAvailableGarbage(
3459 24 : i::GarbageCollectionReason::kTesting);
3460 24 : v8::Script::Compile(ctx, source_string)
3461 : .ToLocalChecked()
3462 24 : ->Run(ctx)
3463 : .ToLocalChecked();
3464 24 : CHECK(!resource->IsDisposed());
3465 : }
3466 : // i_isolate->heap()->CollectAllAvailableGarbage();
3467 24 : CHECK(!resource->IsDisposed());
3468 :
3469 : CompileRun(accessor);
3470 : i_isolate->heap()->CollectAllAvailableGarbage(
3471 24 : i::GarbageCollectionReason::kTesting);
3472 :
3473 : // External source has been released.
3474 24 : CHECK(resource->IsDisposed());
3475 24 : delete resource;
3476 24 : }
3477 :
3478 :
// Exercises ReleaseStackTraceDataTest with several error shapes (normal
// error, stack overflow, error-as-prototype) and both the stack getter and
// setter, on a dedicated isolate so heap state is fully controlled.
UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
#ifndef V8_LITE_MODE
  // ICs retain objects.
  FLAG_use_ic = false;
#endif  // V8_LITE_MODE
  FLAG_concurrent_recompilation = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 = "var error = null;            "
    /* Normal Error */           "try {                        "
                                 "  throw new Error();         "
                                 "} catch (e) {                "
                                 "  error = e;                 "
                                 "}                            ";
    static const char* source2 = "var error = null;            "
    /* Stack overflow */         "try {                        "
                                 "  (function f() { f(); })(); "
                                 "} catch (e) {                "
                                 "  error = e;                 "
                                 "}                            ";
    static const char* source3 = "var error = null;            "
    /* Normal Error */           "try {                        "
    /* as prototype */           "  throw new Error();         "
                                 "} catch (e) {                "
                                 "  error = {};                "
                                 "  error.__proto__ = e;       "
                                 "}                            ";
    static const char* source4 = "var error = null;            "
    /* Stack overflow */         "try {                        "
    /* as prototype */           "  (function f() { f(); })(); "
                                 "} catch (e) {                "
                                 "  error = {};                "
                                 "  error.__proto__ = e;       "
                                 "}                            ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}
3539 :
3540 : // TODO(mmarchini) also write tests for async/await and Promise.all
// TODO(mmarchini) also write tests for async/await and Promise.all
// Runs |src| (which must throw), extracts the detailed stack trace stored on
// the exception under the stack_trace_symbol, and hands the resulting
// FrameArray to |test| for inspection.
void DetailedErrorStackTraceTest(const char* src,
                                 std::function<void(Handle<FrameArray>)> test) {
  // Detailed traces (with parameter capture) are only recorded when this
  // flag is on.
  FLAG_detailed_error_stack_trace = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun(src);

  CHECK(try_catch.HasCaught());
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());

  Isolate* isolate = CcTest::i_isolate();
  // The raw trace is stashed on the error object under a private symbol.
  Handle<Name> key = isolate->factory()->stack_trace_symbol();

  Handle<FrameArray> stack_trace(Handle<FrameArray>::cast(
      Object::GetProperty(isolate, exception, key).ToHandleChecked()));

  test(stack_trace);
}
3561 :
3562 : // * Test interpreted function error
// * Test interpreted function error
// Checks that the detailed stack trace records the actual argument values of
// each interpreted frame (func1 <- func2 <- main), including arguments that
// were omitted at the call site (recorded as undefined).
TEST(DetailedErrorStackTrace) {
  static const char* source =
      "function func1(arg1) {       "
      "  let err = new Error();     "
      "  throw err;                 "
      "}                            "
      "function func2(arg1, arg2) { "
      "  func1(42);                 "
      "}                            "
      "class Foo {};                "
      "function main(arg1, arg2) {  "
      "  func2(arg1, false);        "
      "}                            "
      "var foo = new Foo();         "
      "main(foo);                   ";

  DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
    // Frame 0 = func1, called with the literal 42.
    FixedArray foo_parameters = stack_trace->Parameters(0);
    CHECK_EQ(foo_parameters->length(), 1);
    CHECK(foo_parameters->get(0)->IsSmi());
    CHECK_EQ(Smi::ToInt(foo_parameters->get(0)), 42);

    // Frame 1 = func2, called with (foo, false).
    FixedArray bar_parameters = stack_trace->Parameters(1);
    CHECK_EQ(bar_parameters->length(), 2);
    CHECK(bar_parameters->get(0)->IsJSObject());
    CHECK(bar_parameters->get(1)->IsBoolean());
    Handle<Object> foo = Handle<Object>::cast(GetByName("foo"));
    CHECK_EQ(bar_parameters->get(0), *foo);
    CHECK(!bar_parameters->get(1)->BooleanValue(CcTest::i_isolate()));

    // Frame 2 = main, called with only (foo); the missing arg2 shows up
    // as undefined.
    FixedArray main_parameters = stack_trace->Parameters(2);
    CHECK_EQ(main_parameters->length(), 2);
    CHECK(main_parameters->get(0)->IsJSObject());
    CHECK(main_parameters->get(1)->IsUndefined());
    CHECK_EQ(main_parameters->get(0), *foo);
  });
}
3600 :
3601 : // * Test optimized function with inline frame error
// * Test optimized function with inline frame error
// Checks that parameter capture works when the throwing callee (add) has
// been inlined into an optimized caller (foo): both the inlined frame's and
// the caller's arguments must be recorded.
TEST(DetailedErrorStackTraceInline) {
  FLAG_allow_natives_syntax = true;
  static const char* source =
      "function add(x) {                     "
      " if (x == 42)                         "
      "  throw new Error();                  "
      " return x + x;                        "
      "}                                     "
      "add(0);                               "
      "add(1);                               "
      "function foo(x) {                     "
      " return add(x + 1)                    "
      "}                                     "
      "foo(40);                              "
      "%OptimizeFunctionOnNextCall(foo);     "
      "foo(41);                              ";

  DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
    // Inlined frame: add was (logically) called with 41 + 1 == 42.
    FixedArray parameters_add = stack_trace->Parameters(0);
    CHECK_EQ(parameters_add->length(), 1);
    CHECK(parameters_add->get(0)->IsSmi());
    CHECK_EQ(Smi::ToInt(parameters_add->get(0)), 42);

    // Optimized caller frame: foo(41).
    FixedArray parameters_foo = stack_trace->Parameters(1);
    CHECK_EQ(parameters_foo->length(), 1);
    CHECK(parameters_foo->get(0)->IsSmi());
    CHECK_EQ(Smi::ToInt(parameters_foo->get(0)), 41);
  });
}
3631 :
3632 : // * Test builtin exit error
// * Test builtin exit error
// Checks parameter capture for a builtin-exit frame: Number.prototype.toFixed
// throws a RangeError for 9999 digits, and its recorded parameters include
// the failing argument.
TEST(DetailedErrorStackTraceBuiltinExit) {
  static const char* source =
      "function test(arg1) {           "
      "  (new Number()).toFixed(arg1); "
      "}                               "
      "test(9999);                     ";

  DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
    FixedArray parameters = stack_trace->Parameters(0);

    // Builtin frame records two parameters; slot 0 is the toFixed argument.
    CHECK_EQ(parameters->length(), 2);
    CHECK(parameters->get(0)->IsSmi());
    CHECK_EQ(Smi::ToInt(parameters->get(0)), 9999);
  });
}
3648 :
// Regression test for crbug/169928: an array literal allocated right at the
// end of new space (no room for its AllocationMemento) must not crash when
// code later inspects the memento. The heap is hand-packed so the literal's
// would-be memento slot is a filler object.
TEST(Regress169928) {
  FLAG_allow_natives_syntax = true;
#ifndef V8_LITE_MODE
  FLAG_opt = false;
#endif  // V8_LITE_MODE
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             "    literal[0] = value;"
             "    return literal;"
             "}"
             "function get_standard_literal() {"
             "    var literal = [1, 2, 3];"
             "    return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CHECK(CcTest::global()
            ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
            .FromJust());

  // First make sure we flip spaces
  CcTest::CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data =
      factory->NewFixedArray(2, AllocationType::kYoung);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Fill new space so that exactly JSArray + memento + one word remain.
  heap::AllocateAllButNBytes(
      CcTest::heap()->new_space(),
      JSArray::kSize + AllocationMemento::kSize + kTaggedSize);

  Handle<JSArray> array =
      factory->NewJSArrayWithElements(array_data, PACKED_SMI_ELEMENTS);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasSmiOrObjectElements());

  // We need filler the size of AllocationMemento object, plus an extra
  // fill pointer value.
  HeapObject obj;
  AllocationResult allocation =
      CcTest::heap()->new_space()->AllocateRawUnaligned(
          AllocationMemento::kSize + kTaggedSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(addr_obj,
                                       AllocationMemento::kSize + kTaggedSize,
                                       ClearRecordedSlots::kNo);

  // Give the array a name, making sure not to allocate strings.
  v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(env.local(), mote_code_string)
      .ToLocalChecked()
      ->Run(env.local())
      .ToLocalChecked();
}
3731 :
// Verifies that the write barrier records slots inside a large object during
// incremental marking, so that when the referenced object on an evacuation
// candidate is moved by a full GC, all pointers in the large object get
// updated.
TEST(LargeObjectSlotRecording) {
  if (!FLAG_incremental_marking) return;
  if (FLAG_never_compact) return;
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create an object on an evacuation candidate.
  heap::SimulateFullSpace(heap->old_space());
  Handle<FixedArray> lit =
      isolate->factory()->NewFixedArray(4, AllocationType::kOld);
  Page* evac_page = Page::FromHeapObject(*lit);
  heap::ForceEvacuationCandidate(evac_page);
  // Remember the pre-move address so we can verify it actually moved.
  FixedArray old_location = *lit;

  // Allocate a large object.
  int size = Max(1000000, kMaxRegularHeapObjectSize + KB);
  CHECK_LT(kMaxRegularHeapObjectSize, size);
  Handle<FixedArray> lo =
      isolate->factory()->NewFixedArray(size, AllocationType::kOld);
  CHECK(heap->lo_space()->Contains(*lo));

  // Start incremental marking to active write barrier.
  heap::SimulateIncrementalMarking(heap, false);

  // Create references from the large object to the object on the evacuation
  // candidate.
  const int kStep = size / 10;
  for (int i = 0; i < size; i += kStep) {
    lo->set(i, *lit);
    CHECK(lo->get(i) == old_location);
  }

  heap::SimulateIncrementalMarking(heap, true);

  // Move the evaucation candidate object.
  CcTest::CollectAllGarbage();

  // Verify that the pointers in the large object got updated.
  for (int i = 0; i < size; i += kStep) {
    CHECK_EQ(lo->get(i), *lit);
    CHECK(lo->get(i) != old_location);
  }
}
3779 :
3780 10 : class DummyVisitor : public RootVisitor {
3781 : public:
3782 20 : void VisitRootPointers(Root root, const char* description,
3783 20 : FullObjectSlot start, FullObjectSlot end) override {}
3784 : };
3785 :
3786 :
// Fills the current handle block to capacity, then creates a
// DeferredHandleScope and checks that iterating the handle-scope
// implementer (and detaching/deleting the deferred handles) works at this
// block boundary.
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(ReadOnlyRoots(heap).empty_string(), isolate);
  // Allocate handles until the current block is exactly full.
  while (data->next < data->limit) {
    Handle<Object> obj(ReadOnlyRoots(heap).empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  CHECK(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  delete deferred.Detach();
}
3806 :
3807 :
// Checks that simulated incremental marking steps can finish (or reach weak
// closure over-approximation) even when the heap contains a very large
// object (a 10M-element array).
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  // Allocate a sparse 10M-element array to create a large marking workload.
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking(
        i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
  }
  heap::SimulateIncrementalMarking(CcTest::heap());
  // Marking must have made it to (or near) completion.
  CHECK(marking->IsComplete() ||
        marking->IsReadyToOverApproximateWeakClosure());
}
3827 :
3828 :
// Smoke test: optimized code that allocates must keep working when inline
// allocation is disabled and then re-enabled on the heap.
TEST(DisableInlineAllocation) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  // "run" optimizes test(), runs it (allocating arrays/objects), then
  // deoptimizes it again so each phase below exercises fresh optimized code.
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];"
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
3856 :
3857 :
3858 266 : static int AllocationSitesCount(Heap* heap) {
3859 : int count = 0;
3860 2852 : for (Object site = heap->allocation_sites_list(); site->IsAllocationSite();) {
3861 1293 : AllocationSite cur = AllocationSite::cast(site);
3862 1293 : CHECK(cur->HasWeakNext());
3863 : site = cur->weak_next();
3864 1293 : count++;
3865 : }
3866 266 : return count;
3867 : }
3868 :
// Counts the "slim" (nested) allocation sites: for each fat site on the weak
// list, walks its nested_site chain. Slim sites have no weak_next link.
static int SlimAllocationSiteCount(Heap* heap) {
  int count = 0;
  for (Object weak_list = heap->allocation_sites_list();
       weak_list->IsAllocationSite();) {
    AllocationSite weak_cur = AllocationSite::cast(weak_list);
    for (Object site = weak_cur->nested_site(); site->IsAllocationSite();) {
      AllocationSite cur = AllocationSite::cast(site);
      CHECK(!cur->HasWeakNext());
      site = cur->nested_site();
      count++;
    }
    weak_list = weak_cur->weak_next();
  }
  return count;
}
3884 :
// Verifies that an AllocationSite's dependent_code holds its optimized code
// only weakly: after GCs, the site (kept alive via a global handle) remains,
// but the dependent code reference is cleared.
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_optimizer()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    // Keep the site alive across the later GCs via a global handle.
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()
                ->Get(context.local(), v8_str("bar"))
                .ToLocalChecked())));

    // Walk the site's dependent-code list: each group must weakly reference
    // bar's optimized code.
    int dependency_group_count = 0;
    DependentCode dependency = site->dependent_code();
    while (dependency != ReadOnlyRoots(heap).empty_weak_fixed_array()) {
      CHECK(dependency->group() ==
                DependentCode::kAllocationSiteTransitionChangedGroup ||
            dependency->group() ==
                DependentCode::kAllocationSiteTenuringChangedGroup);
      CHECK_EQ(1, dependency->count());
      CHECK(dependency->object_at(0)->IsWeak());
      Code function_bar =
          Code::cast(dependency->object_at(0)->GetHeapObjectAssumeWeak());
      CHECK_EQ(bar_handle->code(), function_bar);
      dependency = dependency->next_link();
      dependency_group_count++;
    }
    // Expect a dependent code object for transitioning and pretenuring.
    CHECK_EQ(2, dependency_group_count);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  CHECK(site->dependent_code()->object_at(0)->IsCleared());
}
3951 :
// Runs |source| and asserts that it created exactly |expected_full_alloc|
// fat allocation sites and |expected_slim_alloc| slim (nested) ones, by
// diffing the heap-wide counts before and after.
void CheckNumberOfAllocations(Heap* heap, const char* source,
                              int expected_full_alloc,
                              int expected_slim_alloc) {
  int prev_fat_alloc_count = AllocationSitesCount(heap);
  int prev_slim_alloc_count = SlimAllocationSiteCount(heap);

  CompileRun(source);

  int fat_alloc_sites = AllocationSitesCount(heap) - prev_fat_alloc_count;
  int slim_alloc_sites = SlimAllocationSiteCount(heap) - prev_slim_alloc_count;

  CHECK_EQ(expected_full_alloc, fat_alloc_sites);
  CHECK_EQ(expected_slim_alloc, slim_alloc_sites);
}
3966 :
// Exhaustively checks when allocation sites are (and are not) created:
// eagerly for array literals in functions, lazily for object literals,
// not at all for top-level/IIFE one-shot code — except inside loops.
TEST(AllocationSiteCreation) {
  // No feedback vectors and hence no allocation sites.
  if (FLAG_lite_mode) return;
  FLAG_always_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  // One-shot optimization suppresses sites in code that runs only once.
  i::FLAG_enable_one_shot_optimization = true;

  // Array literals.
  CheckNumberOfAllocations(heap, "function f1() { return []; }; f1()", 1, 0);
  CheckNumberOfAllocations(heap, "function f2() { return [1, 2]; }; f2()", 1,
                           0);
  CheckNumberOfAllocations(heap, "function f3() { return [[1], [2]]; }; f3()",
                           1, 2);

  CheckNumberOfAllocations(heap,
                           "function f4() { "
                           "return [0, [1, 1.1, 1.2, "
                           "], 1.5, [2.1, 2.2], 3];"
                           "}; f4();",
                           1, 2);

  // No allocation sites within IIFE/top-level
  CheckNumberOfAllocations(heap,
                           R"(
                            (function f4() {
                              return [ 0, [ 1, 1.1, 1.2,], 1.5, [2.1, 2.2], 3 ];
                            })();
                            )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                            l = [ 1, 2, 3, 4];
                            )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                            a = [];
                            )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                            (function f4() {
                              return [];
                            })();
                            )",
                           0, 0);

  // Object literals have lazy AllocationSites
  CheckNumberOfAllocations(heap, "function f5() { return {}; }; f5(); ", 0, 0);

  // No AllocationSites are created for the empty object literal.
  for (int i = 0; i < 5; i++) {
    CheckNumberOfAllocations(heap, "f5(); ", 0, 0);
  }

  // First call creates no site (lazy); the second call materializes one.
  CheckNumberOfAllocations(heap, "function f6() { return {a:1}; }; f6(); ", 0,
                           0);

  CheckNumberOfAllocations(heap, "f6(); ", 1, 0);

  CheckNumberOfAllocations(heap, "function f7() { return {a:1, b:2}; }; f7(); ",
                           0, 0);
  CheckNumberOfAllocations(heap, "f7(); ", 1, 0);

  // No Allocation sites are created for object subliterals
  CheckNumberOfAllocations(heap,
                           "function f8() {"
                           "return {a:{}, b:{ a:2, c:{ d:{f:{}}} } }; "
                           "}; f8(); ",
                           0, 0);
  CheckNumberOfAllocations(heap, "f8(); ", 1, 0);

  // We currently eagerly create allocation sites if there are sub-arrays.
  // Allocation sites are created only for array subliterals
  CheckNumberOfAllocations(heap,
                           "function f9() {"
                           "return {a:[1, 2, 3], b:{ a:2, c:{ d:{f:[]} } }}; "
                           "}; f9(); ",
                           1, 2);

  // No new AllocationSites created on the second invocation.
  CheckNumberOfAllocations(heap, "f9(); ", 0, 0);

  // No allocation sites for literals in an iife/top level code even if it has
  // array subliterals
  CheckNumberOfAllocations(heap,
                           R"(
                            (function f10() {
                              return {a: [1], b: [2]};
                            })();
                            )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                            l = {
                              a: 1,
                              b: {
                                c: [5],
                              }
                            };
                            )",
                           0, 0);

  // Eagerly create allocation sites for literals within a loop of iife or
  // top-level code
  CheckNumberOfAllocations(heap,
                           R"(
                            (function f11() {
                              while(true) {
                                return {a: [1], b: [2]};
                              }
                            })();
                            )",
                           1, 2);

  CheckNumberOfAllocations(heap,
                           R"(
                            for (i = 0; i < 1; ++i) {
                              l = {
                                a: 1,
                                b: {
                                  c: [5],
                                }
                              };
                            }
                            )",
                           1, 1);
}
4102 :
// Verifies that cells embedded in optimized code are weak references: once
// the function's context dies, GCs must mark the code for deoptimization and
// clear its embedded objects.
TEST(CellsInOptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    // Compile and optimize bar inside a temporary context; only the Code
    // handle escapes this scope, so the GC can reclaim everything else.
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "bar = (function() {"
        "  function bar() {"
        "    return foo(1);"
        "  };"
        "  var foo = function(x) { with (x) { return 1 + x; } };"
        "  %NeverOptimizeFunction(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  %OptimizeFunctionOnNextCall(bar);"
        "  bar(foo);"
        "  return bar;})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  // Weak embedded cells did not keep the code alive as optimized code; it
  // must be deoptimized and its embedded objects cleared.
  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4146 :
4147 :
// Verifies that heap objects embedded in optimized code are weak: after the
// optimized function dies, GC marks the code for deoptimization and clears
// its embedded objects.
TEST(ObjectsInOptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    // Warm up and optimize `bar`; `foo` is kept unoptimized on purpose.
    CompileRun(
        "function bar() {"
        " return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive across the scope boundary.
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4189 :
// Like ObjectsInOptimizedCodeAreWeak, but the embedded objects start out in
// new space: promotion to old space must not keep the code alive, and the
// scavenges must not deoptimize the still-live function.
TEST(NewSpaceObjectsInOptimizedCode) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(isolate);
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(isolate);

    // The IIFE allocates foo/bar as fresh (new-space) closures and
    // publishes them through globals so the test can reach them.
    CompileRun(
        "var foo;"
        "var bar;"
        "(function() {"
        " function foo_func(x) { with (x) { return 1 + x; } };"
        " %NeverOptimizeFunction(foo_func);"
        " function bar_func() {"
        " return foo(1);"
        " };"
        " bar = bar_func;"
        " foo = foo_func;"
        " bar_func();"
        " bar_func();"
        " bar_func();"
        " %OptimizeFunctionOnNextCall(bar_func);"
        " bar_func();"
        "})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));

    Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("foo"))
                                           .ToLocalChecked())));

    // Two scavenges promote `foo` out of the young generation.
    CHECK(Heap::InYoungGeneration(*foo));
    CcTest::CollectGarbage(NEW_SPACE);
    CcTest::CollectGarbage(NEW_SPACE);
    CHECK(!Heap::InYoungGeneration(*foo));
#ifdef VERIFY_HEAP
    CcTest::heap()->Verify();
#endif
    // Promotion alone must not have deoptimized the live function.
    CHECK(!bar->code()->marked_for_deoptimization());
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4250 :
// Verifies that code which was eagerly deoptimized (while still reachable)
// also has its embedded objects treated weakly and cleared by later GCs.
TEST(ObjectsInEagerlyDeoptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    // Optimize `bar` and then immediately deoptimize it via natives syntax.
    CompileRun(
        "function bar() {"
        " return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();"
        "%DeoptimizeFunction(bar);");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Eager deopt happens before any GC in this test.
  CHECK(code->marked_for_deoptimization());

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4295 :
// Defines a trivial function called |name|, warms it up twice, forces
// optimization on the next call, runs it once more, and returns a handle to
// the now-optimized JSFunction.
static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
                                                const char* name) {
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();", name, name, name, name, name);
  CompileRun(source.start());
  // Fetch the function back from the global object by name.
  i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(isolate->GetCurrentContext(), v8_str(name))
              .ToLocalChecked())));
  return fun;
}
4312 :
4313 8 : static int GetCodeChainLength(Code code) {
4314 : int result = 0;
4315 16 : while (code->next_code_link()->IsCode()) {
4316 4 : result++;
4317 : code = Code::cast(code->next_code_link());
4318 : }
4319 8 : return result;
4320 : }
4321 :
4322 :
// Verifies that next_code_link on Code objects is a weak link: a dead code
// object is removed from the chain by GC, shortening it by one.
TEST(NextCodeLinkIsWeak) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  // Start from a settled heap so the later chain-length delta is exactly 1.
  CcTest::CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal =
        OptimizeDummyFunction(CcTest::isolate(), "mortal");
    Handle<JSFunction> immortal =
        OptimizeDummyFunction(CcTest::isolate(), "immortal");
    // The freshly optimized `immortal` links to `mortal`'s code.
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code(), isolate));
    CompileRun("mortal = null; immortal = null;");
  }
  CcTest::CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}
4352 :
// Verifies that when a dead Code object is collected, the next_code_link
// slot in its (still-reachable) CodeDataContainer is cleared to undefined
// rather than left dangling.
TEST(NextCodeLinkInCodeDataContainerIsCleared) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<CodeDataContainer> code_data_container;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal1 =
        OptimizeDummyFunction(CcTest::isolate(), "mortal1");
    Handle<JSFunction> mortal2 =
        OptimizeDummyFunction(CcTest::isolate(), "mortal2");
    CHECK_EQ(mortal2->code()->next_code_link(), mortal1->code());
    // Escape only the container; both functions and code objects die.
    code_data_container = scope.CloseAndEscape(Handle<CodeDataContainer>(
        mortal2->code()->code_data_container(), isolate));
    CompileRun("mortal1 = null; mortal2 = null;");
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK(code_data_container->next_code_link()->IsUndefined(isolate));
}
4377 :
// Hand-assembles a minimal code object tagged OPTIMIZED_FUNCTION, suitable
// for splicing into a context's optimized-code list without involving the
// compiler.
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                      ExternalAssemblerBuffer(buffer, sizeof(buffer)));
  CodeDesc desc;
  // Emit a trivial, balanced push/push/drop body so the code is non-empty.
  masm.Push(isolate->factory()->undefined_value());
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(2);
  masm.GetCode(isolate, &desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::OPTIMIZED_FUNCTION, masm.CodeObject());
  CHECK(code->IsCode());
  return code;
}
4392 :
4393 :
// Same weak-link property as NextCodeLinkIsWeak, but exercised on the
// context's OPTIMIZED_CODE_LIST head using hand-built code objects: the
// unreferenced middle element must be unlinked by GC.
TEST(NextCodeLinkIsWeak2) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  CcTest::CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    // Build the chain immortal -> mortal -> old_head and install it.
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    // Only `immortal` survives the scope; `mortal` becomes unreachable.
    new_head = scope.CloseAndEscape(immortal);
  }
  CcTest::CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}
4419 :
4420 :
// Set to true by ClearWeakIC when GC fires the weak callback below; tests
// reset it before forcing a GC and then assert on it.
static bool weak_ic_cleared = false;

// Weak-persistent-handle callback: records that the weakly held object was
// collected and releases the persistent handle.
static void ClearWeakIC(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  printf("clear weak is called\n");
  weak_ic_cleared = true;
  data.GetParameter()->Reset();
}
4429 :
4430 :
// Verifies that the constructor function recorded in a CallIC/construct
// feedback slot is held weakly: once the function dies its slot is cleared,
// and a subsequent construct with a new function re-establishes a weak
// (monomorphic) entry.
TEST(WeakFunctionInConstructor) {
  if (FLAG_lite_mode) return;
  if (FLAG_always_opt) return;
  // Disable stress modes that would trigger GCs at unpredictable points.
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  CompileRun(
      "function createObj(obj) {"
      " return new obj();"
      "}");
  i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(env.local(), v8_str("createObj"))
              .ToLocalChecked())));

  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    // `hat` is only reachable through `garbage` after this scope closes.
    const char* source =
        " (function() {"
        " function hat() { this.x = 5; }"
        " createObj(hat);"
        " createObj(hat);"
        " return hat;"
        " })();";
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  CcTest::CollectAllGarbage();
  CHECK(weak_ic_cleared);

  // We've determined the constructor in createObj has had it's weak cell
  // cleared. Now, verify that one additional call with a new function
  // allows monomorphicity.
  Handle<FeedbackVector> feedback_vector =
      Handle<FeedbackVector>(createObj->feedback_vector(), CcTest::i_isolate());
  // GC until the slot is observed cleared (bounded retry loop).
  for (int i = 0; i < 20; i++) {
    MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
    CHECK(slot_value->IsWeakOrCleared());
    if (slot_value->IsCleared()) break;
    CcTest::CollectAllGarbage();
  }

  MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
  CHECK(slot_value->IsCleared());
  CompileRun(
      "function coat() { this.x = 6; }"
      "createObj(coat);");
  slot_value = feedback_vector->Get(FeedbackSlot(0));
  CHECK(slot_value->IsWeak());
}
4490 :
4491 :
4492 : // Checks that the value returned by execution of the source is weak.
// Checks that the value returned by execution of the source is weak.
// Runs |source|, keeps its result alive only via a weak persistent handle,
// forces a full GC, and asserts the weak callback fired — i.e. nothing else
// (such as a strong IC reference) retained the object.
void CheckWeakness(const char* source) {
  // Disable stress modes so the single explicit GC below is the only one.
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    // Inner scope: no local handles to the result survive past here.
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  CcTest::CollectAllGarbage();
  CHECK(weak_ic_cleared);
}
4514 :
4515 :
4516 : // Each of the following "weak IC" tests creates an IC that embeds a map with
4517 : // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// A monomorphic LoadIC embeds a map whose prototype is `proto`; the map
// reference must be weak so `proto` can die after the closure returns.
TEST(WeakMapInMonomorphicLoadIC) {
  CheckWeakness(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      "%EnsureFeedbackVectorForFunction(loadIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " return proto;"
      " })();");
}
4533 :
4534 :
// Same as the monomorphic case, but the extra `poly` receiver transitions
// the LoadIC to polymorphic first; the embedded maps must still be weak.
TEST(WeakMapInPolymorphicLoadIC) {
  CheckWeakness(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      "%EnsureFeedbackVectorForFunction(loadIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " loadIC(poly);"
      " return proto;"
      " })();");
}
4553 :
4554 :
// Maps embedded by a monomorphic KeyedLoadIC must be weak.
TEST(WeakMapInMonomorphicKeyedLoadIC) {
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      " return obj[field];"
      "}"
      "%EnsureFeedbackVectorForFunction(keyedLoadIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " keyedLoadIC(obj, 'name');"
      " keyedLoadIC(obj, 'name');"
      " keyedLoadIC(obj, 'name');"
      " return proto;"
      " })();");
}
4570 :
4571 :
// Maps embedded by a polymorphic KeyedLoadIC must be weak.
TEST(WeakMapInPolymorphicKeyedLoadIC) {
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      " return obj[field];"
      "}"
      "%EnsureFeedbackVectorForFunction(keyedLoadIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " keyedLoadIC(obj, 'name');"
      " keyedLoadIC(obj, 'name');"
      " keyedLoadIC(obj, 'name');"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " keyedLoadIC(poly, 'name');"
      " return proto;"
      " })();");
}
4590 :
4591 :
// Maps embedded by a monomorphic StoreIC must be weak.
TEST(WeakMapInMonomorphicStoreIC) {
  CheckWeakness(
      "function storeIC(obj, value) {"
      " obj.name = value;"
      "}"
      "%EnsureFeedbackVectorForFunction(storeIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " storeIC(obj, 'x');"
      " storeIC(obj, 'x');"
      " storeIC(obj, 'x');"
      " return proto;"
      " })();");
}
4607 :
4608 :
// Maps embedded by a polymorphic StoreIC must be weak.
TEST(WeakMapInPolymorphicStoreIC) {
  CheckWeakness(
      "function storeIC(obj, value) {"
      " obj.name = value;"
      "}"
      "%EnsureFeedbackVectorForFunction(storeIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " storeIC(obj, 'x');"
      " storeIC(obj, 'x');"
      " storeIC(obj, 'x');"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " storeIC(poly, 'x');"
      " return proto;"
      " })();");
}
4627 :
4628 :
// Maps embedded by a monomorphic KeyedStoreIC must be weak.
// NOTE(review): keyedStoreIC is declared with three parameters but invoked
// with only two, so `value` is undefined and the stored key is 'x' — the
// keyed store IC is still exercised, but this looks unintentional; confirm
// whether the calls were meant to be keyedStoreIC(obj, 'name', 'x').
TEST(WeakMapInMonomorphicKeyedStoreIC) {
  CheckWeakness(
      "function keyedStoreIC(obj, field, value) {"
      " obj[field] = value;"
      "}"
      "%EnsureFeedbackVectorForFunction(keyedStoreIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " keyedStoreIC(obj, 'x');"
      " keyedStoreIC(obj, 'x');"
      " keyedStoreIC(obj, 'x');"
      " return proto;"
      " })();");
}
4644 :
4645 :
// Maps embedded by a polymorphic KeyedStoreIC must be weak.
// NOTE(review): as in the monomorphic variant, keyedStoreIC takes three
// parameters but is called with two (value is undefined) — verify intent.
TEST(WeakMapInPolymorphicKeyedStoreIC) {
  CheckWeakness(
      "function keyedStoreIC(obj, field, value) {"
      " obj[field] = value;"
      "}"
      "%EnsureFeedbackVectorForFunction(keyedStoreIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " keyedStoreIC(obj, 'x');"
      " keyedStoreIC(obj, 'x');"
      " keyedStoreIC(obj, 'x');"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " keyedStoreIC(poly, 'x');"
      " return proto;"
      " })();");
}
4664 :
4665 :
// Maps embedded by a monomorphic CompareNil (== null) IC must be weak.
TEST(WeakMapInMonomorphicCompareNilIC) {
  // Redundant with the assignment inside CheckWeakness, but harmless.
  FLAG_allow_natives_syntax = true;
  CheckWeakness(
      "function compareNilIC(obj) {"
      " return obj == null;"
      "}"
      "%EnsureFeedbackVectorForFunction(compareNilIC);"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " compareNilIC(obj);"
      " compareNilIC(obj);"
      " compareNilIC(obj);"
      " return proto;"
      " })();");
}
4682 :
4683 :
4684 8 : Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4685 8 : Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4686 : Handle<Object> obj =
4687 24 : Object::GetProperty(isolate, isolate->global_object(), str)
4688 : .ToHandleChecked();
4689 8 : return Handle<JSFunction>::cast(obj);
4690 : }
4691 :
4692 16 : void CheckIC(Handle<JSFunction> function, int slot_index,
4693 : InlineCacheState state) {
4694 : FeedbackVector vector = function->feedback_vector();
4695 : FeedbackSlot slot(slot_index);
4696 : FeedbackNexus nexus(vector, slot);
4697 16 : CHECK_EQ(nexus.ic_state(), state);
4698 16 : }
4699 :
// A monomorphic LoadIC whose cached map dies during GC must not be
// corrupted: after the GC and a re-run of the same shape of code it is
// monomorphic again, never prematurely polymorphic/megamorphic.
TEST(MonomorphicStaysMonomorphicAfterGC) {
  if (!FLAG_use_ic) return;
  if (FLAG_always_opt) return;
  // Take manual control of GC so the only collection is the explicit one.
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  FLAG_allow_natives_syntax = true;
  CompileRun(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      "%EnsureFeedbackVectorForFunction(loadIC);"
      "function testIC() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope so testIC's result doesn't survive to keep maps alive.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CcTest::CollectAllGarbage();
  CheckIC(loadIC, 0, MONOMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC, 0, MONOMORPHIC);
}
4734 :
4735 :
// Polymorphic analogue of MonomorphicStaysMonomorphicAfterGC: the IC sees
// two receiver maps, GC clears the dead ones, and a re-run restores the
// polymorphic state without degradation.
TEST(PolymorphicStaysPolymorphicAfterGC) {
  if (!FLAG_use_ic) return;
  if (FLAG_always_opt) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  FLAG_allow_natives_syntax = true;
  CompileRun(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      "%EnsureFeedbackVectorForFunction(loadIC);"
      "function testIC() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " loadIC(poly);"
      " return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CcTest::CollectAllGarbage();
  CheckIC(loadIC, 0, POLYMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC, 0, POLYMORPHIC);
}
4773 :
4774 : #ifdef DEBUG
// Regression test (debug-only): a GC triggered mid-way through an optimized
// add-then-store sequence (via gc_interval/allocation_timeout) must not
// leave the freshly allocated object in an inconsistent state.
TEST(AddInstructionChangesNewSpacePromotion) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  FLAG_stress_compaction = true;
  // Trigger a GC every 1000 allocations to hit the fragile window.
  FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  if (!FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  LocalContext env;
  CompileRun(
      "function add(a, b) {"
      " return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      " var object = {a: null, b: null};"
      " var result = add(1.5, x | 0);"
      " object.a = result;"
      " oldSpaceObject = object;"
      " return object;"
      "}"
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Local<v8::Object> global = CcTest::global();
  v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
      global->Get(env.local(), v8_str("crash")).ToLocalChecked());
  v8::Local<v8::Value> args1[] = {v8_num(1)};
  // Force every allocation through the runtime and make the very next one
  // trigger a GC, so the store in `crash` races a collection.
  heap->DisableInlineAllocation();
  heap->set_allocation_timeout(1);
  g->Call(env.local(), global, 1, args1).ToLocalChecked();
  CcTest::CollectAllGarbage();
}
4815 :
4816 :
// Fatal-error handler installed by CEntryStubOOM: terminates the process,
// using exit code 0 (test success) only when the failure location is the
// expected last-resort allocation retry site.
void OnFatalErrorExpectOOM(const char* location, const char* message) {
  // Exit with 0 if the location matches our expectation.
  const int location_mismatch = strcmp(location, "CALL_AND_RETRY_LAST");
  exit(location_mismatch);
}
4821 :
4822 :
// Regression test (debug-only): an allocation failure inside a CEntry stub
// call (Array.prototype.unshift through a proxied prototype chain) must
// surface as the expected fatal OOM location, not a crash elsewhere.
TEST(CEntryStubOOM) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  // If OOM fires, the handler exits the process with the test's verdict.
  CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Local<v8::Value> result = CompileRun(
      "%SetAllocationTimeout(1, 1);"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  CHECK(result->IsNumber());
}
4837 :
4838 : #endif // DEBUG
4839 :
4840 :
// No-op interrupt callback; requesting it is enough to exercise the
// interrupt check that Regress357137 depends on.
static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
4842 :
4843 :
// JS-callable binding (installed as global `interrupt`) that schedules the
// no-op interrupt callback on the test isolate.
static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, nullptr);
}
4847 :
// Regression test for crbug 538257: aborting compaction on a nearly-full
// old space (every page forced to be an evacuation candidate) must not
// crash or OOM during the subsequent full GC.
HEAP_TEST(Regress538257) {
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  v8::Isolate::CreateParams create_params;
  // Set heap limits.
  create_params.constraints.set_max_semi_space_size_in_kb(1024);
#ifdef DEBUG
  create_params.constraints.set_max_old_space_size(20);
#else
  create_params.constraints.set_max_old_space_size(6);
#endif
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  isolate->Enter();
  {
    i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
    Heap* heap = i_isolate->heap();
    HandleScope handle_scope(i_isolate);
    PagedSpace* old_space = heap->old_space();
    const int kMaxObjects = 10000;
    const int kFixedArrayLen = 512;
    Handle<FixedArray> objects[kMaxObjects];
    // Fill old space until it can no longer grow, marking every touched
    // page as an evacuation candidate.
    for (int i = 0; (i < kMaxObjects) &&
                    heap->CanExpandOldGeneration(old_space->AreaSize());
         i++) {
      objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen,
                                                       AllocationType::kOld);
      heap::ForceEvacuationCandidate(Page::FromHeapObject(*objects[i]));
    }
    heap::SimulateFullSpace(old_space);
    CcTest::CollectAllGarbage();
    // If we get this far, we've successfully aborted compaction. Any further
    // allocations might trigger OOM.
  }
  isolate->Exit();
  isolate->Dispose();
}
4885 :
4886 :
// Regression test for crbug 357137: a pending interrupt (fake stack
// overflow) during compilation of a function with many eval-introduced
// locals must not corrupt the resulting closure; f()() still yields v0.
TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // Expose `interrupt()` to script so it can request the interrupt itself.
  global->Set(
      v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
  CHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
}
4908 :
4909 :
// Regression test for crbug 507979: shrinking an object while a filtering
// HeapIterator is live creates a filler that shares mark bits with the
// following object; iteration must still walk the heap without crashing.
TEST(Regress507979) {
  const int kFixedArrayLen = 10;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  CHECK(Heap::InYoungGeneration(*o1));
  CHECK(Heap::InYoungGeneration(*o2));

  HeapIterator it(isolate->heap(), i::HeapIterator::kFilterUnreachable);

  // Replace parts of an object placed before a live object with a filler. This
  // way the filler object shares the mark bits with the following live object.
  o1->Shrink(isolate, kFixedArrayLen - 1);

  for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
    // Let's not optimize the loop away.
    CHECK_NE(obj->address(), kNullAddress);
  }
}
4932 :
// Regression test for crbug 388880: migrating an object that sits flush
// against the end of a page, while incremental marking is active, must not
// crash in the live-bytes adjustment.
TEST(Regress388880) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_incremental_marking = false;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  // map2 is map1 extended with one tagged field "foo" (no transition).
  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<String> name = factory->NewStringFromStaticChars("foo");
  name = factory->InternalizeString(name);
  Handle<Map> map2 =
      Map::CopyWithField(isolate, map1, name, FieldType::Any(isolate), NONE,
                         PropertyConstness::kMutable, Representation::Tagged(),
                         OMIT_TRANSITION)
          .ToHandleChecked();

  size_t desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate padding objects in old pointer space so, that object allocated
  // afterwards would end at the end of the page.
  heap::SimulateFullSpace(heap->old_space());
  size_t padding_size =
      desired_offset - MemoryChunkLayout::ObjectStartOffsetInDataPage();
  heap::CreatePadding(heap, static_cast<int>(padding_size),
                      AllocationType::kOld);

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, AllocationType::kOld);
  o->set_raw_properties_or_hash(*factory->empty_fixed_array());

  // Ensure that the object allocated where we need it.
  Page* page = Page::FromHeapObject(*o);
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}
4983 :
4984 :
// Regression test for issue 3631: growing a weak map's backing store after
// the old store was incrementally marked must not confuse the marker when
// the final old-space GC runs.
TEST(Regress3631) {
  if (!FLAG_incremental_marking) return;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  // Populate a WeakMap and pre-allocate future keys.
  v8::Local<v8::Value> result = CompileRun(
      "var weak_map = new WeakMap();"
      "var future_keys = [];"
      "for (var i = 0; i < 50; i++) {"
      " var key = {'k' : i + 0.1};"
      " weak_map.set(key, 1);"
      " future_keys.push({'x' : i + 0.2});"
      "}"
      "weak_map");
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking(
        i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
  }
  // Incrementally mark the backing store.
  Handle<JSReceiver> obj =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Handle<JSWeakCollection> weak_map(JSWeakCollection::cast(*obj), isolate);
  SimulateIncrementalMarking(heap);
  // Stash the backing store in a handle.
  Handle<Object> save(weak_map->table(), isolate);
  // The following line will update the backing store.
  CompileRun(
      "for (var i = 0; i < 50; i++) {"
      " weak_map.set(future_keys[i], i);"
      "}");
  heap->incremental_marking()->set_should_hurry(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
5021 :
5022 :
// Regression test for issue 442710: Array.prototype.shift on a global-held
// array followed by an old-space GC must not crash (the shift leaves a
// filler/trimmed backing store behind).
TEST(Regress442710) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  Handle<JSArray> array = factory->NewJSArray(2);

  // Expose the array to script as `testArray` so shift runs on it.
  Handle<String> name = factory->InternalizeUtf8String("testArray");
  Object::SetProperty(isolate, global, name, array).Check();
  CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
  CcTest::CollectGarbage(OLD_SPACE);
}
5038 :
5039 :
5040 26661 : HEAP_TEST(NumberStringCacheSize) {
5041 : // Test that the number-string cache has not been resized in the snapshot.
5042 5 : CcTest::InitializeVM();
5043 : Isolate* isolate = CcTest::i_isolate();
5044 5 : if (!isolate->snapshot_available()) return;
5045 : Heap* heap = isolate->heap();
5046 5 : CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5047 : heap->number_string_cache()->length());
5048 : }
5049 :
5050 :
// Regression test for v8 issue 3877: a weakly-held prototype must stay alive
// while a live map points at it, and become collectable once the last such
// map is garbage.
TEST(Regress3877) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  CompileRun("function cls() { this.x = 10; }");
  // Hold the prototype only via a weak slot so GC is free to clear it.
  Handle<WeakFixedArray> weak_prototype_holder = factory->NewWeakFixedArray(1);
  {
    // Inner scope so no strong handle to the prototype survives.
    HandleScope inner_scope(isolate);
    v8::Local<v8::Value> result = CompileRun("cls.prototype");
    Handle<JSReceiver> proto =
        v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
    weak_prototype_holder->Set(0, HeapObjectReference::Weak(*proto));
  }
  CHECK(!weak_prototype_holder->Get(0)->IsCleared());
  // Keep an instance alive but drop the script-visible prototype reference.
  CompileRun(
      "var a = { };"
      "a.x = new cls();"
      "cls.prototype = null;");
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }
  // The map of a.x keeps prototype alive
  CHECK(!weak_prototype_holder->Get(0)->IsCleared());
  // Change the map of a.x and make the previous map garbage collectable.
  CompileRun("a.x.__proto__ = {};");
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }
  CHECK(weak_prototype_holder->Get(0)->IsCleared());
}
5082 :
// Creates a fresh Map with a script-allocated prototype, registers it in the
// heap's retained-maps list, and returns a WeakFixedArray whose single slot
// weakly references the map, so callers can observe when it gets cleared.
Handle<WeakFixedArray> AddRetainedMap(Isolate* isolate, Heap* heap) {
  HandleScope inner_scope(isolate);
  Handle<Map> map = Map::Create(isolate, 1);
  v8::Local<v8::Value> result =
      CompileRun("(function () { return {x : 10}; })();");
  Handle<JSReceiver> proto =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Map::SetPrototype(isolate, map, proto);
  heap->AddRetainedMap(map);
  Handle<WeakFixedArray> array = isolate->factory()->NewWeakFixedArray(1);
  array->Set(0, HeapObjectReference::Weak(*map));
  // Escape only the holder array; the map handle dies with the inner scope,
  // leaving the weak slot as the test's only reference to the map.
  return inner_scope.CloseAndEscape(array);
}
5096 :
5097 :
// Checks that, with FLAG_retain_maps_for_n_gc set to |n|, a retained map
// survives exactly |n| full GC cycles and is cleared by cycle n+1.
void CheckMapRetainingFor(int n) {
  FLAG_retain_maps_for_n_gc = n;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Handle<WeakFixedArray> array_with_map = AddRetainedMap(isolate, heap);
  CHECK(array_with_map->Get(0)->IsWeak());
  // The map must remain weakly reachable for n GC cycles...
  for (int i = 0; i < n; i++) {
    heap::SimulateIncrementalMarking(heap);
    CcTest::CollectGarbage(OLD_SPACE);
  }
  CHECK(array_with_map->Get(0)->IsWeak());
  // ...and be dropped by the next one.
  heap::SimulateIncrementalMarking(heap);
  CcTest::CollectGarbage(OLD_SPACE);
  CHECK(array_with_map->Get(0)->IsCleared());
}
5113 :
5114 :
5115 26661 : TEST(MapRetaining) {
5116 5 : if (!FLAG_incremental_marking) return;
5117 : ManualGCScope manual_gc_scope;
5118 5 : CcTest::InitializeVM();
5119 10 : v8::HandleScope scope(CcTest::isolate());
5120 5 : CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5121 5 : CheckMapRetainingFor(0);
5122 5 : CheckMapRetainingFor(1);
5123 5 : CheckMapRetainingFor(7);
5124 : }
5125 :
// Checks that GC preprocesses stored stack traces: Code entries in a cached
// stack-trace frame array must be replaced by Smi position values after a
// full garbage collection, leaving no Code objects behind.
TEST(PreprocessStackTrace) {
  // Do not automatically trigger early GC.
  FLAG_gc_interval = -1;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun("throw new Error();");
  CHECK(try_catch.HasCaught());
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
  Handle<Name> key = isolate->factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      Object::GetProperty(isolate, exception, key).ToHandleChecked();
  // Before GC, element 3 of the trace holds a raw Code object.
  Handle<Object> code =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(code->IsCode());

  CcTest::CollectAllAvailableGarbage();

  // After GC the same slot must have been preprocessed into a Smi.
  Handle<Object> pos =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(pos->IsSmi());

  // No element of any frame may still reference a Code object.
  Handle<FrameArray> frame_array = Handle<FrameArray>::cast(stack_trace);
  int array_length = frame_array->FrameCount();
  for (int i = 0; i < array_length; i++) {
    Handle<Object> element =
        Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
    CHECK(!element->IsCode());
  }
}
5157 :
5158 :
5159 215 : void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
5160 215 : CHECK_LE(FixedArray::kHeaderSize, bytes);
5161 215 : CHECK(IsAligned(bytes, kTaggedSize));
5162 : Factory* factory = isolate->factory();
5163 : HandleScope scope(isolate);
5164 : AlwaysAllocateScope always_allocate(isolate);
5165 : int elements =
5166 215 : static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
5167 : Handle<FixedArray> array = factory->NewFixedArray(
5168 : elements,
5169 215 : space == NEW_SPACE ? AllocationType::kYoung : AllocationType::kOld);
5170 430 : CHECK((space == NEW_SPACE) == Heap::InYoungGeneration(*array));
5171 215 : CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
5172 215 : }
5173 :
5174 :
// Checks that Heap::NewSpaceAllocationCounter() advances by exactly the
// number of bytes allocated, is unchanged by scavenges, and keeps producing
// correct deltas when the counter wraps around its size_t range.
TEST(NewSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->NewSpaceAllocationCounter();
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);  // Ensure new space is empty.
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, NEW_SPACE);
  size_t counter2 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(kSize, counter2 - counter1);
  // A scavenge must not advance the counter.
  CcTest::CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(0U, counter3 - counter2);
  // Test counter overflow: unsigned wrap-around keeps deltas exact.
  size_t max_counter = static_cast<size_t>(-1);
  heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->NewSpaceAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, NEW_SPACE);
    size_t counter = heap->NewSpaceAllocationCounter();
    CHECK_EQ(kSize, counter - start);
    start = counter;
  }
}
5201 :
5202 :
// Checks that Heap::OldGenerationAllocationCounter() advances by at least
// the allocated byte count (see TODO below for why not exactly), stays put
// across scavenges, and handles size_t wrap-around.
TEST(OldSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->OldGenerationAllocationCounter();
  // Two scavenges to empty the new space before measuring.
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  size_t counter2 = heap->OldGenerationAllocationCounter();
  // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
  CHECK_LE(kSize, counter2 - counter1);
  // A scavenge must not advance the old-generation counter.
  CcTest::CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->OldGenerationAllocationCounter();
  CHECK_EQ(0u, counter3 - counter2);
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  CcTest::CollectGarbage(OLD_SPACE);
  size_t counter4 = heap->OldGenerationAllocationCounter();
  CHECK_LE(kSize, counter4 - counter3);
  // Test counter overflow.
  size_t max_counter = static_cast<size_t>(-1);
  heap->set_old_generation_allocation_counter_at_last_gc(max_counter -
                                                         10 * kSize);
  size_t start = heap->OldGenerationAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, OLD_SPACE);
    size_t counter = heap->OldGenerationAllocationCounter();
    CHECK_LE(kSize, counter - start);
    start = counter;
  }
}
5235 :
5236 :
// Native callback for MessageObjectLeak: CHECKs that the isolate has no
// pending message object left over from an already-handled exception. The
// slot is read directly through its raw address.
static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Object message(
      *reinterpret_cast<Address*>(isolate->pending_message_obj_address()));
  // An empty slot holds the-hole; anything else is a leaked message.
  CHECK(message->IsTheHole(isolate));
}
5243 :
5244 :
// Runs scripts that throw-and-swallow exceptions (via catch, and via
// breaking out of a finally block) and uses the native check() callback to
// assert that no pending message object leaks — once with default execution
// and once with --turbo-filter=* / --always-opt to cover optimized code.
TEST(MessageObjectLeak) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // Expose CheckLeak to script as the global function check().
  global->Set(
      v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, CheckLeak));
  v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
  v8::Context::Scope cscope(context);

  const char* test =
      "try {"
      " throw 'message 1';"
      "} catch (e) {"
      "}"
      "check();"
      "L: try {"
      " throw 'message 2';"
      "} finally {"
      " break L;"
      "}"
      "check();";
  CompileRun(test);

  // Re-run with forced optimization to exercise the optimized-code path.
  const char* flag = "--turbo-filter=*";
  FlagList::SetFlagsFromString(flag, StrLength(flag));
  FLAG_always_opt = true;

  CompileRun(test);
}
5277 :
5278 :
5279 10 : static void CheckEqualSharedFunctionInfos(
5280 : const v8::FunctionCallbackInfo<v8::Value>& args) {
5281 : Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
5282 : Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
5283 : Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
5284 : Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
5285 10 : CHECK(fun1->shared() == fun2->shared());
5286 10 : }
5287 :
5288 :
// Native callback: discards the compiled code of the JSFunction passed as
// args[0], resets the function to the lazy-compile builtin, and collects as
// much garbage as possible so the discarded code can actually die.
static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
  Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
  // Bytecode is code too.
  SharedFunctionInfo::DiscardCompiled(isolate, handle(fun->shared(), isolate));
  fun->set_code(*BUILTIN_CODE(isolate, CompileLazy));
  CcTest::CollectAllAvailableGarbage();
}
5298 :
5299 :
// Checks that recompiling a function after its code was discarded yields the
// same (canonical) SharedFunctionInfo for inner closures — both for a
// directly nested function and for one produced through an IIFE.
TEST(CanonicalSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // check(a, b): asserts a and b share one SharedFunctionInfo.
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  // remove(f): discards f's code and GCs.
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
  v8::Context::Scope cscope(context);
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");

  CompileRun(
      "function f() { return (function() { return function g() {}; })(); }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");
}
5325 :
5326 :
// Checks that Script::Iterator visits exactly the Script objects a full heap
// walk finds: the heap walk counts scripts up, the iterator counts them back
// down to zero.
TEST(ScriptIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  CcTest::CollectAllGarbage();

  int script_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
      if (obj->IsScript()) script_count++;
    }
  }

  {
    Script::Iterator iterator(isolate);
    for (Script script = iterator.Next(); !script.is_null();
         script = iterator.Next()) {
      script_count--;
    }
  }

  CHECK_EQ(0, script_count);
}
5354 :
5355 :
// Checks that SharedFunctionInfo::GlobalIterator visits exactly the set of
// SharedFunctionInfo objects found by a full heap walk (after two full GCs
// to flush out dying ones).
TEST(SharedFunctionInfoIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();

  int sfi_count = 0;
  {
    HeapIterator it(heap);
    for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
      if (!obj->IsSharedFunctionInfo()) continue;
      sfi_count++;
    }
  }

  {
    SharedFunctionInfo::GlobalIterator iterator(isolate);
    while (!iterator.Next().is_null()) sfi_count--;
  }

  CHECK_EQ(0, sfi_count);
}
5382 :
// This is the same as Factory::NewByteArray, except it doesn't retry on
// allocation failure.
AllocationResult HeapTester::AllocateByteArrayForTest(
    Heap* heap, int length, AllocationType allocation_type) {
  DCHECK(length >= 0 && length <= ByteArray::kMaxLength);
  int size = ByteArray::SizeFor(length);
  HeapObject result;
  {
    AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
    // On failure, hand the raw AllocationResult straight back to the caller
    // instead of triggering GC and retrying.
    if (!allocation.To(&result)) return allocation;
  }

  // The object is brand new, so the map can be set without a write barrier.
  result->set_map_after_allocation(ReadOnlyRoots(heap).byte_array_map(),
                                   SKIP_WRITE_BARRIER);
  ByteArray::cast(result)->set_length(length);
  ByteArray::cast(result)->clear_padding();
  return result;
}
5401 :
// Regression test for v8 issue 587004: right-trim a large old-space
// FixedArray whose elements point into new space, force old space into OOM
// so subsequent byte-array allocations come from the freed (trimmed) memory,
// then scavenge. Must not crash.
HEAP_TEST(Regress587004) {
  ManualGCScope manual_gc_scope;
#ifdef VERIFY_HEAP
  // Heap verification is intentionally disabled for this test.
  FLAG_verify_heap = false;
#endif
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Largest element count that still fits a regular-sized heap object.
  const int N =
      (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kTaggedSize;
  Handle<FixedArray> array = factory->NewFixedArray(N, AllocationType::kOld);
  CHECK(heap->old_space()->Contains(*array));
  Handle<Object> number = factory->NewHeapNumber(1.0);
  CHECK(Heap::InYoungGeneration(*number));
  // Fill the old-space array with old-to-new pointers.
  for (int i = 0; i < N; i++) {
    array->set(i, *number);
  }
  CcTest::CollectGarbage(OLD_SPACE);
  heap::SimulateFullSpace(heap->old_space());
  heap->RightTrimFixedArray(*array, N - 1);
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  ByteArray byte_array;
  const int M = 256;
  // Don't allow old space expansion. The test works without this flag too,
  // but becomes very slow.
  heap->set_force_oom(true);
  while (
      AllocateByteArrayForTest(heap, M, AllocationType::kOld).To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
  }
  // Re-enable old space expansion to avoid OOM crash.
  heap->set_force_oom(false);
  CcTest::CollectGarbage(NEW_SPACE);
}
5440 :
// Regression test for v8 issue 589413: right-trimming old-space fixed arrays
// that have recorded slots (pointers into an evacuation candidate) during
// incremental marking, followed by a stress-compaction GC, must not leave
// stale recorded slots behind.
HEAP_TEST(Regress589413) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // Get the heap in clean state.
  CcTest::CollectGarbage(OLD_SPACE);
  CcTest::CollectGarbage(OLD_SPACE);
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Fill the new space with byte arrays with elements looking like pointers.
  const int M = 256;
  ByteArray byte_array;
  while (AllocateByteArrayForTest(heap, M, AllocationType::kYoung)
             .To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
    // Add the array in root set.
    handle(byte_array, isolate);
  }
  // Make sure the byte arrays will be promoted on the next GC.
  CcTest::CollectGarbage(NEW_SPACE);
  // This number is close to large free list category threshold.
  const int N = 0x3EEE;
  {
    std::vector<FixedArray> arrays;
    std::set<Page*> pages;
    FixedArray array;
    // Fill all pages with fixed arrays.
    heap->set_force_oom(true);
    while (
        AllocateFixedArrayForTest(heap, N, AllocationType::kOld).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromHeapObject(array));
      // Add the array in root set.
      handle(array, isolate);
    }
    // Expand and fill one complete page with fixed arrays.
    heap->set_force_oom(false);
    while (
        AllocateFixedArrayForTest(heap, N, AllocationType::kOld).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromHeapObject(array));
      // Add the array in root set.
      handle(array, isolate);
      // Do not expand anymore.
      heap->set_force_oom(true);
    }
    // Expand and mark the new page as evacuation candidate.
    heap->set_force_oom(false);
    {
      AlwaysAllocateScope always_allocate(isolate);
      Handle<HeapObject> ec_obj =
          factory->NewFixedArray(5000, AllocationType::kOld);
      Page* ec_page = Page::FromHeapObject(*ec_obj);
      heap::ForceEvacuationCandidate(ec_page);
      // Make all arrays point to evacuation candidate so that
      // slots are recorded for them.
      for (size_t j = 0; j < arrays.size(); j++) {
        array = arrays[j];
        for (int i = 0; i < N; i++) {
          array->set(i, *ec_obj);
        }
      }
    }
    heap::SimulateIncrementalMarking(heap);
    // Trim each array down to one element while its slots are recorded.
    for (size_t j = 0; j < arrays.size(); j++) {
      heap->RightTrimFixedArray(arrays[j], N - 1);
    }
  }
  // Force allocation from the free list.
  heap->set_force_oom(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
5520 :
TEST(Regress598319) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  // This test ensures that no white objects can cross the progress bar of large
  // objects during incremental marking. It checks this by using Shift() during
  // incremental marking.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  // The size of the array should be larger than kProgressBarScanningChunk.
  const int kNumberOfObjects = Max(FixedArray::kMaxRegularLength + 1, 128 * KB);

  // Helper that builds a large-object-space FixedArray of small arrays,
  // reachable only through a single root array.
  struct Arr {
    Arr(Isolate* isolate, int number_of_objects) {
      root = isolate->factory()->NewFixedArray(1, AllocationType::kOld);
      {
        // Temporary scope to avoid getting any other objects into the root set.
        v8::HandleScope scope(CcTest::isolate());
        Handle<FixedArray> tmp = isolate->factory()->NewFixedArray(
            number_of_objects, AllocationType::kOld);
        root->set(0, *tmp);
        for (int i = 0; i < get()->length(); i++) {
          tmp = isolate->factory()->NewFixedArray(100, AllocationType::kOld);
          get()->set(i, *tmp);
        }
      }
    }

    // Returns the large payload array held in the root's only slot.
    FixedArray get() { return FixedArray::cast(root->get(0)); }

    Handle<FixedArray> root;
  } arr(isolate, kNumberOfObjects);

  CHECK_EQ(arr.get()->length(), kNumberOfObjects);
  CHECK(heap->lo_space()->Contains(arr.get()));
  LargePage* page = LargePage::FromHeapObject(arr.get());
  CHECK_NOT_NULL(page);

  // GC to cleanup state
  CcTest::CollectGarbage(OLD_SPACE);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  CHECK(heap->lo_space()->Contains(arr.get()));
  IncrementalMarking* marking = heap->incremental_marking();
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  // Everything starts out white (unmarked).
  CHECK(marking_state->IsWhite(arr.get()));
  for (int i = 0; i < arr.get()->length(); i++) {
    HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
    CHECK(marking_state->IsWhite(arr_value));
  }

  // Start incremental marking.
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());

  // Check that we have not marked the interesting array during root scanning.
  for (int i = 0; i < arr.get()->length(); i++) {
    HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
    CHECK(marking_state->IsWhite(arr_value));
  }

  // Now we search for a state where we are in incremental marking and have
  // only partially marked the large object.
  const double kSmallStepSizeInMs = 0.1;
  while (!marking->IsComplete()) {
    marking->V8Step(kSmallStepSizeInMs,
                    i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    StepOrigin::kV8);
    if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->ProgressBar() > 0) {
      CHECK_NE(page->ProgressBar(), arr.get()->Size());
      {
        // Shift by 1, effectively moving one white object across the progress
        // bar, meaning that we will miss marking it.
        v8::HandleScope scope(CcTest::isolate());
        Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(
            Handle<FixedArray>(arr.get(), isolate));
        js_array->GetElementsAccessor()->Shift(js_array);
      }
      break;
    }
  }

  // Finish marking with bigger steps to speed up test.
  const double kLargeStepSizeInMs = 1000;
  while (!marking->IsComplete()) {
    marking->V8Step(kLargeStepSizeInMs,
                    i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());

  // All objects need to be black after marking. If a white object crossed the
  // progress bar, we would fail here.
  for (int i = 0; i < arr.get()->length(); i++) {
    HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
    CHECK(marking_state->IsBlack(arr_value));
  }
}
5631 :
// Allocates an old-space FixedArray of |length| elements, shrinks it to a
// single element, and checks SizeOfObjects() accounting: unchanged
// immediately after shrinking, and reduced once GC plus sweeping have run.
// Returns the shrunk array.
Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
  // Make sure there is no garbage and the compilation cache is empty.
  for (int i = 0; i < 5; i++) {
    CcTest::CollectAllGarbage();
  }
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  size_t size_before_allocation = heap->SizeOfObjects();
  Handle<FixedArray> array =
      heap->isolate()->factory()->NewFixedArray(length, AllocationType::kOld);
  size_t size_after_allocation = heap->SizeOfObjects();
  CHECK_EQ(size_after_allocation, size_before_allocation + array->Size());
  array->Shrink(heap->isolate(), 1);
  size_t size_after_shrinking = heap->SizeOfObjects();
  // Shrinking does not change the space size immediately.
  CHECK_EQ(size_after_allocation, size_after_shrinking);
  // GC and sweeping updates the size to account for shrinking.
  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  intptr_t size_after_gc = heap->SizeOfObjects();
  CHECK_EQ(size_after_gc, size_before_allocation + array->Size());
  return array;
}
5654 :
5655 26661 : TEST(Regress609761) {
5656 5 : CcTest::InitializeVM();
5657 10 : v8::HandleScope scope(CcTest::isolate());
5658 5 : Heap* heap = CcTest::heap();
5659 : int length = kMaxRegularHeapObjectSize / kTaggedSize + 1;
5660 5 : Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, length);
5661 5 : CHECK(heap->lo_space()->Contains(*array));
5662 5 : }
5663 :
5664 26661 : TEST(LiveBytes) {
5665 5 : CcTest::InitializeVM();
5666 10 : v8::HandleScope scope(CcTest::isolate());
5667 5 : Heap* heap = CcTest::heap();
5668 5 : Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, 2000);
5669 5 : CHECK(heap->old_space()->Contains(*array));
5670 5 : }
5671 :
// Regression test for v8 issue 615489: objects black-allocated during
// incremental marking must not cause the reported live size to grow after
// the subsequent full GC.
TEST(Regress615489) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();

  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();
  {
    // Allocate an array that becomes unreachable as soon as the inner scope
    // closes; it is black-allocated because marking is active.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    v8::HandleScope inner(CcTest::isolate());
    isolate->factory()->NewFixedArray(500, AllocationType::kOld)->Size();
  }
  // Drive incremental marking to completion.
  const double kStepSizeInMs = 100;
  while (!marking->IsComplete()) {
    marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
  intptr_t size_before = heap->SizeOfObjects();
  CcTest::CollectAllGarbage();
  intptr_t size_after = heap->SizeOfObjects();
  // Live size does not increase after garbage collection.
  CHECK_LE(size_after, size_before);
}
5712 :
// Minimal ExternalOneByteStringResource backed by a caller-provided C
// string. It does not own or copy the character data, so the data must
// outlive the resource.
class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
 public:
  explicit StaticOneByteResource(const char* data) : data_(data) {}

  ~StaticOneByteResource() override = default;

  const char* data() const override { return data_; }

  size_t length() const override { return strlen(data_); }

 private:
  const char* data_;  // Borrowed, not owned.
};
5726 :
// Regression test for v8 issue 631969: externalizing a cons string whose
// parts sit on an evacuation candidate, right after incremental marking has
// finished, must not crash during the following compaction GC.
TEST(Regress631969) {
  if (!FLAG_incremental_marking) return;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // Get the heap in clean state.
  CcTest::CollectGarbage(OLD_SPACE);
  CcTest::CollectGarbage(OLD_SPACE);
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Allocate two strings in a fresh page and mark the page as evacuation
  // candidate.
  heap::SimulateFullSpace(heap->old_space());
  Handle<String> s1 =
      factory->NewStringFromStaticChars("123456789", AllocationType::kOld);
  Handle<String> s2 =
      factory->NewStringFromStaticChars("01234", AllocationType::kOld);
  heap::ForceEvacuationCandidate(Page::FromHeapObject(*s1));

  heap::SimulateIncrementalMarking(heap, false);

  // Allocate a cons string and promote it to a fresh page in the old space.
  heap::SimulateFullSpace(heap->old_space());
  Handle<String> s3;
  factory->NewConsString(s1, s2).ToHandle(&s3);
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);

  // Finish incremental marking.
  const double kStepSizeInMs = 100;
  IncrementalMarking* marking = heap->incremental_marking();
  while (!marking->IsComplete()) {
    marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }

  {
    // Externalize the cons string with a stack-allocated resource, then GC.
    StaticOneByteResource external_string("12345678901234");
    s3->MakeExternal(&external_string);
    CcTest::CollectGarbage(OLD_SPACE);
    // This avoids the GC from trying to free stack allocated resources.
    i::Handle<i::ExternalOneByteString>::cast(s3)->SetResource(isolate,
                                                               nullptr);
  }
}
5778 :
// Checks that left-trimming a FixedArray that was allocated in a black
// (already-marked) area during incremental marking installs a filler over
// the trimmed prefix and keeps the remaining array black.
TEST(LeftTrimFixedArrayInBlackArea) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();

  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();

  // Ensure that we allocate a new page, set up a bump pointer area, and
  // perform the allocation in a black area.
  heap::SimulateFullSpace(heap->old_space());
  isolate->factory()->NewFixedArray(4, AllocationType::kOld);
  Handle<FixedArray> array =
      isolate->factory()->NewFixedArray(50, AllocationType::kOld);
  CHECK(heap->old_space()->Contains(*array));
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  CHECK(marking_state->IsBlack(*array));

  // Now left trim the allocated black area. A filler has to be installed
  // for the trimmed area and all mark bits of the trimmed area have to be
  // cleared.
  FixedArrayBase trimmed = heap->LeftTrimFixedArray(*array, 10);
  CHECK(marking_state->IsBlack(trimmed));

  heap::GcAndSweep(heap, OLD_SPACE);
}
5818 :
// Repeatedly left-trims a black-allocated FixedArray in 1/2/3-word steps and
// verifies after every step that a filler replaces the trimmed words and that
// both the filler and the surviving object remain black.
TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();

  // Finish any leftover sweeping, then ensure incremental marking with
  // black allocation is active before allocating the test objects.
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();

  // Ensure that we allocate a new page, set up a bump pointer area, and
  // perform the allocation in a black area.
  heap::SimulateFullSpace(heap->old_space());
  isolate->factory()->NewFixedArray(10, AllocationType::kOld);

  // Allocate the fixed array that will be trimmed later.
  Handle<FixedArray> array =
      isolate->factory()->NewFixedArray(100, AllocationType::kOld);
  Address start_address = array->address();
  Address end_address = start_address + array->Size();
  Page* page = Page::FromAddress(start_address);
  IncrementalMarking::NonAtomicMarkingState* marking_state =
      marking->non_atomic_marking_state();
  CHECK(marking_state->IsBlack(*array));
  // Black allocation marks the whole object range, so every mark bit in
  // [start, end) must be set.
  CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
      page->AddressToMarkbitIndex(start_address),
      page->AddressToMarkbitIndex(end_address)));
  CHECK(heap->old_space()->Contains(*array));

  FixedArrayBase previous = *array;
  FixedArrayBase trimmed;

  // First trim in one word steps.
  for (int i = 0; i < 10; i++) {
    trimmed = heap->LeftTrimFixedArray(previous, 1);
    // The old start address must now hold a filler object.
    HeapObject filler = HeapObject::FromAddress(previous->address());
    CHECK(filler->IsFiller());
    CHECK(marking_state->IsBlack(trimmed));
    CHECK(marking_state->IsBlack(previous));
    previous = trimmed;
  }

  // Then trim in two and three word steps.
  for (int i = 2; i <= 3; i++) {
    for (int j = 0; j < 10; j++) {
      trimmed = heap->LeftTrimFixedArray(previous, i);
      HeapObject filler = HeapObject::FromAddress(previous->address());
      CHECK(filler->IsFiller());
      CHECK(marking_state->IsBlack(trimmed));
      CHECK(marking_state->IsBlack(previous));
      previous = trimmed;
    }
  }

  heap::GcAndSweep(heap, OLD_SPACE);
}
5886 :
5887 26661 : TEST(ContinuousRightTrimFixedArrayInBlackArea) {
5888 5 : if (!FLAG_incremental_marking) return;
5889 5 : CcTest::InitializeVM();
5890 10 : v8::HandleScope scope(CcTest::isolate());
5891 5 : Heap* heap = CcTest::heap();
5892 : Isolate* isolate = CcTest::i_isolate();
5893 5 : CcTest::CollectAllGarbage();
5894 :
5895 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5896 : i::IncrementalMarking* marking = heap->incremental_marking();
5897 5 : if (collector->sweeping_in_progress()) {
5898 5 : collector->EnsureSweepingCompleted();
5899 : }
5900 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5901 5 : if (marking->IsStopped()) {
5902 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5903 5 : i::GarbageCollectionReason::kTesting);
5904 : }
5905 5 : CHECK(marking->IsMarking());
5906 : marking->StartBlackAllocationForTesting();
5907 :
5908 : // Ensure that we allocate a new page, set up a bump pointer area, and
5909 : // perform the allocation in a black area.
5910 5 : heap::SimulateFullSpace(heap->old_space());
5911 5 : isolate->factory()->NewFixedArray(10, AllocationType::kOld);
5912 :
5913 : // Allocate the fixed array that will be trimmed later.
5914 : Handle<FixedArray> array =
5915 5 : CcTest::i_isolate()->factory()->NewFixedArray(100, AllocationType::kOld);
5916 : Address start_address = array->address();
5917 5 : Address end_address = start_address + array->Size();
5918 : Page* page = Page::FromAddress(start_address);
5919 : IncrementalMarking::NonAtomicMarkingState* marking_state =
5920 : marking->non_atomic_marking_state();
5921 5 : CHECK(marking_state->IsBlack(*array));
5922 :
5923 10 : CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
5924 : page->AddressToMarkbitIndex(start_address),
5925 : page->AddressToMarkbitIndex(end_address)));
5926 5 : CHECK(heap->old_space()->Contains(*array));
5927 :
5928 : // Trim it once by one word to make checking for white marking color uniform.
5929 5 : Address previous = end_address - kTaggedSize;
5930 10 : isolate->heap()->RightTrimFixedArray(*array, 1);
5931 :
5932 : HeapObject filler = HeapObject::FromAddress(previous);
5933 5 : CHECK(filler->IsFiller());
5934 5 : CHECK(marking_state->IsImpossible(filler));
5935 :
5936 : // Trim 10 times by one, two, and three word.
5937 35 : for (int i = 1; i <= 3; i++) {
5938 315 : for (int j = 0; j < 10; j++) {
5939 150 : previous -= kTaggedSize * i;
5940 150 : isolate->heap()->RightTrimFixedArray(*array, i);
5941 : HeapObject filler = HeapObject::FromAddress(previous);
5942 150 : CHECK(filler->IsFiller());
5943 150 : CHECK(marking_state->IsWhite(filler));
5944 : }
5945 : }
5946 :
5947 5 : heap::GcAndSweep(heap, OLD_SPACE);
5948 : }
5949 :
// Regression test: a critical memory pressure notification while a large
// amount of external memory is registered must trigger full GCs (or one GC
// plus incremental marking), not get stuck.
TEST(Regress618958) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  bool isolate_is_locked = true;
  // Pretend 100MB of external (off-heap) memory is held alive by JS objects.
  CcTest::isolate()->AdjustAmountOfExternalAllocatedMemory(100 * MB);
  int mark_sweep_count_before = heap->ms_count();
  heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                   isolate_is_locked);
  int mark_sweep_count_after = heap->ms_count();
  int mark_sweeps_performed = mark_sweep_count_after - mark_sweep_count_before;
  // The memory pressure handler either performed two GCs or performed one and
  // started incremental marking.
  CHECK(mark_sweeps_performed == 2 ||
        (mark_sweeps_performed == 1 &&
         !heap->incremental_marking()->IsStopped()));
}
5968 :
// Verifies that a large array is allocated in the young-generation large
// object space (NEW_LO_SPACE) and is promoted to LO_SPACE by a scavenge.
TEST(YoungGenerationLargeObjectAllocationScavenge) {
  if (FLAG_minor_mc) return;
  FLAG_young_generation_large_objects = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  // NOTE(review): the body below only runs with the serializer enabled —
  // presumably to get deterministic heap layout; confirm against test config.
  if (!isolate->serializer_enabled()) return;

  // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
  Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
  CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
  CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
  CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));

  // Keep a young-generation reference alive inside the array.
  Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
  array_small->set(0, *number);

  CcTest::CollectGarbage(NEW_SPACE);

  // After the first young generation GC array_small will be in the old
  // generation large object space.
  chunk = MemoryChunk::FromHeapObject(*array_small);
  CHECK_EQ(LO_SPACE, chunk->owner()->identity());
  CHECK(!chunk->InYoungGeneration());

  CcTest::CollectAllAvailableGarbage();
}
5998 :
// Same as YoungGenerationLargeObjectAllocationScavenge, but the promotion
// from NEW_LO_SPACE to LO_SPACE is driven by a full mark-compact GC.
TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
  if (FLAG_minor_mc) return;
  FLAG_young_generation_large_objects = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  // NOTE(review): only runs with the serializer enabled — see sibling test.
  if (!isolate->serializer_enabled()) return;

  // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
  Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
  CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
  CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
  CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));

  // Keep a young-generation reference alive inside the array.
  Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
  array_small->set(0, *number);

  CcTest::CollectGarbage(OLD_SPACE);

  // After the first full GC array_small will be in the old generation
  // large object space.
  chunk = MemoryChunk::FromHeapObject(*array_small);
  CHECK_EQ(LO_SPACE, chunk->owner()->identity());
  CHECK(!chunk->InYoungGeneration());

  CcTest::CollectAllAvailableGarbage();
}
6028 :
// Verifies that dead young-generation large objects are fully released by a
// scavenge: afterwards both NEW_LO_SPACE and LO_SPACE must be empty.
TEST(YoungGenerationLargeObjectAllocationReleaseScavenger) {
  if (FLAG_minor_mc) return;
  FLAG_young_generation_large_objects = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  // NOTE(review): only runs with the serializer enabled — see sibling tests.
  if (!isolate->serializer_enabled()) return;

  // Allocate the large arrays in an inner scope so they are unreachable
  // (garbage) by the time the scavenge below runs.
  {
    HandleScope scope(isolate);
    for (int i = 0; i < 10; i++) {
      Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(20000);
      MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
      CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
      CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
    }
  }

  CcTest::CollectGarbage(NEW_SPACE);
  // All large objects were garbage; every LO space must be empty now.
  CHECK(isolate->heap()->new_lo_space()->IsEmpty());
  CHECK_EQ(0, isolate->heap()->new_lo_space()->Size());
  CHECK_EQ(0, isolate->heap()->new_lo_space()->SizeOfObjects());
  CHECK(isolate->heap()->lo_space()->IsEmpty());
  CHECK_EQ(0, isolate->heap()->lo_space()->Size());
  CHECK_EQ(0, isolate->heap()->lo_space()->SizeOfObjects());
}
6056 :
// Checks that shrinking a large-object-space array lets the following GC
// uncommit the now-unused tail pages of its memory chunk.
TEST(UncommitUnusedLargeObjectMemory) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  // 200000 elements exceeds the regular-object size limit, so this lands in
  // large object space.
  Handle<FixedArray> array =
      isolate->factory()->NewFixedArray(200000, AllocationType::kOld);
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
  CHECK(chunk->owner()->identity() == LO_SPACE);

  intptr_t size_before = array->Size();
  size_t committed_memory_before = chunk->CommittedPhysicalMemory();

  // Shrink the array to one element; the freed tail should become
  // uncommittable.
  array->Shrink(isolate, 1);
  CHECK(array->Size() < size_before);

  CcTest::CollectAllGarbage();
  CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
  // After GC only the pages covering the shrunken array (rounded up to the
  // commit granularity) should remain committed.
  size_t shrinked_size = RoundUp(
      (array->address() - chunk->address()) + array->Size(), CommitPageSize());
  CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory());
}
6080 :
// Exercises RememberedSet<OLD_TO_NEW>::RemoveRange on a large-object chunk:
// inserts slots at bucket/page boundaries, removes several sub-ranges, and
// after each removal iterates the set to verify exactly the expected slots
// remain.
TEST(RememberedSetRemoveRange) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  // Page::kPageSize / kTaggedSize elements makes the object larger than a
  // regular page, so it is allocated in large object space.
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(
      Page::kPageSize / kTaggedSize, AllocationType::kOld);
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
  CHECK(chunk->owner()->identity() == LO_SPACE);
  Address start = array->address();
  // Maps slot to boolean indicator of whether the slot should be in the set.
  // The chosen addresses straddle the page-size boundary inside the chunk.
  std::map<Address, bool> slots;
  slots[start + 0] = true;
  slots[start + kTaggedSize] = true;
  slots[start + Page::kPageSize - kTaggedSize] = true;
  slots[start + Page::kPageSize] = true;
  slots[start + Page::kPageSize + kTaggedSize] = true;
  slots[chunk->area_end() - kTaggedSize] = true;

  for (auto x : slots) {
    RememberedSet<OLD_TO_NEW>::Insert(chunk, x.first);
  }

  // Baseline: every inserted slot must be visible during iteration.
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](MaybeObjectSlot slot) {
                                       CHECK(slots[slot.address()]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);

  // Remove a single-slot range at the very start of the chunk.
  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kTaggedSize,
                                         SlotSet::FREE_EMPTY_BUCKETS);
  slots[start] = false;
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](MaybeObjectSlot slot) {
                                       CHECK(slots[slot.address()]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);

  // Remove a range that ends exactly at the page-size boundary; the slot at
  // the boundary itself must survive (end is exclusive).
  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kTaggedSize,
                                         start + Page::kPageSize,
                                         SlotSet::FREE_EMPTY_BUCKETS);
  slots[start + kTaggedSize] = false;
  slots[start + Page::kPageSize - kTaggedSize] = false;
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](MaybeObjectSlot slot) {
                                       CHECK(slots[slot.address()]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);

  // Remove a range crossing the page-size boundary; already-removed slots in
  // the range are tolerated.
  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start,
                                         start + Page::kPageSize + kTaggedSize,
                                         SlotSet::FREE_EMPTY_BUCKETS);
  slots[start + Page::kPageSize] = false;
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](MaybeObjectSlot slot) {
                                       CHECK(slots[slot.address()]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);

  // Remove the final slot at the very end of the usable area; afterwards the
  // set must be empty (the iteration callback must never fire).
  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, chunk->area_end() - kTaggedSize,
                                         chunk->area_end(),
                                         SlotSet::FREE_EMPTY_BUCKETS);
  slots[chunk->area_end() - kTaggedSize] = false;
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](MaybeObjectSlot slot) {
                                       CHECK(slots[slot.address()]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);
}
6156 :
// Regression test: incremental marking driven by small deadline steps while
// old space keeps filling up must eventually finish (reach Stopped) instead
// of running forever.
HEAP_TEST(Regress670675) {
  if (!FLAG_incremental_marking) return;
  // Disable stress marking so the test controls marking progress itself.
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  CcTest::CollectAllGarbage();

  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    marking->Start(i::GarbageCollectionReason::kTesting);
  }
  size_t array_length = 128 * KB;
  // Enough iterations to exhaust the available old-generation space plus a
  // margin, forcing marking to complete along the way.
  size_t n = heap->OldGenerationSpaceAvailable() / array_length;
  for (size_t i = 0; i < n + 40; i++) {
    {
      // Inner scope so each array becomes garbage immediately.
      HandleScope inner_scope(isolate);
      isolate->factory()->NewFixedArray(static_cast<int>(array_length),
                                        AllocationType::kOld);
    }
    if (marking->IsStopped()) break;
    // Advance marking by a tiny 1ms deadline slice per allocation.
    double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
    marking->AdvanceWithDeadline(
        deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
  }
  DCHECK(marking->IsStopped());
}
6189 :
namespace {
// Builds a throwaway immovable Code object (~1K nops) used by Regress5831 to
// fill up immovable code-space pages.
Handle<Code> GenerateDummyImmovableCode(Isolate* isolate) {
  Assembler assm(AssemblerOptions{});

  const int kNumberOfNops = 1 << 10;
  for (int i = 0; i < kNumberOfNops; i++) {
    assm.nop();  // supported on all architectures
  }

  CodeDesc desc;
  assm.GetCode(isolate, &desc);
  // kImmovable pins the resulting code object so the GC may not relocate it.
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::STUB, Handle<Code>(), Builtins::kNoBuiltinId,
      MaybeHandle<ByteArray>(), DeoptimizationData::Empty(isolate), kImmovable);
  CHECK(code->IsCode());

  return code;
}
}  // namespace
6209 :
// Regression test: once immovable code-space pages are full, allocating an
// immovable regular-sized Code object with the serializer "enabled" must not
// spill into large object space; the chosen page must be NeverEvacuate.
HEAP_TEST(Regress5831) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  // Used to ensure that the generated code is not collected.
  const int kInitialSize = 32;
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(kInitialSize);

  // Ensure that all immovable code space pages are full and we overflow into
  // LO_SPACE.
  const int kMaxIterations = 1 << 16;
  bool overflowed_into_lospace = false;
  for (int i = 0; i < kMaxIterations; i++) {
    Handle<Code> code = GenerateDummyImmovableCode(isolate);
    array = FixedArray::SetAndGrow(isolate, array, i, code);
    CHECK(heap->code_space()->Contains(code->address()) ||
          heap->code_lo_space()->Contains(*code));
    if (heap->code_lo_space()->Contains(*code)) {
      overflowed_into_lospace = true;
      break;
    }
  }

  CHECK(overflowed_into_lospace);

  // Fake a serializer run.
  isolate->serializer_enabled_ = true;

  // Generate the code.
  Handle<Code> code = GenerateDummyImmovableCode(isolate);
  CHECK_GE(i::kMaxRegularHeapObjectSize, code->Size());
  CHECK(!heap->code_space()->first_page()->Contains(code->address()));

  // Ensure it's not in large object space.
  MemoryChunk* chunk = MemoryChunk::FromHeapObject(*code);
  CHECK(chunk->owner()->identity() != LO_SPACE);
  CHECK(chunk->NeverEvacuate());
}
6250 :
// Regression test: filling an old-space array with new-space pointers and
// then overwriting them all must leave no pre-freed empty buckets in the
// OLD_TO_NEW remembered set after a scavenge.
TEST(Regress6800) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  const int kRootLength = 1000;
  Handle<FixedArray> root =
      isolate->factory()->NewFixedArray(kRootLength, AllocationType::kOld);
  {
    HandleScope inner_scope(isolate);
    // Writing a new-space object into the old-space array creates
    // OLD_TO_NEW remembered-set entries for every slot...
    Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
    for (int i = 0; i < kRootLength; i++) {
      root->set(i, *new_space_array);
    }
    // ...which are then invalidated by overwriting with undefined.
    for (int i = 0; i < kRootLength; i++) {
      root->set(i, ReadOnlyRoots(CcTest::heap()).undefined_value());
    }
  }
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
                  MemoryChunk::FromHeapObject(*root)));
}
6273 :
6274 26661 : TEST(Regress6800LargeObject) {
6275 5 : CcTest::InitializeVM();
6276 : Isolate* isolate = CcTest::i_isolate();
6277 : HandleScope handle_scope(isolate);
6278 :
6279 : const int kRootLength = i::kMaxRegularHeapObjectSize / kTaggedSize;
6280 : Handle<FixedArray> root =
6281 5 : isolate->factory()->NewFixedArray(kRootLength, AllocationType::kOld);
6282 5 : CcTest::heap()->lo_space()->Contains(*root);
6283 : {
6284 : HandleScope inner_scope(isolate);
6285 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
6286 163845 : for (int i = 0; i < kRootLength; i++) {
6287 163840 : root->set(i, *new_space_array);
6288 : }
6289 163845 : for (int i = 0; i < kRootLength; i++) {
6290 245760 : root->set(i, ReadOnlyRoots(CcTest::heap()).undefined_value());
6291 : }
6292 : }
6293 5 : CcTest::CollectGarbage(OLD_SPACE);
6294 5 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
6295 : MemoryChunk::FromHeapObject(*root)));
6296 5 : }
6297 :
// Regression test: Factory::New must install the map of a black-allocated
// object with a write barrier; otherwise the (white) map could be freed
// while the object survives, leaving a dangling map pointer.
HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();
  heap::SimulateIncrementalMarking(heap, false);
  Handle<Map> map;
  {
    AlwaysAllocateScope always_allocate(isolate);
    map = isolate->factory()->NewMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
  }
  // Allocate the object black, after the map was allocated (white).
  heap->incremental_marking()->StartBlackAllocationForTesting();
  Handle<HeapObject> object;
  {
    AlwaysAllocateScope always_allocate(isolate);
    object = handle(isolate->factory()->NewForTest(map, AllocationType::kOld),
                    isolate);
  }
  // The object is black. If Factory::New sets the map without write-barrier,
  // then the map is white and will be freed prematurely.
  heap::SimulateIncrementalMarking(heap, true);
  CcTest::CollectAllGarbage();
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  // If the barrier was missing, the map would have been swept and this read
  // would return garbage.
  CHECK(object->map()->IsMap());
}
6329 :
6330 26661 : HEAP_TEST(MarkCompactEpochCounter) {
6331 : ManualGCScope manual_gc_scope;
6332 5 : CcTest::InitializeVM();
6333 10 : v8::HandleScope scope(CcTest::isolate());
6334 5 : Heap* heap = CcTest::heap();
6335 : unsigned epoch0 = heap->mark_compact_collector()->epoch();
6336 5 : CcTest::CollectGarbage(OLD_SPACE);
6337 : unsigned epoch1 = heap->mark_compact_collector()->epoch();
6338 5 : CHECK_EQ(epoch0 + 1, epoch1);
6339 5 : heap::SimulateIncrementalMarking(heap, true);
6340 5 : CcTest::CollectGarbage(OLD_SPACE);
6341 : unsigned epoch2 = heap->mark_compact_collector()->epoch();
6342 5 : CHECK_EQ(epoch1 + 1, epoch2);
6343 5 : CcTest::CollectGarbage(NEW_SPACE);
6344 : unsigned epoch3 = heap->mark_compact_collector()->epoch();
6345 5 : CHECK_EQ(epoch2, epoch3);
6346 5 : }
6347 :
// Checks that with snapshot rehashing enabled, a freshly created isolate
// picks up the FLAG_hash_seed value and can still create a working context.
UNINITIALIZED_TEST(ReinitializeStringHashSeed) {
  // Enable rehashing and create an isolate and context.
  i::FLAG_rehash_snapshot = true;
  // Two rounds with different seeds to verify the seed is re-read per
  // isolate rather than latched globally.
  for (int i = 1; i < 3; i++) {
    i::FLAG_hash_seed = 1337 * i;
    v8::Isolate::CreateParams create_params;
    create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
    v8::Isolate* isolate = v8::Isolate::New(create_params);
    {
      v8::Isolate::Scope isolate_scope(isolate);
      CHECK_EQ(static_cast<uint64_t>(1337 * i),
               HashSeed(reinterpret_cast<i::Isolate*>(isolate)));
      v8::HandleScope handle_scope(isolate);
      v8::Local<v8::Context> context = v8::Context::New(isolate);
      CHECK(!context.IsEmpty());
      v8::Context::Scope context_scope(context);
    }
    isolate->Dispose();
  }
}
6368 :
// Old-generation limit used by the OutOfMemory* tests below.
const int kHeapLimit = 100 * MB;
// Isolate under test; set before installing OOMCallback so the callback can
// reach its heap (v8's OOM handler carries no user data pointer).
Isolate* oom_isolate = nullptr;

// OOM handler for the OutOfMemory* tests: verifies the heap stayed within
// the configured limit (plus new-space slack) and exits successfully, since
// reaching OOM is the expected outcome of those tests.
void OOMCallback(const char* location, bool is_heap_oom) {
  Heap* heap = oom_isolate->heap();
  size_t kSlack = heap->new_space()->Capacity();
  CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
  CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack);
  // Exit code 0: hitting OOM here means the test passed.
  base::OS::ExitProcess(0);
}
6379 :
// Allocates small arrays forever on a limit-capped isolate; the process is
// expected to terminate via OOMCallback (which checks the limit and exits 0).
UNINITIALIZED_TEST(OutOfMemory) {
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif
  FLAG_max_old_space_size = kHeapLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  oom_isolate = i_isolate;
  isolate->SetOOMErrorHandler(OOMCallback);
  {
    Factory* factory = i_isolate->factory();
    HandleScope handle_scope(i_isolate);
    // Intentional infinite allocation loop; OOMCallback ends the process.
    while (true) {
      factory->NewFixedArray(100);
    }
  }
}
6400 :
// Verifies the "ineffective GC" detection near the heap limit: if several
// consecutive mark-compacts reclaim almost nothing (low mutator utilization),
// that state must be detected rather than GC-looping indefinitely.
UNINITIALIZED_TEST(OutOfMemoryIneffectiveGC) {
  if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif

  FLAG_max_old_space_size = kHeapLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
  oom_isolate = i_isolate;
  isolate->SetOOMErrorHandler(OOMCallback);
  Factory* factory = i_isolate->factory();
  Heap* heap = i_isolate->heap();
  heap->CollectAllGarbage(Heap::kNoGCFlags, GarbageCollectionReason::kTesting);
  {
    HandleScope scope(i_isolate);
    // Fill old generation to 90% of the limit with live objects (handles
    // keep them alive), so subsequent GCs reclaim very little.
    while (heap->OldGenerationSizeOfObjects() <
           heap->MaxOldGenerationSize() * 0.9) {
      factory->NewFixedArray(100, AllocationType::kOld);
    }
    {
      int initial_ms_count = heap->ms_count();
      int ineffective_ms_start = initial_ms_count;
      // Trigger ten more mark-sweeps, tracking the start of the most recent
      // run of GCs with mutator utilization below 30%.
      while (heap->ms_count() < initial_ms_count + 10) {
        HandleScope inner_scope(i_isolate);
        factory->NewFixedArray(30000, AllocationType::kOld);
        if (heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3) {
          ineffective_ms_start = heap->ms_count() + 1;
        }
      }
      // Four or more consecutive ineffective GCs should have tripped the
      // near-heap-limit handling; otherwise utilization must have recovered.
      int consecutive_ineffective_ms = heap->ms_count() - ineffective_ms_start;
      CHECK_IMPLIES(
          consecutive_ineffective_ms >= 4,
          heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3);
    }
  }
  isolate->Dispose();
}
6442 :
HEAP_TEST(Regress779503) {
  // The following regression test ensures that the Scavenger does not allocate
  // over invalid slots. More specific, the Scavenger should not sweep a page
  // that it currently processes because it might allocate over the currently
  // processed slot.
  const int kArraySize = 2048;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  // Freeze the current heap contents so only the objects below move.
  heap::SealCurrentObjects(heap);
  {
    HandleScope handle_scope(isolate);
    // The byte array filled with kHeapObjectTag ensures that we cannot read
    // from the slot again and interpret it as heap value. Doing so will crash.
    Handle<ByteArray> byte_array = isolate->factory()->NewByteArray(kArraySize);
    CHECK(Heap::InYoungGeneration(*byte_array));
    for (int i = 0; i < kArraySize; i++) {
      byte_array->set(i, kHeapObjectTag);
    }

    {
      HandleScope handle_scope(isolate);
      // The FixedArray in old space serves as space for slots.
      Handle<FixedArray> fixed_array =
          isolate->factory()->NewFixedArray(kArraySize, AllocationType::kOld);
      CHECK(!Heap::InYoungGeneration(*fixed_array));
      // Every element points at the young byte array, creating OLD_TO_NEW
      // slots; the array itself dies with this inner scope.
      for (int i = 0; i < kArraySize; i++) {
        fixed_array->set(i, *byte_array);
      }
    }
    // Delay sweeper tasks to allow the scavenger to sweep the page it is
    // currently scavenging.
    heap->delay_sweeper_tasks_for_testing_ = true;
    CcTest::CollectGarbage(OLD_SPACE);
    CHECK(Heap::InYoungGeneration(*byte_array));
  }
  // Scavenging and sweeping the same page will crash as slots will be
  // overridden.
  CcTest::CollectGarbage(NEW_SPACE);
  heap->delay_sweeper_tasks_for_testing_ = false;
}
6484 :
// Snapshot of heap statistics captured by NearHeapLimitCallback at the
// moment the heap limit is hit; inspected afterwards by the OOM tests.
struct OutOfMemoryState {
  Heap* heap;                              // heap being monitored
  bool oom_triggered;                      // set true when the callback fired
  size_t old_generation_capacity_at_oom;   // Heap::OldGenerationCapacity()
  size_t memory_allocator_size_at_oom;     // MemoryAllocator::Size()
  size_t new_space_capacity_at_oom;        // NewSpace::Capacity()
  size_t new_lo_space_size_at_oom;         // NewLargeObjectSpace::Size()
  size_t current_heap_limit;               // limit in force at callback time
  size_t initial_heap_limit;               // limit configured at startup
};
6495 :
6496 12 : size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
6497 : size_t initial_heap_limit) {
6498 : OutOfMemoryState* state = static_cast<OutOfMemoryState*>(raw_state);
6499 12 : Heap* heap = state->heap;
6500 12 : state->oom_triggered = true;
6501 12 : state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
6502 12 : state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
6503 12 : state->new_space_capacity_at_oom = heap->new_space()->Capacity();
6504 12 : state->new_lo_space_size_at_oom = heap->new_lo_space()->Size();
6505 12 : state->current_heap_limit = current_heap_limit;
6506 12 : state->initial_heap_limit = initial_heap_limit;
6507 12 : return initial_heap_limit + 100 * MB;
6508 : }
6509 :
6510 0 : size_t MemoryAllocatorSizeFromHeapCapacity(size_t capacity) {
6511 : // Size to capacity factor.
6512 : double factor =
6513 4 : Page::kPageSize * 1.0 / MemoryChunkLayout::AllocatableMemoryInDataPage();
6514 : // Some tables (e.g. deoptimization table) are allocated directly with the
6515 : // memory allocator. Allow some slack to account for them.
6516 : size_t slack = 5 * MB;
6517 4 : return static_cast<size_t>(capacity * factor) + slack;
6518 : }
6519 :
// Fills old space with small arrays until the near-heap-limit callback fires,
// then checks the recorded capacities are consistent with the configured
// limit (small objects should hit the limit with little overshoot).
UNINITIALIZED_TEST(OutOfMemorySmallObjects) {
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif
  const size_t kOldGenerationLimit = 300 * MB;
  FLAG_max_old_space_size = kOldGenerationLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  Isolate* isolate =
      reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  OutOfMemoryState state;
  state.heap = heap;
  state.oom_triggered = false;
  heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
  {
    HandleScope handle_scope(isolate);
    // Allocate until the callback flips oom_triggered (it also raises the
    // limit, so the loop terminates instead of crashing).
    while (!state.oom_triggered) {
      factory->NewFixedArray(100);
    }
  }
  // Capacity at OOM must bracket the configured limit within new-space slack.
  CHECK_LE(state.old_generation_capacity_at_oom,
           kOldGenerationLimit + state.new_space_capacity_at_oom);
  CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
                                    state.new_space_capacity_at_oom);
  CHECK_LE(
      state.memory_allocator_size_at_oom,
      MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
                                          2 * state.new_space_capacity_at_oom));
  reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
6553 :
// Same as OutOfMemorySmallObjects, but allocating large-object-space arrays;
// the overshoot bounds additionally account for new-LO-space size and one
// in-flight large array.
UNINITIALIZED_TEST(OutOfMemoryLargeObjects) {
  if (FLAG_stress_incremental_marking) return;
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) return;
#endif
  const size_t kOldGenerationLimit = 300 * MB;
  FLAG_max_old_space_size = kOldGenerationLimit / MB;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  Isolate* isolate =
      reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  OutOfMemoryState state;
  state.heap = heap;
  state.oom_triggered = false;
  heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
  // Large enough to be allocated in large object space.
  const int kFixedArrayLength = 1000000;
  {
    HandleScope handle_scope(isolate);
    while (!state.oom_triggered) {
      factory->NewFixedArray(kFixedArrayLength);
    }
  }
  CHECK_LE(state.old_generation_capacity_at_oom, kOldGenerationLimit);
  CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
                                    state.new_space_capacity_at_oom +
                                    state.new_lo_space_size_at_oom +
                                    FixedArray::SizeFor(kFixedArrayLength));
  CHECK_LE(
      state.memory_allocator_size_at_oom,
      MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
                                          2 * state.new_space_capacity_at_oom +
                                          state.new_lo_space_size_at_oom));
  reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
}
6590 :
6591 26661 : UNINITIALIZED_TEST(RestoreHeapLimit) {
6592 6 : if (FLAG_stress_incremental_marking) return;
6593 : #ifdef VERIFY_HEAP
6594 : if (FLAG_verify_heap) return;
6595 : #endif
6596 : ManualGCScope manual_gc_scope;
6597 : const size_t kOldGenerationLimit = 300 * MB;
6598 4 : FLAG_max_old_space_size = kOldGenerationLimit / MB;
6599 : v8::Isolate::CreateParams create_params;
6600 4 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6601 : Isolate* isolate =
6602 4 : reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6603 : Heap* heap = isolate->heap();
6604 : Factory* factory = isolate->factory();
6605 : OutOfMemoryState state;
6606 4 : state.heap = heap;
6607 4 : state.oom_triggered = false;
6608 4 : heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6609 4 : heap->AutomaticallyRestoreInitialHeapLimit(0.5);
6610 : const int kFixedArrayLength = 1000000;
6611 : {
6612 : HandleScope handle_scope(isolate);
6613 316 : while (!state.oom_triggered) {
6614 156 : factory->NewFixedArray(kFixedArrayLength);
6615 : }
6616 : }
6617 4 : heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
6618 4 : state.oom_triggered = false;
6619 : {
6620 : HandleScope handle_scope(isolate);
6621 316 : while (!state.oom_triggered) {
6622 156 : factory->NewFixedArray(kFixedArrayLength);
6623 : }
6624 : }
6625 4 : CHECK_EQ(state.current_heap_limit, state.initial_heap_limit);
6626 4 : reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6627 : }
6628 :
// Test helper: delegates to Heap::UncommitFromSpace and then blocks until the
// unmapper has actually finished unmapping, so the uncommit is fully
// observable by the caller (the background unmapping tasks run concurrently
// otherwise).
void HeapTester::UncommitFromSpace(Heap* heap) {
  heap->UncommitFromSpace();
  heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted();
}
6633 :
// Stateless native object used by Regress8014 below via
// Managed<DeleteNative>. Deleter matches the void(void*) destructor signature
// expected by the Managed machinery.
class DeleteNative {
 public:
  // Destroys the DeleteNative instance passed as an opaque pointer.
  static void Deleter(void* arg) {
    // static_cast is the correct named cast for void* -> T*;
    // reinterpret_cast is unnecessarily strong here.
    delete static_cast<DeleteNative*>(arg);
  }
};
6640 :
// Regression test: a critical memory pressure notification issued after
// registering many Managed<T> wrappers must not degenerate into roughly one
// mark-sweep per managed object.
TEST(Regress8014) {
  Isolate* isolate = CcTest::InitIsolateOnce();
  Heap* heap = isolate->heap();
  {
    HandleScope scope(isolate);
    // Register 10000 managed native objects; 1000000 is presumably the
    // estimated external memory size reported per object — confirm against
    // Managed<T>::FromRawPtr.
    for (int i = 0; i < 10000; i++) {
      auto handle = Managed<DeleteNative>::FromRawPtr(isolate, 1000000,
                                                      new DeleteNative());
      USE(handle);
    }
  }
  int ms_count = heap->ms_count();
  heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
  // Several GCs can be triggered by the above call.
  // The bad case triggers 10000 GCs.
  CHECK_LE(heap->ms_count(), ms_count + 10);
}
6658 :
// Regression test: a descriptor array whose marking barrier only updated the
// marked-descriptor count (without re-coloring the array, see Step 5) could
// be promoted without the slot to an evacuation candidate being recorded,
// leaving a stale pointer after Mark-Compact. Steps 1-8 reconstruct that
// exact interleaving.
TEST(Regress8617) {
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  LocalContext env;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  heap::SimulateFullSpace(heap->old_space());
  // Step 1. Create a function and ensure that it is in the old space.
  Handle<Object> foo =
      v8::Utils::OpenHandle(*CompileRun("function foo() { return 42; };"
                                        "foo;"));
  if (heap->InYoungGeneration(*foo)) {
    // Two scavenges move the function out of the young generation.
    CcTest::CollectGarbage(NEW_SPACE);
    CcTest::CollectGarbage(NEW_SPACE);
  }
  // Step 2. Create an object with a reference to foo in the descriptor array.
  CompileRun(
      "var obj = {};"
      "obj.method = foo;"
      "obj;");
  // Step 3. Make sure that foo moves during Mark-Compact.
  Page* ec_page = Page::FromAddress(foo->ptr());
  heap::ForceEvacuationCandidate(ec_page);
  // Step 4. Start incremental marking.
  heap::SimulateIncrementalMarking(heap, false);
  CHECK(ec_page->IsEvacuationCandidate());
  // Step 5. Install a new descriptor array on the map of the object.
  // This runs the marking barrier for the descriptor array.
  // In the bad case it sets the number of marked descriptors but does not
  // change the color of the descriptor array.
  CompileRun("obj.bar = 10;");
  // Step 6. Promote the descriptor array to old space. During promotion
  // the Scavenger will not record the slot of foo in the descriptor array.
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  // Step 7. Complete the Mark-Compact.
  CcTest::CollectAllGarbage();
  // Step 8. Use the descriptor for foo, which contains a stale pointer.
  // In the bad case this dereferences the old (evacuated) location of foo.
  CompileRun("obj.method()");
}
6700 :
6701 26661 : HEAP_TEST(MemoryReducerActivationForSmallHeaps) {
6702 : ManualGCScope manual_gc_scope;
6703 5 : LocalContext env;
6704 : Isolate* isolate = CcTest::i_isolate();
6705 : Heap* heap = isolate->heap();
6706 5 : CHECK_EQ(heap->memory_reducer()->state_.action, MemoryReducer::Action::kDone);
6707 : HandleScope scope(isolate);
6708 : const size_t kActivationThreshold = 1 * MB;
6709 5 : size_t initial_capacity = heap->OldGenerationCapacity();
6710 2143 : while (heap->OldGenerationCapacity() <
6711 716 : initial_capacity + kActivationThreshold) {
6712 711 : isolate->factory()->NewFixedArray(1 * KB, AllocationType::kOld);
6713 : }
6714 5 : CHECK_EQ(heap->memory_reducer()->state_.action, MemoryReducer::Action::kWait);
6715 5 : }
6716 :
6717 : } // namespace heap
6718 : } // namespace internal
6719 79968 : } // namespace v8
6720 :
6721 : #undef __
|