Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Redistribution and use in source and binary forms, with or without
3 : // modification, are permitted provided that the following conditions are
4 : // met:
5 : //
6 : // * Redistributions of source code must retain the above copyright
7 : // notice, this list of conditions and the following disclaimer.
8 : // * Redistributions in binary form must reproduce the above
9 : // copyright notice, this list of conditions and the following
10 : // disclaimer in the documentation and/or other materials provided
11 : // with the distribution.
12 : // * Neither the name of Google Inc. nor the names of its
13 : // contributors may be used to endorse or promote products derived
14 : // from this software without specific prior written permission.
15 : //
16 : // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 : // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 : // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 : // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 : // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 : // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 : // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 : // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 : // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 : // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 : // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 :
28 : #include <stdlib.h>
29 : #include <utility>
30 :
31 : #include "src/api-inl.h"
32 : #include "src/assembler-inl.h"
33 : #include "src/compilation-cache.h"
34 : #include "src/debug/debug.h"
35 : #include "src/deoptimizer.h"
36 : #include "src/elements.h"
37 : #include "src/execution.h"
38 : #include "src/field-type.h"
39 : #include "src/global-handles.h"
40 : #include "src/hash-seed-inl.h"
41 : #include "src/heap/factory.h"
42 : #include "src/heap/gc-tracer.h"
43 : #include "src/heap/heap-inl.h"
44 : #include "src/heap/incremental-marking.h"
45 : #include "src/heap/mark-compact.h"
46 : #include "src/heap/memory-reducer.h"
47 : #include "src/heap/remembered-set.h"
48 : #include "src/ic/ic.h"
49 : #include "src/macro-assembler-inl.h"
50 : #include "src/objects-inl.h"
51 : #include "src/objects/frame-array-inl.h"
52 : #include "src/objects/heap-number-inl.h"
53 : #include "src/objects/js-array-inl.h"
54 : #include "src/objects/js-collection-inl.h"
55 : #include "src/objects/managed.h"
56 : #include "src/objects/slots.h"
57 : #include "src/ostreams.h"
58 : #include "src/regexp/jsregexp.h"
59 : #include "src/snapshot/snapshot.h"
60 : #include "src/transitions.h"
61 : #include "test/cctest/cctest.h"
62 : #include "test/cctest/heap/heap-tester.h"
63 : #include "test/cctest/heap/heap-utils.h"
64 : #include "test/cctest/test-feedback-vector.h"
65 : #include "test/cctest/test-transitions.h"
66 :
67 : namespace v8 {
68 : namespace internal {
69 : namespace heap {
70 :
// We only start allocation-site tracking with the second instantiation.
// Instantiating this many times therefore ensures tracking has kicked in.
static const int kPretenureCreationCount =
    AllocationSite::kPretenureMinimumCreated + 1;
74 :
75 25 : static void CheckMap(Map map, int type, int instance_size) {
76 25 : CHECK(map->IsHeapObject());
77 : #ifdef DEBUG
78 : CHECK(CcTest::heap()->Contains(map));
79 : #endif
80 25 : CHECK_EQ(ReadOnlyRoots(CcTest::heap()).meta_map(), map->map());
81 25 : CHECK_EQ(type, map->instance_type());
82 25 : CHECK_EQ(instance_size, map->instance_size());
83 25 : }
84 :
85 :
86 26068 : TEST(HeapMaps) {
87 5 : CcTest::InitializeVM();
88 5 : ReadOnlyRoots roots(CcTest::heap());
89 5 : CheckMap(roots.meta_map(), MAP_TYPE, Map::kSize);
90 5 : CheckMap(roots.heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
91 5 : CheckMap(roots.fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
92 5 : CheckMap(roots.hash_table_map(), HASH_TABLE_TYPE, kVariableSizeSentinel);
93 5 : CheckMap(roots.string_map(), STRING_TYPE, kVariableSizeSentinel);
94 5 : }
95 :
96 10 : static void VerifyStoredPrototypeMap(Isolate* isolate,
97 : int stored_map_context_index,
98 : int stored_ctor_context_index) {
99 10 : Handle<Context> context = isolate->native_context();
100 :
101 : Handle<Map> this_map(Map::cast(context->get(stored_map_context_index)),
102 : isolate);
103 :
104 : Handle<JSFunction> fun(
105 : JSFunction::cast(context->get(stored_ctor_context_index)), isolate);
106 : Handle<JSObject> proto(JSObject::cast(fun->initial_map()->prototype()),
107 : isolate);
108 : Handle<Map> that_map(proto->map(), isolate);
109 :
110 10 : CHECK(proto->HasFastProperties());
111 10 : CHECK_EQ(*this_map, *that_map);
112 10 : }
113 :
// Checks that critical maps stored on the context (mostly used for fast-path
// checks) are unchanged after initialization.
TEST(ContextMaps) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  // String.prototype's map as cached on the native context.
  VerifyStoredPrototypeMap(isolate,
                           Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX,
                           Context::STRING_FUNCTION_INDEX);
  // RegExp.prototype's map as cached on the native context.
  VerifyStoredPrototypeMap(isolate, Context::REGEXP_PROTOTYPE_MAP_INDEX,
                           Context::REGEXP_FUNCTION_INDEX);
}
127 :
// Checks that the initial prototypes cached on the native context match what
// script code observes through the equivalent JavaScript expressions.
TEST(InitialObjects) {
  LocalContext env;
  HandleScope scope(CcTest::i_isolate());
  Handle<Context> context = v8::Utils::OpenHandle(*env);
  // Initial ArrayIterator prototype.
  CHECK_EQ(
      context->initial_array_iterator_prototype(),
      *v8::Utils::OpenHandle(*CompileRun("[][Symbol.iterator]().__proto__")));
  // Initial Array prototype.
  CHECK_EQ(context->initial_array_prototype(),
           *v8::Utils::OpenHandle(*CompileRun("Array.prototype")));
  // Initial Generator prototype.
  CHECK_EQ(context->initial_generator_prototype(),
           *v8::Utils::OpenHandle(
               *CompileRun("(function*(){}).__proto__.prototype")));
  // Initial Iterator prototype.
  CHECK_EQ(context->initial_iterator_prototype(),
           *v8::Utils::OpenHandle(
               *CompileRun("[][Symbol.iterator]().__proto__.__proto__")));
  // Initial Object prototype.
  CHECK_EQ(context->initial_object_prototype(),
           *v8::Utils::OpenHandle(*CompileRun("Object.prototype")));
}
151 :
152 20 : static void CheckOddball(Isolate* isolate, Object obj, const char* string) {
153 20 : CHECK(obj->IsOddball());
154 : Handle<Object> handle(obj, isolate);
155 40 : Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
156 20 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
157 20 : }
158 :
159 15 : static void CheckSmi(Isolate* isolate, int value, const char* string) {
160 : Handle<Object> handle(Smi::FromInt(value), isolate);
161 30 : Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
162 15 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
163 15 : }
164 :
165 :
166 5 : static void CheckNumber(Isolate* isolate, double value, const char* string) {
167 5 : Handle<Object> number = isolate->factory()->NewNumber(value);
168 5 : CHECK(number->IsNumber());
169 : Handle<Object> print_string =
170 10 : Object::ToString(isolate, number).ToHandleChecked();
171 5 : CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
172 5 : }
173 :
174 10 : void CheckEmbeddedObjectsAreEqual(Handle<Code> lhs, Handle<Code> rhs) {
175 : int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
176 10 : RelocIterator lhs_it(*lhs, mode_mask);
177 10 : RelocIterator rhs_it(*rhs, mode_mask);
178 30 : while (!lhs_it.done() && !rhs_it.done()) {
179 10 : CHECK(lhs_it.rinfo()->target_object() == rhs_it.rinfo()->target_object());
180 :
181 10 : lhs_it.next();
182 10 : rhs_it.next();
183 : }
184 10 : CHECK(lhs_it.done() == rhs_it.done());
185 10 : }
186 :
// Checks that a new-space object embedded in generated code survives copying
// of the Code object and a subsequent full GC (which may move the object).
HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope sc(isolate);

  // Freshly allocated HeapNumbers start out in the young generation.
  Handle<HeapNumber> value = factory->NewHeapNumber(1.000123);
  CHECK(Heap::InYoungGeneration(*value));

  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                      ExternalAssemblerBuffer(buffer, sizeof(buffer)));
  // Add a new-space reference to the code.
  masm.Push(value);

  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());

  Handle<Code> copy;
  {
    // Copying code requires the code space to be temporarily writable.
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    copy = factory->CopyCode(code);
  }

  // Embedded references must match both before and after garbage collection.
  CheckEmbeddedObjectsAreEqual(code, copy);
  CcTest::CollectAllAvailableGarbage();
  CheckEmbeddedObjectsAreEqual(code, copy);
}
217 :
// Checks Isolate::FindCodeObject: every inner address of a Code object must
// resolve back to that object, and addresses inside a different Code object
// must not resolve to it.
static void CheckFindCodeObject(Isolate* isolate) {
  // Test FindCodeObject
#define __ assm.

  Assembler assm(AssemblerOptions{});

  __ nop();  // supported on all architectures

  CodeDesc desc;
  assm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  CHECK(code->IsCode());

  HeapObject obj = HeapObject::cast(*code);
  Address obj_addr = obj->address();

  // Probe every tagged-size-aligned offset inside the object.
  for (int i = 0; i < obj->Size(); i += kTaggedSize) {
    Object found = isolate->FindCodeObject(obj_addr + i);
    CHECK_EQ(*code, found);
  }

  // An address in the middle of a second, distinct Code object must not map
  // back to the first one.
  Handle<Code> copy =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  HeapObject obj_copy = HeapObject::cast(*copy);
  Object not_right =
      isolate->FindCodeObject(obj_copy->address() + obj_copy->Size() / 2);
  CHECK(not_right != *code);
}
247 :
248 :
249 26068 : TEST(HandleNull) {
250 5 : CcTest::InitializeVM();
251 : Isolate* isolate = CcTest::i_isolate();
252 : HandleScope outer_scope(isolate);
253 5 : LocalContext context;
254 : Handle<Object> n(Object(0), isolate);
255 5 : CHECK(!n.is_null());
256 5 : }
257 :
258 :
// Exercises basic heap-object allocation through the Factory: Smi vs
// HeapNumber selection, string allocation, oddball/Smi/Number ToString, and
// FindCodeObject.
TEST(HeapObjects) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope sc(isolate);
  // A non-integral double must allocate a HeapNumber.
  Handle<Object> value = factory->NewNumber(1.000123);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(1.000123, value->Number());

  // Integral doubles in Smi range fold to Smis.
  value = factory->NewNumber(1.0);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1.0, value->Number());

  value = factory->NewNumberFromInt(1024);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(1024.0, value->Number());

  // The Smi range boundaries themselves stay Smis.
  value = factory->NewNumberFromInt(Smi::kMinValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());

  value = factory->NewNumberFromInt(Smi::kMaxValue);
  CHECK(value->IsSmi());
  CHECK(value->IsNumber());
  CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());

#if !defined(V8_TARGET_ARCH_64_BIT)
  // TODO(lrn): We need a NumberFromIntptr function in order to test this.
  value = factory->NewNumberFromInt(Smi::kMinValue - 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
#endif

  // Unsigned values just past Smi::kMaxValue require a HeapNumber.
  value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
           value->Number());

  value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
  CHECK(value->IsHeapNumber());
  CHECK(value->IsNumber());
  CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
           value->Number());

  // nan oddball checks
  CHECK(factory->nan_value()->IsNumber());
  CHECK(std::isnan(factory->nan_value()->Number()));

  Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
  CHECK(s->IsString());
  CHECK_EQ(10, s->length());

  // The global object always has an own "Object" property.
  Handle<String> object_string = Handle<String>::cast(factory->Object_string());
  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));

  // Check ToString for oddballs
  ReadOnlyRoots roots(heap);
  CheckOddball(isolate, roots.true_value(), "true");
  CheckOddball(isolate, roots.false_value(), "false");
  CheckOddball(isolate, roots.null_value(), "null");
  CheckOddball(isolate, roots.undefined_value(), "undefined");

  // Check ToString for Smis
  CheckSmi(isolate, 0, "0");
  CheckSmi(isolate, 42, "42");
  CheckSmi(isolate, -42, "-42");

  // Check ToString for Numbers
  CheckNumber(isolate, 1.1, "1.1");

  CheckFindCodeObject(isolate);
}
341 :
342 26068 : TEST(Tagging) {
343 5 : CcTest::InitializeVM();
344 : int request = 24;
345 : CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
346 : CHECK(Smi::FromInt(42)->IsSmi());
347 : CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
348 : CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
349 5 : }
350 :
351 :
// Checks that objects reachable from the global object survive scavenges
// while their handle scopes have already been closed.
TEST(GarbageCollection) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  // Check GC.
  CcTest::CollectGarbage(NEW_SPACE);

  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  Handle<String> name = factory->InternalizeUtf8String("theFunction");
  Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
  Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
  Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  Handle<Smi> twenty_four(Smi::FromInt(24), isolate);

  {
    HandleScope inner_scope(isolate);
    // Allocate a function and keep it in global object's property.
    Handle<JSFunction> function = factory->NewFunctionForTest(name);
    Object::SetProperty(isolate, global, name, function).Check();
    // Allocate an object. Unrooted after leaving the scope.
    Handle<JSObject> obj = factory->NewJSObject(function);
    Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
    Object::SetProperty(isolate, obj, prop_namex, twenty_four).Check();

    CHECK_EQ(Smi::FromInt(23),
             *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
    CHECK_EQ(Smi::FromInt(24),
             *Object::GetProperty(isolate, obj, prop_namex).ToHandleChecked());
  }

  CcTest::CollectGarbage(NEW_SPACE);

  // Function should be alive.
  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
  // Check function is retained.
  Handle<Object> func_value =
      Object::GetProperty(isolate, global, name).ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);

  {
    HandleScope inner_scope(isolate);
    // Allocate another object, make it reachable from global.
    Handle<JSObject> obj = factory->NewJSObject(function);
    Object::SetProperty(isolate, global, obj_name, obj).Check();
    Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
  }

  // After gc, it should survive.
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
  Handle<Object> obj =
      Object::GetProperty(isolate, global, obj_name).ToHandleChecked();
  CHECK(obj->IsJSObject());
  CHECK_EQ(Smi::FromInt(23),
           *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
}
414 :
415 :
416 25 : static void VerifyStringAllocation(Isolate* isolate, const char* string) {
417 : HandleScope scope(isolate);
418 50 : Handle<String> s = isolate->factory()->NewStringFromUtf8(
419 : CStrVector(string)).ToHandleChecked();
420 25 : CHECK_EQ(StrLength(string), s->length());
421 385 : for (int index = 0; index < s->length(); index++) {
422 180 : CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
423 : }
424 25 : }
425 :
426 :
427 26068 : TEST(String) {
428 5 : CcTest::InitializeVM();
429 5 : Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
430 :
431 5 : VerifyStringAllocation(isolate, "a");
432 5 : VerifyStringAllocation(isolate, "ab");
433 5 : VerifyStringAllocation(isolate, "abc");
434 5 : VerifyStringAllocation(isolate, "abcd");
435 5 : VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
436 5 : }
437 :
438 :
439 26068 : TEST(LocalHandles) {
440 5 : CcTest::InitializeVM();
441 : Isolate* isolate = CcTest::i_isolate();
442 : Factory* factory = isolate->factory();
443 :
444 10 : v8::HandleScope scope(CcTest::isolate());
445 : const char* name = "Kasper the spunky";
446 5 : Handle<String> string = factory->NewStringFromAsciiChecked(name);
447 5 : CHECK_EQ(StrLength(name), string->length());
448 5 : }
449 :
450 :
// Global handles must keep their targets alive across a scavenge even after
// the local handle scope that created the objects has closed, and handles
// created for the same object must remain aliases of each other.
TEST(GlobalHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  Handle<Object> h1;
  Handle<Object> h2;
  Handle<Object> h3;
  Handle<Object> h4;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    // Two global handles per object: h1/h3 -> string, h2/h4 -> number.
    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
    h3 = global_handles->Create(*i);
    h4 = global_handles->Create(*u);
  }

  // after gc, it should survive
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK((*h3)->IsString());
  CHECK((*h4)->IsHeapNumber());

  // Handles created from the same object still refer to the same object.
  CHECK_EQ(*h3, *h1);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h3.location());

  CHECK_EQ(*h4, *h2);
  GlobalHandles::Destroy(h2.location());
  GlobalHandles::Destroy(h4.location());
}
490 :
491 :
// Set by TestWeakGlobalHandleCallback (below) when a weak global handle whose
// callback parameter id is 1234 has been cleared by a GC.
static bool WeakPointerCleared = false;
493 :
494 15 : static void TestWeakGlobalHandleCallback(
495 : const v8::WeakCallbackInfo<void>& data) {
496 : std::pair<v8::Persistent<v8::Value>*, int>* p =
497 : reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
498 : data.GetParameter());
499 15 : if (p->second == 1234) WeakPointerCleared = true;
500 15 : p->first->Reset();
501 15 : }
502 :
503 :
// A scavenge must not clear weak global handles: both objects survive and the
// weak callback must not have run.
TEST(WeakGlobalHandlesScavenge) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    // The global handles are the only roots for these objects once the
    // scope closes.
    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  // Scavenge treats weak pointers as normal roots.
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK((*h1)->IsString());
  CHECK((*h2)->IsHeapNumber());
  CHECK(!WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
  GlobalHandles::Destroy(h2.location());
}
540 :
// A weakly-held, unmodified API object must be collected by a scavenge: the
// weak callback runs and sets WeakPointerCleared.
TEST(WeakGlobalUnmodifiedApiHandlesScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    // Create an Api object that is unmodified.
    Local<v8::Function> function = FunctionTemplate::New(context->GetIsolate())
                                       ->GetFunction(context.local())
                                       .ToLocalChecked();
    Local<v8::Object> i =
        function->NewInstance(context.local()).ToLocalChecked();
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*u);
    // Reinterpret the API Local as the internal address of the object.
    h2 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);
  // The strong handle keeps the number alive; the weak API object was
  // dropped, so the callback must have run.
  CHECK((*h1)->IsHeapNumber());
  CHECK(WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
}
578 :
// A weakly-held API object whose map differs from the constructor's (because
// its instance template defines a property) must NOT be dropped by a
// scavenge: the weak callback must not run.
TEST(WeakGlobalApiHandleModifiedMapScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;

  {
    HandleScope scope(isolate);

    // Create an API object which does not have the same map as constructor.
    auto function_template = FunctionTemplate::New(context->GetIsolate());
    auto instance_t = function_template->InstanceTemplate();
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "a",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    auto function =
        function_template->GetFunction(context.local()).ToLocalChecked();
    auto i = function->NewInstance(context.local()).ToLocalChecked();

    h1 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
  GlobalHandles::MakeWeak(
      h1.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(!WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
}
615 :
// A weakly-held API object that has elements (numeric-named properties) must
// NOT be dropped by a scavenge: the weak callback must not run.
TEST(WeakGlobalApiHandleWithElementsScavenge) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext context;
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;

  {
    HandleScope scope(isolate);

    // Create an API object which has elements.
    auto function_template = FunctionTemplate::New(context->GetIsolate());
    auto instance_t = function_template->InstanceTemplate();
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "1",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "2",
                                            NewStringType::kNormal)
                        .ToLocalChecked(),
                    v8::Number::New(context->GetIsolate(), 10));
    auto function =
        function_template->GetFunction(context.local()).ToLocalChecked();
    auto i = function->NewInstance(context.local()).ToLocalChecked();

    h1 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
  GlobalHandles::MakeWeak(
      h1.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(!WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
}
656 :
// A full (mark-compact) GC must clear a weak global handle to an otherwise
// unreachable old-space object, while a strong global handle keeps its target
// alive.
TEST(WeakGlobalHandlesMark) {
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h1;
  Handle<Object> h2;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    Handle<Object> u = factory->NewNumber(1.12344);

    h1 = global_handles->Create(*i);
    h2 = global_handles->Create(*u);
  }

  // Make sure the objects are promoted.
  CcTest::CollectGarbage(OLD_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(!Heap::InYoungGeneration(*h1) && !Heap::InYoungGeneration(*h2));

  std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
  GlobalHandles::MakeWeak(
      h2.location(), reinterpret_cast<void*>(&handle_and_id),
      &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);

  // Incremental marking potentially marked handles before they turned weak.
  CcTest::CollectAllGarbage();
  CHECK((*h1)->IsString());
  CHECK(WeakPointerCleared);
  GlobalHandles::Destroy(h1.location());
}
695 :
696 :
// A weak global handle survives a scavenge but is cleared by a mark-compact
// collection once its target is otherwise unreachable.
TEST(DeleteWeakGlobalHandle) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  GlobalHandles* global_handles = isolate->global_handles();

  WeakPointerCleared = false;

  Handle<Object> h;

  {
    HandleScope scope(isolate);

    Handle<Object> i = factory->NewStringFromStaticChars("fisk");
    h = global_handles->Create(*i);
  }

  std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
  GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
                          &TestWeakGlobalHandleCallback,
                          v8::WeakCallbackType::kParameter);

  // Scavenge does not recognize weak reference.
  CcTest::CollectGarbage(NEW_SPACE);

  CHECK(!WeakPointerCleared);

  // Mark-compact treats weak reference properly.
  CcTest::CollectGarbage(OLD_SPACE);

  CHECK(WeakPointerCleared);
}
731 :
732 26068 : TEST(BytecodeArray) {
733 5 : if (FLAG_never_compact) return;
734 : static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
735 : static const int kRawBytesSize = sizeof(kRawBytes);
736 : static const int kFrameSize = 32;
737 : static const int kParameterCount = 2;
738 :
739 : ManualGCScope manual_gc_scope;
740 5 : FLAG_manual_evacuation_candidates_selection = true;
741 5 : CcTest::InitializeVM();
742 : Isolate* isolate = CcTest::i_isolate();
743 : Heap* heap = isolate->heap();
744 : Factory* factory = isolate->factory();
745 : HandleScope scope(isolate);
746 :
747 5 : heap::SimulateFullSpace(heap->old_space());
748 : Handle<FixedArray> constant_pool =
749 5 : factory->NewFixedArray(5, AllocationType::kOld);
750 55 : for (int i = 0; i < 5; i++) {
751 25 : Handle<Object> number = factory->NewHeapNumber(i);
752 25 : constant_pool->set(i, *number);
753 : }
754 :
755 : // Allocate and initialize BytecodeArray
756 : Handle<BytecodeArray> array = factory->NewBytecodeArray(
757 5 : kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
758 :
759 5 : CHECK(array->IsBytecodeArray());
760 5 : CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
761 5 : CHECK_EQ(array->frame_size(), kFrameSize);
762 5 : CHECK_EQ(array->parameter_count(), kParameterCount);
763 5 : CHECK_EQ(array->constant_pool(), *constant_pool);
764 5 : CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
765 10 : CHECK_GE(array->address() + array->BytecodeArraySize(),
766 : array->GetFirstBytecodeAddress() + array->length());
767 45 : for (int i = 0; i < kRawBytesSize; i++) {
768 40 : CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
769 : kRawBytes[i]);
770 20 : CHECK_EQ(array->get(i), kRawBytes[i]);
771 : }
772 :
773 : FixedArray old_constant_pool_address = *constant_pool;
774 :
775 : // Perform a full garbage collection and force the constant pool to be on an
776 : // evacuation candidate.
777 : Page* evac_page = Page::FromHeapObject(*constant_pool);
778 5 : heap::ForceEvacuationCandidate(evac_page);
779 5 : CcTest::CollectAllGarbage();
780 :
781 : // BytecodeArray should survive.
782 5 : CHECK_EQ(array->length(), kRawBytesSize);
783 5 : CHECK_EQ(array->frame_size(), kFrameSize);
784 45 : for (int i = 0; i < kRawBytesSize; i++) {
785 40 : CHECK_EQ(array->get(i), kRawBytes[i]);
786 40 : CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
787 : kRawBytes[i]);
788 : }
789 :
790 : // Constant pool should have been migrated.
791 5 : CHECK_EQ(array->constant_pool(), *constant_pool);
792 5 : CHECK_NE(array->constant_pool(), old_constant_pool_address);
793 : }
794 :
// Checks BytecodeArray aging: a fresh array starts at the first age, one
// MakeOlder() step reaches the quadragenarian age, and aging saturates at
// the last age instead of advancing past it.
TEST(BytecodeArrayAging) {
  static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
  static const int kRawBytesSize = sizeof(kRawBytes);
  static const int kFrameSize = 32;
  static const int kParameterCount = 2;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);

  Handle<BytecodeArray> array =
      factory->NewBytecodeArray(kRawBytesSize, kRawBytes, kFrameSize,
                                kParameterCount, factory->empty_fixed_array());

  CHECK_EQ(BytecodeArray::kFirstBytecodeAge, array->bytecode_age());
  array->MakeOlder();
  CHECK_EQ(BytecodeArray::kQuadragenarianBytecodeAge, array->bytecode_age());
  // At the last age, further aging is a no-op.
  array->set_bytecode_age(BytecodeArray::kLastBytecodeAge);
  array->MakeOlder();
  CHECK_EQ(BytecodeArray::kLastBytecodeAge, array->bytecode_age());
}
816 :
// nullptr-terminated table of strings (assorted keywords) used by the
// StringTable test to exercise internalization.
static const char* not_so_random_string_table[] = {
    "abstract",
    "boolean",
    "break",
    "byte",
    "case",
    "catch",
    "char",
    "class",
    "const",
    "continue",
    "debugger",
    "default",
    "delete",
    "do",
    "double",
    "else",
    "enum",
    "export",
    "extends",
    "false",
    "final",
    "finally",
    "float",
    "for",
    "function",
    "goto",
    "if",
    "implements",
    "import",
    "in",
    "instanceof",
    "int",
    "interface",
    "long",
    "native",
    "new",
    "null",
    "package",
    "private",
    "protected",
    "public",
    "return",
    "short",
    "static",
    "super",
    "switch",
    "synchronized",
    "this",
    "throw",
    "throws",
    "transient",
    "true",
    "try",
    "typeof",
    "var",
    "void",
    "volatile",
    "while",
    "with",
    nullptr
};
879 :
880 10 : static void CheckInternalizedStrings(const char** strings) {
881 : Isolate* isolate = CcTest::i_isolate();
882 : Factory* factory = isolate->factory();
883 600 : for (const char* string = *strings; *strings != nullptr;
884 : string = *strings++) {
885 : HandleScope scope(isolate);
886 : Handle<String> a =
887 590 : isolate->factory()->InternalizeUtf8String(CStrVector(string));
888 : // InternalizeUtf8String may return a failure if a GC is needed.
889 1180 : CHECK(a->IsInternalizedString());
890 590 : Handle<String> b = factory->InternalizeUtf8String(string);
891 590 : CHECK_EQ(*b, *a);
892 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
893 590 : b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
894 590 : CHECK_EQ(*b, *a);
895 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
896 : }
897 10 : }
898 :
899 :
900 26068 : TEST(StringTable) {
901 5 : CcTest::InitializeVM();
902 :
903 10 : v8::HandleScope sc(CcTest::isolate());
904 5 : CheckInternalizedStrings(not_so_random_string_table);
905 5 : CheckInternalizedStrings(not_so_random_string_table);
906 5 : }
907 :
908 :
909 26068 : TEST(FunctionAllocation) {
910 5 : CcTest::InitializeVM();
911 : Isolate* isolate = CcTest::i_isolate();
912 : Factory* factory = isolate->factory();
913 :
914 10 : v8::HandleScope sc(CcTest::isolate());
915 5 : Handle<String> name = factory->InternalizeUtf8String("theFunction");
916 5 : Handle<JSFunction> function = factory->NewFunctionForTest(name);
917 :
918 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
919 : Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
920 :
921 5 : Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
922 5 : Handle<JSObject> obj = factory->NewJSObject(function);
923 10 : Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
924 10 : CHECK_EQ(Smi::FromInt(23),
925 : *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
926 : // Check that we can add properties to function objects.
927 10 : Object::SetProperty(isolate, function, prop_name, twenty_four).Check();
928 10 : CHECK_EQ(
929 : Smi::FromInt(24),
930 : *Object::GetProperty(isolate, function, prop_name).ToHandleChecked());
931 5 : }
932 :
933 :
// End-to-end check of named-property add / lookup / delete on a plain
// JSObject created from the global Object constructor, including deleting
// in both insertion order and reverse order, and the equivalence of plain
// strings and internalized strings as property keys.
TEST(ObjectProperties) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(
      String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
  // Fetch the global Object constructor and instantiate it.
  Handle<Object> object =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
                          object_string)
          .ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // check for empty
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first
  Object::SetProperty(isolate, obj, first, one).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));

  // delete first
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));

  // add first and then second
  Object::SetProperty(isolate, obj, first, one).Check();
  Object::SetProperty(isolate, obj, second, two).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete first and then second (insertion order)
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // add first and then second
  Object::SetProperty(isolate, obj, first, one).Check();
  Object::SetProperty(isolate, obj, second, two).Check();
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));

  // delete second and then first (reverse insertion order)
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(true) ==
        JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
  CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));

  // check that a plain string key finds a property added under the
  // equivalent internalized string (and vice versa below)
  const char* string1 = "fisk";
  Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
  Object::SetProperty(isolate, obj, s1, one).Check();
  Handle<String> s1_string = factory->InternalizeUtf8String(string1);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));

  // check internalized string and string match
  const char* string2 = "fugl";
  Handle<String> s2_string = factory->InternalizeUtf8String(string2);
  Object::SetProperty(isolate, obj, s2_string, one).Check();
  Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
  CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
}
1010 :
1011 :
1012 26068 : TEST(JSObjectMaps) {
1013 5 : CcTest::InitializeVM();
1014 : Isolate* isolate = CcTest::i_isolate();
1015 : Factory* factory = isolate->factory();
1016 :
1017 10 : v8::HandleScope sc(CcTest::isolate());
1018 5 : Handle<String> name = factory->InternalizeUtf8String("theFunction");
1019 5 : Handle<JSFunction> function = factory->NewFunctionForTest(name);
1020 :
1021 5 : Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
1022 5 : Handle<JSObject> obj = factory->NewJSObject(function);
1023 : Handle<Map> initial_map(function->initial_map(), isolate);
1024 :
1025 : // Set a propery
1026 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
1027 10 : Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
1028 10 : CHECK_EQ(Smi::FromInt(23),
1029 : *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
1030 :
1031 : // Check the map has changed
1032 5 : CHECK(*initial_map != obj->map());
1033 5 : }
1034 :
1035 :
// Exercises JSArray length handling: with a small (Smi) length the array
// stays in fast elements mode, while growing the length past
// Smi::kMaxValue forces a transition to dictionary (slow) elements, and
// element stores still work in both modes.
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  // Fetch the global Array constructor.
  Handle<Object> fun_obj =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(), name)
          .ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::kZero, array->length());
  // Must be in fast mode.
  CHECK(array->HasSmiOrObjectElements());

  // array[length] = name.
  Object::SetElement(isolate, array, 0, name, ShouldThrow::kDontThrow).Check();
  // Storing at index 0 bumps the length to 1.
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set array length with larger than smi value.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name.
  Object::SetElement(isolate, array, int_length, name, ShouldThrow::kDontThrow)
      .Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  // Storing at index |int_length| grows the length by exactly one.
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  // Both the new high-index element and the original element at 0 survive.
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}
1086 :
1087 :
// Checks that Factory::CopyJSObject produces an independent copy: the
// clone's named properties and elements initially match the original, and
// mutating the clone afterwards must not change the source object.
TEST(JSObjectCopy) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> object_string(
      String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
  // Instantiate the global Object constructor.
  Handle<Object> object =
      Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
                          object_string)
          .ToHandleChecked();
  Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
  Handle<JSObject> obj = factory->NewJSObject(constructor);
  Handle<String> first = factory->InternalizeUtf8String("first");
  Handle<String> second = factory->InternalizeUtf8String("second");

  Handle<Smi> one(Smi::FromInt(1), isolate);
  Handle<Smi> two(Smi::FromInt(2), isolate);

  // Populate both named properties and indexed elements.
  Object::SetProperty(isolate, obj, first, one).Check();
  Object::SetProperty(isolate, obj, second, two).Check();

  Object::SetElement(isolate, obj, 0, first, ShouldThrow::kDontThrow).Check();
  Object::SetElement(isolate, obj, 1, second, ShouldThrow::kDontThrow).Check();

  // Make the clone.
  Handle<Object> value1, value2;
  Handle<JSObject> clone = factory->CopyJSObject(obj);
  CHECK(!clone.is_identical_to(obj));

  // The clone starts out with identical elements and properties.
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  // Flip the values on the clone only.
  Object::SetProperty(isolate, clone, first, two).Check();
  Object::SetProperty(isolate, clone, second, one).Check();

  Object::SetElement(isolate, clone, 0, second, ShouldThrow::kDontThrow)
      .Check();
  Object::SetElement(isolate, clone, 1, first, ShouldThrow::kDontThrow).Check();

  // The original keeps its old values: obj[1] == clone[0] etc., proving
  // the mutation did not leak back into the source object.
  value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
  value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
  CHECK_EQ(*value1, *value2);

  value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
  value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
  value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
  CHECK_EQ(*value1, *value2);
}
1155 :
1156 :
1157 26068 : TEST(StringAllocation) {
1158 5 : CcTest::InitializeVM();
1159 : Isolate* isolate = CcTest::i_isolate();
1160 : Factory* factory = isolate->factory();
1161 :
1162 : const unsigned char chars[] = {0xE5, 0xA4, 0xA7};
1163 1005 : for (int length = 0; length < 100; length++) {
1164 1000 : v8::HandleScope scope(CcTest::isolate());
1165 500 : char* non_one_byte = NewArray<char>(3 * length + 1);
1166 500 : char* one_byte = NewArray<char>(length + 1);
1167 500 : non_one_byte[3 * length] = 0;
1168 500 : one_byte[length] = 0;
1169 50000 : for (int i = 0; i < length; i++) {
1170 24750 : one_byte[i] = 'a';
1171 24750 : non_one_byte[3 * i] = chars[0];
1172 24750 : non_one_byte[3 * i + 1] = chars[1];
1173 24750 : non_one_byte[3 * i + 2] = chars[2];
1174 : }
1175 : Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1176 500 : Vector<const char>(non_one_byte, 3 * length));
1177 500 : CHECK_EQ(length, non_one_byte_sym->length());
1178 : Handle<String> one_byte_sym =
1179 500 : factory->InternalizeOneByteString(OneByteVector(one_byte, length));
1180 500 : CHECK_EQ(length, one_byte_sym->length());
1181 : Handle<String> non_one_byte_str =
1182 1000 : factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
1183 : .ToHandleChecked();
1184 500 : non_one_byte_str->Hash();
1185 500 : CHECK_EQ(length, non_one_byte_str->length());
1186 : Handle<String> one_byte_str =
1187 1000 : factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
1188 : .ToHandleChecked();
1189 500 : one_byte_str->Hash();
1190 500 : CHECK_EQ(length, one_byte_str->length());
1191 : DeleteArray(non_one_byte);
1192 : DeleteArray(one_byte);
1193 : }
1194 5 : }
1195 :
1196 :
1197 5 : static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1198 : // Count the number of objects found in the heap.
1199 : int found_count = 0;
1200 10 : HeapIterator iterator(heap);
1201 70304 : for (HeapObject obj = iterator.next(); !obj.is_null();
1202 : obj = iterator.next()) {
1203 456911 : for (int i = 0; i < size; i++) {
1204 421764 : if (*objs[i] == obj) {
1205 30 : found_count++;
1206 : }
1207 : }
1208 : }
1209 5 : return found_count;
1210 : }
1211 :
1212 :
// Allocates objects in new space, old space and large-object space and
// verifies that a full HeapIterator walk visits every one of them.
TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] =
      factory->NewJSArray(10, HOLEY_ELEMENTS, AllocationType::kOld);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", AllocationType::kOld);

  // Allocate a large string (for large object space): one byte over the
  // regular-object size limit forces the LO space.
  int large_size = kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] =
      factory->NewStringFromAsciiChecked(str, AllocationType::kOld);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] =
      Handle<Map>(HeapObject::cast(*objs[0])->map(), isolate);

  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}
1250 :
// Checks that the bytecode of a function that is not executed for several
// full GCs is flushed, and that calling the function afterwards lazily
// recompiles it.
TEST(TestBytecodeFlushing) {
#ifndef V8_LITE_MODE
  FLAG_opt = false;
  FLAG_always_opt = false;
  i::FLAG_optimize_for_size = false;
#endif  // V8_LITE_MODE
  i::FLAG_flush_bytecode = true;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  Isolate* i_isolate = CcTest::i_isolate();
  Factory* factory = i_isolate->factory();

  {
    v8::HandleScope scope(isolate);
    v8::Context::New(isolate)->Enter();
    const char* source =
        "function foo() {"
        "  var x = 42;"
        "  var y = 42;"
        "  var z = x + y;"
        "};"
        "foo()";
    Handle<String> foo_name = factory->InternalizeUtf8String("foo");

    // This compile will add the code to the compilation cache.
    {
      v8::HandleScope scope(isolate);
      CompileRun(source);
    }

    // Check function is compiled.
    Handle<Object> func_value =
        Object::GetProperty(i_isolate, i_isolate->global_object(), foo_name)
            .ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    CHECK(function->shared()->is_compiled());

    // The code will survive at least two GCs.
    CcTest::CollectAllGarbage();
    CcTest::CollectAllGarbage();
    CHECK(function->shared()->is_compiled());

    // Simulate several GCs that use full marking, enough to age the
    // bytecode past the flushing threshold.
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      CcTest::CollectAllGarbage();
    }

    // foo should no longer be in the compilation cache
    CHECK(!function->shared()->is_compiled());
    CHECK(!function->is_compiled());
    // Call foo to get it recompiled.
    CompileRun("foo()");
    CHECK(function->shared()->is_compiled());
    CHECK(function->is_compiled());
  }
}
1311 :
1312 : #ifndef V8_LITE_MODE
1313 :
// Checks that a function whose bytecode has become a flushing candidate
// can still be optimized while incremental marking is active, and that
// the freshly optimized function is not flushed by the next GC.
TEST(TestOptimizeAfterBytecodeFlushingCandidate) {
  FLAG_opt = true;
  FLAG_always_opt = false;
  i::FLAG_optimize_for_size = false;
  i::FLAG_incremental_marking = true;
  i::FLAG_flush_bytecode = true;
  i::FLAG_allow_natives_syntax = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());
  const char* source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo()";
  Handle<String> foo_name = factory->InternalizeUtf8String("foo");

  // This compile will add the code to the compilation cache.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(source);
  }

  // Check function is compiled.
  Handle<Object> func_value =
      Object::GetProperty(isolate, isolate->global_object(), foo_name)
          .ToHandleChecked();
  CHECK(func_value->IsJSFunction());
  Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
  CHECK(function->shared()->is_compiled());

  // The code will survive at least two GCs.
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  CHECK(function->shared()->is_compiled());

  // Simulate several GCs that use incremental marking; after enough of
  // them the unused bytecode gets flushed.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    heap::SimulateIncrementalMarking(CcTest::heap());
    CcTest::CollectAllGarbage();
  }
  CHECK(!function->shared()->is_compiled());
  CHECK(!function->is_compiled());

  // This compile will compile the function again.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("foo();");
  }

  // Simulate several GCs that use incremental marking but make sure
  // the loop breaks once the function is enqueued as a candidate.
  for (int i = 0; i < kAgingThreshold; i++) {
    heap::SimulateIncrementalMarking(CcTest::heap());
    if (function->shared()->GetBytecodeArray()->IsOld()) break;
    CcTest::CollectAllGarbage();
  }

  // Force optimization while incremental marking is active and while
  // the function is enqueued as a candidate.
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
  }

  // Simulate one final GC and make sure the candidate wasn't flushed.
  CcTest::CollectAllGarbage();
  CHECK(function->shared()->is_compiled());
  CHECK(function->is_compiled());
}
1389 :
1390 : #endif // V8_LITE_MODE
1391 :
// Checks that installing an already-optimized function into a second
// closure via the CompileLazy builtin while incremental marking is active
// goes through the incremental write barrier correctly.
TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
  if (!FLAG_incremental_marking) return;
  // Turn off always_opt because it interferes with running the built-in for
  // the last call to g().
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());

  // f and g share the same SharedFunctionInfo; only f is invoked.
  CompileRun(
      "function make_closure(x) {"
      "  return function() { return x + 3 };"
      "}"
      "var f = make_closure(5); f();"
      "var g = make_closure(5);");

  // Check f is compiled.
  Handle<String> f_name = factory->InternalizeUtf8String("f");
  Handle<Object> f_value =
      Object::GetProperty(isolate, isolate->global_object(), f_name)
          .ToHandleChecked();
  Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
  CHECK(f_function->is_compiled());

  // Check g is not compiled.
  Handle<String> g_name = factory->InternalizeUtf8String("g");
  Handle<Object> g_value =
      Object::GetProperty(isolate, isolate->global_object(), g_name)
          .ToHandleChecked();
  Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
  CHECK(!g_function->is_compiled());

  heap::SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(f); f();");

  // g should now have available an optimized function, unmarked by gc. The
  // CompileLazy built-in will discover it and install it in the closure, and
  // the incremental write barrier should be used.
  CompileRun("g();");
  CHECK(g_function->is_compiled());
}
1436 :
// Checks compilation-cache retention: a cached script survives at least
// one GC, but once its bytecode has been aged past the flushing threshold
// the next GC evicts the cache entry.
TEST(CompilationCacheCachingBehavior) {
  // This test is only valid when the compilation cache is enabled.
  if (!FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo();";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // The script should be in the cache now.
  {
    v8::HandleScope scope(CcTest::isolate());
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(!cached_script.is_null());
  }

  // Check that the code cache entry survives at least one GC.
  {
    CcTest::CollectAllGarbage();
    v8::HandleScope scope(CcTest::isolate());
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(!cached_script.is_null());

    // Progress code age until it's old and ready for GC.
    Handle<SharedFunctionInfo> shared = cached_script.ToHandleChecked();
    CHECK(shared->HasBytecodeArray());
    const int kAgingThreshold = 6;
    for (int i = 0; i < kAgingThreshold; i++) {
      shared->GetBytecodeArray()->MakeOlder();
    }
  }

  CcTest::CollectAllGarbage();

  {
    v8::HandleScope scope(CcTest::isolate());
    // Ensure code aging cleared the entry from the cache.
    MaybeHandle<SharedFunctionInfo> cached_script =
        compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                        v8::ScriptOriginOptions(true, false),
                                        native_context, language_mode);
    CHECK(cached_script.is_null());
  }
}
1505 :
1506 :
1507 150 : static void OptimizeEmptyFunction(const char* name) {
1508 : HandleScope scope(CcTest::i_isolate());
1509 : EmbeddedVector<char, 256> source;
1510 : SNPrintF(source,
1511 : "function %s() { return 0; }"
1512 : "%s(); %s();"
1513 : "%%OptimizeFunctionOnNextCall(%s);"
1514 : "%s();",
1515 150 : name, name, name, name, name);
1516 : CompileRun(source.start());
1517 150 : }
1518 :
1519 :
1520 : // Count the number of native contexts in the weak list of native contexts.
1521 366 : int CountNativeContexts() {
1522 : int count = 0;
1523 366 : Object object = CcTest::heap()->native_contexts_list();
1524 4266 : while (!object->IsUndefined(CcTest::i_isolate())) {
1525 1950 : count++;
1526 1950 : object = Context::cast(object)->next_context_link();
1527 : }
1528 366 : return count;
1529 : }
1530 :
// Verifies that native contexts are kept on a weak list: scavenges treat
// the list links as strong, but a full mark-compact drops contexts that
// have become unreachable.
TEST(TestInternalWeakLists) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_optimizer()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    CcTest::CollectAllGarbage();

    // Each new context must appear on the weak list.
    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    OptimizeEmptyFunction("f1");
    OptimizeEmptyFunction("f2");
    OptimizeEmptyFunction("f3");
    OptimizeEmptyFunction("f4");
    OptimizeEmptyFunction("f5");

    // Remove function f1, and
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    CcTest::CollectAllGarbage();

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }
    CcTest::CollectAllGarbage();
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }
    CcTest::CollectAllGarbage();

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::CollectAllGarbage();

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    i::Address* unsafe = reinterpret_cast<i::Address*>(*ctx[i]);
    *unsafe = ReadOnlyRoots(CcTest::heap()).undefined_value()->ptr();
    ctx[i].Clear();

    // Scavenge treats these references as strong: the context count must
    // not drop after new-space-only collections.
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1625 :
1626 :
// Measures the code-space cost of compiling a large regexp versus a
// half-size one, and checks the expected size relationship produced by
// regexp optimization kicking in below the "too large" threshold.
TEST(TestSizeOfRegExpCode) {
  if (!FLAG_regexp_optimization) return;

  v8::V8::Initialize();

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  LocalContext context;

  // Adjust source below and this check to match
  // RegExpImpl::kRegExpTooLargeToOptimize.
  CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);

  // Compile a regexp that is much larger if we are using regexp optimizations.
  CompileRun(
      "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
      "var half_size_reg_exp;"
      "while (reg_exp_source.length < 20 * 1024) {"
      "  half_size_reg_exp = reg_exp_source;"
      "  reg_exp_source = reg_exp_source + reg_exp_source;"
      "}"
      // Flatten string.
      "reg_exp_source.match(/f/);");

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::CollectAllAvailableGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile the large regexp (too large to optimize) and measure.
  CompileRun("'foo'.match(reg_exp_source);");
  CcTest::CollectAllAvailableGarbage();
  int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile the half-size regexp (eligible for optimization) and measure.
  CompileRun("'foo'.match(half_size_reg_exp);");
  CcTest::CollectAllAvailableGarbage();
  int size_with_optimized_regexp =
      static_cast<int>(CcTest::heap()->SizeOfObjects());

  int size_of_regexp_code = size_with_regexp - initial_size;

  // On some platforms the debug-code flag causes huge amounts of regexp code
  // to be emitted, breaking this test.
  if (!FLAG_debug_code) {
    CHECK_LE(size_of_regexp_code, 1 * MB);
  }

  // Small regexp is half the size, but compiles to more than twice the code
  // due to the optimization steps.
  CHECK_GE(size_with_optimized_regexp,
           size_with_regexp + size_of_regexp_code * 2);
}
1683 :
1684 :
// Checks that Heap::SizeOfObjects tracks allocations exactly, both while
// concurrent sweeping is in progress and after a full GC.
HEAP_TEST(TestSizeOfObjects) {
  v8::V8::Initialize();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  MarkCompactCollector* collector = heap->mark_compact_collector();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::CollectAllAvailableGarbage();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(heap->SizeOfObjects());

  {
    HandleScope scope(isolate);
    // Allocate objects on several different old-space pages so that
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      isolate->factory()->NewFixedArray(8192, AllocationType::kOld);
      // SizeOfObjects must grow by exactly one array size per iteration.
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(heap->SizeOfObjects()));
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  CcTest::CollectAllGarbage();
  // Normally sweeping would not be complete here, but no guarantees.
  CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
  // Waiting for sweeper threads should not change heap size.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
}
1724 :
1725 :
// Exercises Heap::GetMaximumFillToAlign and Heap::GetFillToAlign for every
// supported allocation alignment, at aligned and misaligned addresses.
TEST(TestAlignmentCalculations) {
  // Maximum fill amounts are consistent.
  int maximum_double_misalignment = kDoubleSize - kTaggedSize;
  int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
  CHECK_EQ(0, max_word_fill);
  int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
  CHECK_EQ(maximum_double_misalignment, max_double_fill);
  int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
  CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);

  Address base = kNullAddress;
  int fill = 0;

  // Word alignment never requires fill.
  fill = Heap::GetFillToAlign(base, kWordAligned);
  CHECK_EQ(0, fill);
  fill = Heap::GetFillToAlign(base + kTaggedSize, kWordAligned);
  CHECK_EQ(0, fill);

  // No fill is required when address is double aligned.
  fill = Heap::GetFillToAlign(base, kDoubleAligned);
  CHECK_EQ(0, fill);
  // Fill is required if address is not double aligned.
  fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleAligned);
  CHECK_EQ(maximum_double_misalignment, fill);
  // kDoubleUnaligned has the opposite fill amounts.
  fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
  CHECK_EQ(maximum_double_misalignment, fill);
  fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleUnaligned);
  CHECK_EQ(0, fill);
}
1757 :
1758 30 : static HeapObject NewSpaceAllocateAligned(int size,
1759 : AllocationAlignment alignment) {
1760 30 : Heap* heap = CcTest::heap();
1761 : AllocationResult allocation =
1762 : heap->new_space()->AllocateRawAligned(size, alignment);
1763 : HeapObject obj;
1764 : allocation.To(&obj);
1765 30 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1766 30 : return obj;
1767 : }
1768 :
1769 : // Get new space allocation into the desired alignment.
1770 30 : static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
1771 30 : Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1772 30 : int fill = Heap::GetFillToAlign(*top_addr, alignment);
1773 30 : int allocation = fill + offset;
1774 30 : if (allocation) {
1775 10 : NewSpaceAllocateAligned(allocation, kWordAligned);
1776 : }
1777 30 : return *top_addr;
1778 : }
1779 :
1780 :
// Verifies that aligned new-space allocations from the linear allocation
// area produce correctly aligned objects, inserting a one-word filler
// before the object only when the top pointer is misaligned.
TEST(TestAlignedAllocation) {
  // Double misalignment is 4 on 32-bit platforms or when pointer compression
  // is enabled, 0 on 64-bit ones when pointer compression is disabled.
  const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  Address start;
  HeapObject obj;
  HeapObject filler;
  if (double_misalignment) {
    // Allocate a pointer sized object that must be double aligned at an
    // aligned address.
    start = AlignNewSpace(kDoubleAligned, 0);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
    CHECK(IsAligned(obj->address(), kDoubleAlignment));
    // There is no filler.
    CHECK_EQ(kTaggedSize, *top_addr - start);

    // Allocate a second pointer sized object that must be double aligned at an
    // unaligned address.
    start = AlignNewSpace(kDoubleAligned, kTaggedSize);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
    CHECK(IsAligned(obj->address(), kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
    CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);

    // Similarly for kDoubleUnaligned.
    start = AlignNewSpace(kDoubleUnaligned, 0);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
    CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
    CHECK_EQ(kTaggedSize, *top_addr - start);
    start = AlignNewSpace(kDoubleUnaligned, kTaggedSize);
    obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
    CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
    CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
  }
}
1822 :
1823 35 : static HeapObject OldSpaceAllocateAligned(int size,
1824 : AllocationAlignment alignment) {
1825 35 : Heap* heap = CcTest::heap();
1826 : AllocationResult allocation =
1827 35 : heap->old_space()->AllocateRawAligned(size, alignment);
1828 : HeapObject obj;
1829 : allocation.To(&obj);
1830 35 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1831 35 : return obj;
1832 : }
1833 :
1834 : // Get old space allocation into the desired alignment.
1835 30 : static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
1836 30 : Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
1837 30 : int fill = Heap::GetFillToAlign(*top_addr, alignment);
1838 30 : int allocation = fill + offset;
1839 30 : if (allocation) {
1840 15 : OldSpaceAllocateAligned(allocation, kWordAligned);
1841 : }
1842 30 : Address top = *top_addr;
1843 : // Now force the remaining allocation onto the free list.
1844 30 : CcTest::heap()->old_space()->FreeLinearAllocationArea();
1845 30 : return top;
1846 : }
1847 :
1848 :
1849 : // Test the case where allocation must be done from the free list, so filler
1850 : // may precede or follow the object.
1851 26068 : TEST(TestAlignedOverAllocation) {
1852 5 : Heap* heap = CcTest::heap();
1853 : // Test checks for fillers before and behind objects and requires a fresh
1854 : // page and empty free list.
1855 5 : heap::AbandonCurrentlyFreeMemory(heap->old_space());
1856 : // Allocate a dummy object to properly set up the linear allocation info.
1857 5 : AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
1858 5 : CHECK(!dummy.IsRetry());
1859 : heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kTaggedSize,
1860 5 : ClearRecordedSlots::kNo);
1861 :
1862 : // Double misalignment is 4 on 32-bit platforms or when pointer compression
1863 : // is enabled, 0 on 64-bit ones when pointer compression is disabled.
1864 : const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
1865 : Address start;
1866 : HeapObject obj;
1867 5 : HeapObject filler;
1868 : if (double_misalignment) {
1869 5 : start = AlignOldSpace(kDoubleAligned, 0);
1870 5 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1871 : // The object is aligned.
1872 5 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1873 : // Try the opposite alignment case.
1874 5 : start = AlignOldSpace(kDoubleAligned, kTaggedSize);
1875 5 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1876 5 : CHECK(IsAligned(obj->address(), kDoubleAlignment));
1877 5 : filler = HeapObject::FromAddress(start);
1878 5 : CHECK(obj != filler);
1879 5 : CHECK(filler->IsFiller());
1880 5 : CHECK_EQ(kTaggedSize, filler->Size());
1881 10 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1882 :
1883 : // Similarly for kDoubleUnaligned.
1884 5 : start = AlignOldSpace(kDoubleUnaligned, 0);
1885 5 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1886 : // The object is aligned.
1887 10 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1888 : // Try the opposite alignment case.
1889 5 : start = AlignOldSpace(kDoubleUnaligned, kTaggedSize);
1890 5 : obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1891 10 : CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
1892 5 : filler = HeapObject::FromAddress(start);
1893 10 : CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
1894 : }
1895 5 : }
1896 :
// Verifies that HeapNumbers allocated via the factory honor the map's
// required alignment in both new and old space, regardless of the
// misalignment of the allocation top when the allocation starts.
TEST(HeapNumberAlignment) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope sc(isolate);

  const auto required_alignment =
      HeapObject::RequiredAlignment(*factory->heap_number_map());
  const int maximum_misalignment =
      Heap::GetMaximumFillToAlign(required_alignment);

  for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
    AlignNewSpace(required_alignment, offset);
    Handle<Object> number_new = factory->NewNumber(1.000123);
    CHECK(number_new->IsHeapNumber());
    CHECK(Heap::InYoungGeneration(*number_new));
    // Zero fill-to-align means the object landed at an aligned address.
    CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
                                     required_alignment));

    AlignOldSpace(required_alignment, offset);
    Handle<Object> number_old =
        factory->NewNumber(1.000321, AllocationType::kOld);
    CHECK(number_old->IsHeapNumber());
    CHECK(heap->InOldSpace(*number_old));
    CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
                                     required_alignment));
  }
}
1926 :
// Same as HeapNumberAlignment, but for MutableHeapNumbers.
TEST(MutableHeapNumberAlignment) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope sc(isolate);

  const auto required_alignment =
      HeapObject::RequiredAlignment(*factory->mutable_heap_number_map());
  const int maximum_misalignment =
      Heap::GetMaximumFillToAlign(required_alignment);

  for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
    AlignNewSpace(required_alignment, offset);
    Handle<Object> number_new = factory->NewMutableHeapNumber(1.000123);
    CHECK(number_new->IsMutableHeapNumber());
    CHECK(Heap::InYoungGeneration(*number_new));
    // Zero fill-to-align means the object landed at an aligned address.
    CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new)->address(),
                                     required_alignment));

    AlignOldSpace(required_alignment, offset);
    Handle<Object> number_old =
        factory->NewMutableHeapNumber(1.000321, AllocationType::kOld);
    CHECK(number_old->IsMutableHeapNumber());
    CHECK(heap->InOldSpace(*number_old));
    CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old)->address(),
                                     required_alignment));
  }
}
1956 :
1957 26068 : TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1958 5 : CcTest::InitializeVM();
1959 10 : HeapIterator iterator(CcTest::heap());
1960 5 : intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1961 : intptr_t size_of_objects_2 = 0;
1962 70234 : for (HeapObject obj = iterator.next(); !obj.is_null();
1963 : obj = iterator.next()) {
1964 35112 : if (!obj->IsFreeSpace()) {
1965 35112 : size_of_objects_2 += obj->Size();
1966 : }
1967 : }
1968 : // Delta must be within 5% of the larger result.
1969 : // TODO(gc): Tighten this up by distinguishing between byte
1970 : // arrays that are real and those that merely mark free space
1971 : // on the heap.
1972 5 : if (size_of_objects_1 > size_of_objects_2) {
1973 5 : intptr_t delta = size_of_objects_1 - size_of_objects_2;
1974 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1975 : ", "
1976 : "Iterator: %" V8PRIdPTR
1977 : ", "
1978 : "delta: %" V8PRIdPTR "\n",
1979 5 : size_of_objects_1, size_of_objects_2, delta);
1980 5 : CHECK_GT(size_of_objects_1 / 20, delta);
1981 : } else {
1982 0 : intptr_t delta = size_of_objects_2 - size_of_objects_1;
1983 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1984 : ", "
1985 : "Iterator: %" V8PRIdPTR
1986 : ", "
1987 : "delta: %" V8PRIdPTR "\n",
1988 0 : size_of_objects_1, size_of_objects_2, delta);
1989 0 : CHECK_GT(size_of_objects_2 / 20, delta);
1990 : }
1991 5 : }
1992 :
// Verifies new-space semi-space capacity management: Grow() doubles the
// capacity, Shrink() halves it only when the space is empty enough, and
// repeated shrinks bottom out without further effect.
TEST(GrowAndShrinkNewSpace) {
  // Avoid shrinking new space in GC epilogue. This can happen if allocation
  // throughput samples have been taken while executing the benchmark.
  FLAG_predictable = true;

  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  NewSpace* new_space = heap->new_space();

  // If the semi-space cannot grow at all, there is nothing to test.
  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    return;
  }

  // Make sure we're in a consistent state to start out.
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  new_space->Shrink();

  // Explicitly growing should double the space capacity.
  size_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(2 * old_capacity, new_capacity);

  // Filling the space must not change its capacity.
  old_capacity = new_space->TotalCapacity();
  {
    v8::HandleScope temporary_scope(CcTest::isolate());
    heap::SimulateFullSpace(new_space);
  }
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);

  // Explicitly shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);

  // Let the scavenger empty the new space.
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK_LE(new_space->Size(), old_capacity);

  // Explicitly shrinking should halve the space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, 2 * new_capacity);

  // Consecutive shrinking should not affect space capacity.
  old_capacity = new_space->TotalCapacity();
  new_space->Shrink();
  new_space->Shrink();
  new_space->Shrink();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);
}
2050 :
// Verifies that CollectAllAvailableGarbage shrinks a previously grown and
// filled new space back to its original capacity.
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  // If the semi-space cannot grow at all, there is nothing to test.
  if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
    return;
  }

  v8::HandleScope scope(CcTest::isolate());
  NewSpace* new_space = heap->new_space();
  size_t old_capacity, new_capacity;
  old_capacity = new_space->TotalCapacity();
  new_space->Grow();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(2 * old_capacity, new_capacity);
  {
    v8::HandleScope temporary_scope(CcTest::isolate());
    heap::SimulateFullSpace(new_space);
  }
  CcTest::CollectAllAvailableGarbage();
  new_capacity = new_space->TotalCapacity();
  CHECK_EQ(old_capacity, new_capacity);
}
2073 :
2074 60 : static int NumberOfGlobalObjects() {
2075 : int count = 0;
2076 120 : HeapIterator iterator(CcTest::heap());
2077 803670 : for (HeapObject obj = iterator.next(); !obj.is_null();
2078 : obj = iterator.next()) {
2079 401775 : if (obj->IsJSGlobalObject()) count++;
2080 : }
2081 60 : return count;
2082 : }
2083 :
2084 :
2085 : // Test that we don't embed maps from foreign contexts into
2086 : // optimized code.
2087 26068 : TEST(LeakNativeContextViaMap) {
2088 5 : FLAG_allow_natives_syntax = true;
2089 5 : v8::Isolate* isolate = CcTest::isolate();
2090 10 : v8::HandleScope outer_scope(isolate);
2091 : v8::Persistent<v8::Context> ctx1p;
2092 : v8::Persistent<v8::Context> ctx2p;
2093 : {
2094 10 : v8::HandleScope scope(isolate);
2095 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
2096 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
2097 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2098 : }
2099 :
2100 5 : CcTest::CollectAllAvailableGarbage();
2101 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2102 :
2103 : {
2104 10 : v8::HandleScope inner_scope(isolate);
2105 : CompileRun("var v = {x: 42}");
2106 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2107 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2108 : v8::Local<v8::Value> v =
2109 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2110 5 : ctx2->Enter();
2111 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2112 : v8::Local<v8::Value> res = CompileRun(
2113 : "function f() { return o.x; }"
2114 : "for (var i = 0; i < 10; ++i) f();"
2115 : "%OptimizeFunctionOnNextCall(f);"
2116 : "f();");
2117 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2118 25 : CHECK(ctx2->Global()
2119 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2120 : .FromJust());
2121 5 : ctx2->Exit();
2122 5 : v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2123 : ctx1p.Reset();
2124 5 : isolate->ContextDisposedNotification();
2125 : }
2126 5 : CcTest::CollectAllAvailableGarbage();
2127 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2128 : ctx2p.Reset();
2129 5 : CcTest::CollectAllAvailableGarbage();
2130 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2131 5 : }
2132 :
2133 :
// Test that we don't embed functions from foreign contexts into
// optimized code.
TEST(LeakNativeContextViaFunction) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = function() { return 42; }");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    // Optimize f in ctx2 while calling a function that belongs to ctx1.
    v8::Local<v8::Value> res = CompileRun(
        "function f(x) { return x(); }"
        "for (var i = 0; i < 10; ++i) f(o);"
        "%OptimizeFunctionOnNextCall(f);"
        "f(o);");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Clear the reference so only embedded functions (if any) could keep
    // ctx1 alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2181 :
2182 :
// Like LeakNativeContextViaMap, but via a keyed (elements) access on an
// array that belongs to the foreign context.
TEST(LeakNativeContextViaMapKeyed) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = [42, 43]");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    // Optimize f in ctx2 while reading an element of the ctx1 array.
    v8::Local<v8::Value> res = CompileRun(
        "function f() { return o[0]; }"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Clear the reference so only embedded maps (if any) could keep ctx1
    // alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2228 :
2229 :
// Like LeakNativeContextViaMap, but via a prototype chain that crosses into
// the foreign context.
TEST(LeakNativeContextViaMapProto) {
  FLAG_allow_natives_syntax = true;
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope outer_scope(isolate);
  v8::Persistent<v8::Context> ctx1p;
  v8::Persistent<v8::Context> ctx2p;
  {
    v8::HandleScope scope(isolate);
    ctx1p.Reset(isolate, v8::Context::New(isolate));
    ctx2p.Reset(isolate, v8::Context::New(isolate));
    v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
  }

  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(2, NumberOfGlobalObjects());

  {
    v8::HandleScope inner_scope(isolate);
    CompileRun("var v = { y: 42}");
    v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
    v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
    v8::Local<v8::Value> v =
        ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
    ctx2->Enter();
    CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
    // Optimize f in ctx2 with an object whose __proto__ lives in ctx1.
    v8::Local<v8::Value> res = CompileRun(
        "function f() {"
        "  var p = {x: 42};"
        "  p.__proto__ = o;"
        "  return p.x;"
        "}"
        "for (var i = 0; i < 10; ++i) f();"
        "%OptimizeFunctionOnNextCall(f);"
        "f();");
    CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
    // Clear the reference so only embedded maps (if any) could keep ctx1
    // alive.
    CHECK(ctx2->Global()
              ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
              .FromJust());
    ctx2->Exit();
    ctx1->Exit();
    ctx1p.Reset();
    isolate->ContextDisposedNotification();
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(1, NumberOfGlobalObjects());
  ctx2p.Reset();
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(0, NumberOfGlobalObjects());
}
2279 :
2280 :
// Exercises the write barrier of the instanceof stub while incremental
// marking is in progress: optimized f's code is stepped to black, then g()
// runs f with a fresh object, and the final GC must complete cleanly.
TEST(InstanceOfStubWriteBarrier) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  if (!CcTest::i_isolate()->use_optimizer()) return;
  if (FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  // Restart incremental marking from a known state.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);

  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  IncrementalMarking::MarkingState* marking_state = marking->marking_state();

  // Step the marker until f's code object has been marked black.
  const double kStepSizeInMs = 100;
  while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->V8Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    StepOrigin::kV8);
  }

  CHECK(marking->IsMarking());

  {
    v8::HandleScope scope(CcTest::isolate());
    v8::Local<v8::Object> global = CcTest::global();
    v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
        global->Get(ctx, v8_str("g")).ToLocalChecked());
    g->Call(ctx, global, 0, nullptr).ToLocalChecked();
  }

  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
2341 :
// Verifies that current_gc_flags_ is set for the duration of an incremental
// full GC, survives intervening scavenges, and is reset afterwards.
HEAP_TEST(GCFlags) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();

  heap->set_current_gc_flags(Heap::kNoGCFlags);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Check whether we appropriately reset flags after GC.
  CcTest::heap()->CollectAllGarbage(Heap::kReduceMemoryFootprintMask,
                                    GarbageCollectionReason::kTesting);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
                                i::GarbageCollectionReason::kTesting);
  // Flags stay set while the incremental full GC is in progress.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  CcTest::CollectGarbage(NEW_SPACE);
  // NewSpace scavenges should not overwrite the flags.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  CcTest::CollectAllGarbage();
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
2373 :
HEAP_TEST(Regress845060) {
  // Regression test for crbug.com/845060, where a raw pointer to a string's
  // data was kept across an allocation. If the allocation causes GC and
  // moves the string, such raw pointers become invalid.
  FLAG_allow_natives_syntax = true;
  // Disable stress modes so the GC timing in this test stays deterministic.
  FLAG_stress_incremental_marking = false;
  FLAG_stress_compaction = false;
  CcTest::InitializeVM();
  LocalContext context;
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();

  // Preparation: create a string in new space.
  Local<Value> str = CompileRun("var str = (new Array(10000)).join('x'); str");
  CHECK(Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));

  // Idle incremental marking sets the "kReduceMemoryFootprint" flag, which
  // causes from_space to be unmapped after scavenging.
  heap->StartIdleIncrementalMarking(GarbageCollectionReason::kTesting);
  CHECK(heap->ShouldReduceMemory());

  // Run the test (which allocates results) until the original string was
  // promoted to old space. Unmapping of from_space causes accesses to any
  // stale raw pointers to crash.
  CompileRun("while (%InNewSpace(str)) { str.split(''); }");
  CHECK(!Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
}
2401 :
// Verifies that an idle notification with a long deadline finishes an
// incremental marking cycle, i.e. triggers exactly one full GC.
TEST(IdleNotificationFinishMarking) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  const int initial_gc_count = CcTest::heap()->gc_count();
  heap::SimulateFullSpace(CcTest::heap()->old_space());
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);

  // Starting incremental marking must not itself cause a GC.
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);

  // Drain the marking worklist so only finalization remains.
  const double kStepSizeInMs = 100;
  do {
    marking->V8Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                    StepOrigin::kV8);
  } while (
      !CcTest::heap()->mark_compact_collector()->marking_worklist()->IsEmpty());

  marking->SetWeakClosureWasOverApproximatedForTesting(true);

  // The next idle notification has to finish incremental marking.
  const double kLongIdleTime = 1000.0;
  CcTest::isolate()->IdleNotificationDeadline(
      (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
       static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
      kLongIdleTime);
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
}
2433 :
2434 :
2435 : // Test that HAllocateObject will always return an object in new-space.
2436 26068 : TEST(OptimizedAllocationAlwaysInNewSpace) {
2437 5 : FLAG_allow_natives_syntax = true;
2438 5 : CcTest::InitializeVM();
2439 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2440 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2441 : FLAG_stress_incremental_marking)
2442 : return;
2443 4 : v8::HandleScope scope(CcTest::isolate());
2444 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2445 2 : heap::SimulateFullSpace(CcTest::heap()->new_space());
2446 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2447 : v8::Local<v8::Value> res = CompileRun(
2448 : "function c(x) {"
2449 : " this.x = x;"
2450 : " for (var i = 0; i < 32; i++) {"
2451 : " this['x' + i] = x;"
2452 : " }"
2453 : "}"
2454 : "function f(x) { return new c(x); };"
2455 : "f(1); f(2); f(3);"
2456 : "%OptimizeFunctionOnNextCall(f);"
2457 : "f(4);");
2458 :
2459 8 : CHECK_EQ(4, res.As<v8::Object>()
2460 : ->GetRealNamedProperty(ctx, v8_str("x"))
2461 : .ToLocalChecked()
2462 : ->Int32Value(ctx)
2463 : .FromJust());
2464 :
2465 : i::Handle<JSReceiver> o =
2466 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2467 :
2468 2 : CHECK(Heap::InYoungGeneration(*o));
2469 : }
2470 :
2471 :
2472 26068 : TEST(OptimizedPretenuringAllocationFolding) {
2473 5 : FLAG_allow_natives_syntax = true;
2474 5 : FLAG_expose_gc = true;
2475 5 : CcTest::InitializeVM();
2476 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2477 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2478 : FLAG_stress_incremental_marking)
2479 : return;
2480 4 : v8::HandleScope scope(CcTest::isolate());
2481 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2482 : // Grow new space until maximum capacity reached.
2483 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2484 8 : CcTest::heap()->new_space()->Grow();
2485 : }
2486 :
2487 : i::ScopedVector<char> source(1024);
2488 : i::SNPrintF(source,
2489 : "var number_elements = %d;"
2490 : "var elements = new Array();"
2491 : "function f() {"
2492 : " for (var i = 0; i < number_elements; i++) {"
2493 : " elements[i] = [[{}], [1.1]];"
2494 : " }"
2495 : " return elements[number_elements-1]"
2496 : "};"
2497 : "f(); gc();"
2498 : "f(); f();"
2499 : "%%OptimizeFunctionOnNextCall(f);"
2500 : "f();",
2501 2 : kPretenureCreationCount);
2502 :
2503 : v8::Local<v8::Value> res = CompileRun(source.start());
2504 :
2505 : v8::Local<v8::Value> int_array =
2506 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2507 : i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2508 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2509 : v8::Local<v8::Value> double_array =
2510 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2511 : i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2512 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2513 :
2514 : i::Handle<JSReceiver> o =
2515 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
           : // All objects of the folded allocation group must have been pretenured.
2516 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2517 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2518 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2519 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2520 4 : CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2521 : }
2522 :
2523 :
2524 26068 : TEST(OptimizedPretenuringObjectArrayLiterals) {
2525 5 : FLAG_allow_natives_syntax = true;
2526 5 : FLAG_expose_gc = true;
2527 5 : CcTest::InitializeVM();
2528 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2529 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2530 : FLAG_stress_incremental_marking) {
2531 : return;
2532 : }
2533 4 : v8::HandleScope scope(CcTest::isolate());
2534 :
2535 : // Grow new space until maximum capacity reached.
2536 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2537 8 : CcTest::heap()->new_space()->Grow();
2538 : }
2539 :
2540 : i::ScopedVector<char> source(1024);
2541 : i::SNPrintF(source,
2542 : "var number_elements = %d;"
2543 : "var elements = new Array(number_elements);"
2544 : "function f() {"
2545 : " for (var i = 0; i < number_elements; i++) {"
2546 : " elements[i] = [{}, {}, {}];"
2547 : " }"
2548 : " return elements[number_elements - 1];"
2549 : "};"
2550 : "f(); gc();"
2551 : "f(); f();"
2552 : "%%OptimizeFunctionOnNextCall(f);"
2553 : "f();",
2554 2 : kPretenureCreationCount);
2555 :
2556 : v8::Local<v8::Value> res = CompileRun(source.start());
2557 :
2558 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2559 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2560 :
           : // The array and its backing store must have been pretenured.
2561 4 : CHECK(CcTest::heap()->InOldSpace(o->elements()));
2562 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2563 : }
2564 :
2565 26068 : TEST(OptimizedPretenuringNestedInObjectProperties) {
2566 5 : FLAG_allow_natives_syntax = true;
2567 5 : FLAG_expose_gc = true;
2568 5 : CcTest::InitializeVM();
2569 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2570 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2571 : FLAG_stress_incremental_marking) {
2572 : return;
2573 : }
2574 4 : v8::HandleScope scope(CcTest::isolate());
2575 :
2576 : // Grow new space until maximum capacity reached.
2577 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2578 8 : CcTest::heap()->new_space()->Grow();
2579 : }
2580 :
2581 : // Keep the nested literal alive while its root is freed
2582 : i::ScopedVector<char> source(1024);
2583 : i::SNPrintF(source,
2584 : "let number_elements = %d;"
2585 : "let elements = new Array(number_elements);"
2586 : "function f() {"
2587 : " for (let i = 0; i < number_elements; i++) {"
2588 : " let l = {a: {c: 2.2, d: {e: 3.3}}, b: 1.1}; "
2589 : " elements[i] = l.a;"
2590 : " }"
2591 : " return elements[number_elements-1];"
2592 : "};"
2593 : "f(); gc(); gc();"
2594 : "f(); f();"
2595 : "%%OptimizeFunctionOnNextCall(f);"
2596 : "f();",
2597 2 : kPretenureCreationCount);
2598 :
2599 : v8::Local<v8::Value> res = CompileRun(source.start());
2600 :
2601 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2602 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2603 :
2604 : // Nested literal sites are only pretenured if the top level
2605 : // literal is pretenured; here only the nested part is kept alive, so it
2606 : // must still be in the young generation.
2606 2 : CHECK(Heap::InYoungGeneration(*o));
2607 : }
2608 :
2609 26068 : TEST(OptimizedPretenuringMixedInObjectProperties) {
2610 5 : FLAG_allow_natives_syntax = true;
2611 5 : FLAG_expose_gc = true;
2612 5 : CcTest::InitializeVM();
2613 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2614 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2615 : FLAG_stress_incremental_marking)
2616 : return;
2617 4 : v8::HandleScope scope(CcTest::isolate());
2618 :
2619 : // Grow new space until maximum capacity reached.
2620 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2621 8 : CcTest::heap()->new_space()->Grow();
2622 : }
2623 :
2624 :
2625 : i::ScopedVector<char> source(1024);
2626 : i::SNPrintF(source,
2627 : "var number_elements = %d;"
2628 : "var elements = new Array(number_elements);"
2629 : "function f() {"
2630 : " for (var i = 0; i < number_elements; i++) {"
2631 : " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2632 : " }"
2633 : " return elements[number_elements - 1];"
2634 : "};"
2635 : "f(); gc();"
2636 : "f(); f();"
2637 : "%%OptimizeFunctionOnNextCall(f);"
2638 : "f();",
2639 2 : kPretenureCreationCount);
2640 :
2641 : v8::Local<v8::Value> res = CompileRun(source.start());
2642 :
2643 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2644 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2645 :
2646 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2647 2 : FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2648 2 : FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2649 4 : CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
           : // Field 'b' may be stored as an unboxed double (no heap object to check).
2650 : if (!o->IsUnboxedDoubleField(idx2)) {
2651 4 : CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
2652 : } else {
2653 : CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
2654 : }
2655 :
2656 2 : JSObject inner_object = JSObject::cast(o->RawFastPropertyAt(idx1));
2657 4 : CHECK(CcTest::heap()->InOldSpace(inner_object));
2658 : if (!inner_object->IsUnboxedDoubleField(idx1)) {
2659 4 : CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
2660 : } else {
2661 : CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
2662 : }
2663 4 : CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
2664 : }
2665 :
2666 :
2667 26068 : TEST(OptimizedPretenuringDoubleArrayProperties) {
2668 5 : FLAG_allow_natives_syntax = true;
2669 5 : FLAG_expose_gc = true;
2670 5 : CcTest::InitializeVM();
2671 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2672 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2673 : FLAG_stress_incremental_marking)
2674 : return;
2675 4 : v8::HandleScope scope(CcTest::isolate());
2676 :
2677 : // Grow new space until maximum capacity reached.
2678 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2679 8 : CcTest::heap()->new_space()->Grow();
2680 : }
2681 :
2682 : i::ScopedVector<char> source(1024);
2683 : i::SNPrintF(source,
2684 : "var number_elements = %d;"
2685 : "var elements = new Array(number_elements);"
2686 : "function f() {"
2687 : " for (var i = 0; i < number_elements; i++) {"
2688 : " elements[i] = {a: 1.1, b: 2.2};"
2689 : " }"
2690 : " return elements[i - 1];"
2691 : "};"
2692 : "f(); gc();"
2693 : "f(); f();"
2694 : "%%OptimizeFunctionOnNextCall(f);"
2695 : "f();",
2696 2 : kPretenureCreationCount);
2697 :
2698 : v8::Local<v8::Value> res = CompileRun(source.start());
2699 :
2700 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2701 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2702 :
2703 4 : CHECK(CcTest::heap()->InOldSpace(*o));
           : // Both properties fit in-object: no out-of-object property array was
           : // allocated, so the object still points at the empty property array.
2704 6 : CHECK_EQ(o->property_array(),
2705 : ReadOnlyRoots(CcTest::heap()).empty_property_array());
2706 : }
2707 :
2708 :
2709 26068 : TEST(OptimizedPretenuringdoubleArrayLiterals) {
2710 5 : FLAG_allow_natives_syntax = true;
2711 5 : FLAG_expose_gc = true;
2712 5 : CcTest::InitializeVM();
2713 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2714 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2715 : FLAG_stress_incremental_marking)
2716 : return;
2717 4 : v8::HandleScope scope(CcTest::isolate());
2718 :
2719 : // Grow new space until maximum capacity reached.
2720 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2721 8 : CcTest::heap()->new_space()->Grow();
2722 : }
2723 :
2724 : i::ScopedVector<char> source(1024);
2725 : i::SNPrintF(source,
2726 : "var number_elements = %d;"
2727 : "var elements = new Array(number_elements);"
2728 : "function f() {"
2729 : " for (var i = 0; i < number_elements; i++) {"
2730 : " elements[i] = [1.1, 2.2, 3.3];"
2731 : " }"
2732 : " return elements[number_elements - 1];"
2733 : "};"
2734 : "f(); gc();"
2735 : "f(); f();"
2736 : "%%OptimizeFunctionOnNextCall(f);"
2737 : "f();",
2738 2 : kPretenureCreationCount);
2739 :
2740 : v8::Local<v8::Value> res = CompileRun(source.start());
2741 :
2742 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2743 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2744 :
           : // Both the array and its double elements store must be in old space.
2745 4 : CHECK(CcTest::heap()->InOldSpace(o->elements()));
2746 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2747 : }
2748 :
2749 :
2750 26068 : TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2751 5 : FLAG_allow_natives_syntax = true;
2752 5 : FLAG_expose_gc = true;
2753 5 : CcTest::InitializeVM();
2754 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2755 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2756 : FLAG_stress_incremental_marking)
2757 : return;
2758 4 : v8::HandleScope scope(CcTest::isolate());
2759 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2760 : // Grow new space until maximum capacity reached.
2761 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2762 8 : CcTest::heap()->new_space()->Grow();
2763 : }
2764 :
2765 : i::ScopedVector<char> source(1024);
2766 : i::SNPrintF(source,
2767 : "var number_elements = %d;"
2768 : "var elements = new Array(number_elements);"
2769 : "function f() {"
2770 : " for (var i = 0; i < number_elements; i++) {"
2771 : " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2772 : " }"
2773 : " return elements[number_elements - 1];"
2774 : "};"
2775 : "f(); gc();"
2776 : "f(); f();"
2777 : "%%OptimizeFunctionOnNextCall(f);"
2778 : "f();",
2779 2 : kPretenureCreationCount);
2780 :
2781 : v8::Local<v8::Value> res = CompileRun(source.start());
2782 :
2783 : v8::Local<v8::Value> int_array =
2784 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2785 : i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2786 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2787 : v8::Local<v8::Value> double_array =
2788 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2789 : i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2790 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2791 :
2792 : Handle<JSObject> o = Handle<JSObject>::cast(
2793 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
           : // The outer array and both nested arrays (plus backing stores) must be
           : // pretenured.
2794 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2795 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2796 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2797 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2798 4 : CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2799 : }
2800 :
2801 :
2802 26068 : TEST(OptimizedPretenuringNestedObjectLiterals) {
2803 5 : FLAG_allow_natives_syntax = true;
2804 5 : FLAG_expose_gc = true;
2805 5 : CcTest::InitializeVM();
2806 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2807 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2808 : FLAG_stress_incremental_marking)
2809 : return;
2810 4 : v8::HandleScope scope(CcTest::isolate());
2811 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2812 : // Grow new space until maximum capacity reached.
2813 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2814 8 : CcTest::heap()->new_space()->Grow();
2815 : }
2816 :
2817 : i::ScopedVector<char> source(1024);
2818 : i::SNPrintF(source,
2819 : "var number_elements = %d;"
2820 : "var elements = new Array(number_elements);"
2821 : "function f() {"
2822 : " for (var i = 0; i < number_elements; i++) {"
2823 : " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2824 : " }"
2825 : " return elements[number_elements - 1];"
2826 : "};"
2827 : "f(); gc();"
2828 : "f(); f();"
2829 : "%%OptimizeFunctionOnNextCall(f);"
2830 : "f();",
2831 2 : kPretenureCreationCount);
2832 :
2833 : v8::Local<v8::Value> res = CompileRun(source.start());
2834 :
2835 : v8::Local<v8::Value> int_array_1 =
2836 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2837 : Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
2838 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
2839 : v8::Local<v8::Value> int_array_2 =
2840 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2841 : Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
2842 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));
2843 :
2844 : Handle<JSObject> o = Handle<JSObject>::cast(
2845 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
           : // The outer array and both nested arrays (plus backing stores) must be
           : // pretenured.
2846 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2847 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
2848 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
2849 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
2850 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
2851 : }
2852 :
2853 :
2854 26068 : TEST(OptimizedPretenuringNestedDoubleLiterals) {
2855 5 : FLAG_allow_natives_syntax = true;
2856 5 : FLAG_expose_gc = true;
2857 5 : CcTest::InitializeVM();
2858 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2859 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2860 : FLAG_stress_incremental_marking)
2861 : return;
2862 4 : v8::HandleScope scope(CcTest::isolate());
2863 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2864 : // Grow new space until maximum capacity reached.
2865 18 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2866 8 : CcTest::heap()->new_space()->Grow();
2867 : }
2868 :
2869 : i::ScopedVector<char> source(1024);
2870 : i::SNPrintF(source,
2871 : "var number_elements = %d;"
2872 : "var elements = new Array(number_elements);"
2873 : "function f() {"
2874 : " for (var i = 0; i < number_elements; i++) {"
2875 : " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2876 : " }"
2877 : " return elements[number_elements - 1];"
2878 : "};"
2879 : "f(); gc();"
2880 : "f(); f();"
2881 : "%%OptimizeFunctionOnNextCall(f);"
2882 : "f();",
2883 2 : kPretenureCreationCount);
2884 :
2885 : v8::Local<v8::Value> res = CompileRun(source.start());
2886 :
2887 : v8::Local<v8::Value> double_array_1 =
2888 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2889 : i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
2890 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
2891 : v8::Local<v8::Value> double_array_2 =
2892 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2893 : i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
2894 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));
2895 :
2896 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2897 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
           : // The outer array and both nested double arrays (plus backing stores)
           : // must be pretenured.
2898 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2899 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
2900 4 : CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
2901 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
2902 4 : CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
2903 : }
2904 :
2905 :
2906 : // Test regular array literals allocation.
2907 26068 : TEST(OptimizedAllocationArrayLiterals) {
2908 5 : FLAG_allow_natives_syntax = true;
2909 5 : CcTest::InitializeVM();
2910 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2911 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2912 : FLAG_stress_incremental_marking)
2913 : return;
2914 4 : v8::HandleScope scope(CcTest::isolate());
2915 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2916 : v8::Local<v8::Value> res = CompileRun(
2917 : "function f() {"
2918 : " var numbers = new Array(1, 2, 3);"
2919 : " numbers[0] = 3.14;"
2920 : " return numbers;"
2921 : "};"
2922 : "f(); f(); f();"
2923 : "%OptimizeFunctionOnNextCall(f);"
2924 : "f();");
2925 8 : CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
2926 : ->Get(ctx, v8_str("0"))
2927 : .ToLocalChecked()
2928 : ->Int32Value(ctx)
2929 : .FromJust());
2930 :
2931 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2932 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2933 :
           : // No pretenuring pressure here: the backing store stays in new space.
2934 2 : CHECK(Heap::InYoungGeneration(o->elements()));
2935 : }
2936 :
           : // Returns the number of outgoing transitions registered on |map|.
2937 10 : static int CountMapTransitions(i::Isolate* isolate, Map map) {
2938 : DisallowHeapAllocation no_gc;
2939 10 : return TransitionsAccessor(isolate, map, &no_gc).NumberOfTransitions();
2940 : }
2941 :
2942 :
2943 : // Test that map transitions are cleared and maps are collected with
2944 : // incremental marking as well.
2945 26068 : TEST(Regress1465) {
2946 5 : if (!FLAG_incremental_marking) return;
2947 5 : FLAG_stress_compaction = false;
2948 5 : FLAG_stress_incremental_marking = false;
2949 5 : FLAG_allow_natives_syntax = true;
2950 5 : FLAG_trace_incremental_marking = true;
2951 5 : FLAG_retain_maps_for_n_gc = 0;  // let unreferenced maps die immediately
2952 5 : CcTest::InitializeVM();
2953 5 : v8::Isolate* isolate = CcTest::isolate();
2954 : i::Isolate* i_isolate = CcTest::i_isolate();
2955 10 : v8::HandleScope scope(isolate);
2956 5 : v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
2957 : static const int transitions_count = 256;
2958 :
2959 : CompileRun("function F() {}");
2960 : {
2961 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2962 2565 : for (int i = 0; i < transitions_count; i++) {
2963 : EmbeddedVector<char, 64> buffer;
2964 1280 : SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2965 : CompileRun(buffer.start());
2966 : }
2967 : CompileRun("var root = new F;");
2968 : }
2969 :
2970 : i::Handle<JSReceiver> root =
2971 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2972 20 : CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
2973 :
2974 : // Count number of live transitions before marking.
2975 5 : int transitions_before = CountMapTransitions(i_isolate, root->map());
2976 : CompileRun("%DebugPrint(root);");
2977 5 : CHECK_EQ(transitions_count, transitions_before);
2978 :
2979 5 : heap::SimulateIncrementalMarking(CcTest::heap());
2980 5 : CcTest::CollectAllGarbage();
2981 :
2982 : // Count number of live transitions after marking. Note that one transition
2983 : // is left, because 'o' still holds an instance of one transition target.
2984 5 : int transitions_after = CountMapTransitions(i_isolate, root->map());
2985 : CompileRun("%DebugPrint(root);");
2986 5 : CHECK_EQ(1, transitions_after);
2987 : }
2988 :
2989 5 : static i::Handle<JSObject> GetByName(const char* name) {
2990 : return i::Handle<JSObject>::cast(
2991 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2992 10 : CcTest::global()
2993 20 : ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
2994 5 : .ToLocalChecked())));
2995 : }
2996 :
2997 : #ifdef DEBUG
           : // Adds |transitions_count| map transitions to F by assigning a distinct
           : // property (prop0..propN-1) on a fresh instance each iteration.
2998 : static void AddTransitions(int transitions_count) {
2999 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
3000 : for (int i = 0; i < transitions_count; i++) {
3001 : EmbeddedVector<char, 64> buffer;
3002 : SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3003 : CompileRun(buffer.start());
3004 : }
3005 : }
3006 :
3007 :
           : // Adds a Smi property |property_name| to |object| while arming the heap
           : // to force a global GC after |gc_count| allocations (allocation timeout),
           : // so the property store itself can trigger a collection.
3008 : static void AddPropertyTo(
3009 : int gc_count, Handle<JSObject> object, const char* property_name) {
3010 : Isolate* isolate = CcTest::i_isolate();
3011 : Factory* factory = isolate->factory();
3012 : Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3013 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3014 : FLAG_gc_interval = gc_count;
3015 : FLAG_gc_global = true;
3016 : FLAG_retain_maps_for_n_gc = 0;
3017 : CcTest::heap()->set_allocation_timeout(gc_count);
3018 : Object::SetProperty(isolate, object, prop_name, twenty_three).Check();
3019 : }
3020 :
3021 :
           : // Checks that a TransitionArray shrinks when a GC is forced during a
           : // property-adding allocation and all but one target map die.
3022 : TEST(TransitionArrayShrinksDuringAllocToZero) {
3023 : FLAG_stress_compaction = false;
3024 : FLAG_stress_incremental_marking = false;
3025 : FLAG_allow_natives_syntax = true;
3026 : CcTest::InitializeVM();
3027 : i::Isolate* i_isolate = CcTest::i_isolate();
3028 : v8::HandleScope scope(CcTest::isolate());
3029 : static const int transitions_count = 10;
3030 : CompileRun("function F() { }");
3031 : AddTransitions(transitions_count);
3032 : CompileRun("var root = new F;");
3033 : Handle<JSObject> root = GetByName("root");
3034 :
3035 : // Count number of live transitions before marking.
3036 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3037 : CHECK_EQ(transitions_count, transitions_before);
3038 :
3039 : // Get rid of o
3040 : CompileRun("o = new F;"
3041 : "root = new F");
3042 : root = GetByName("root");
3043 : AddPropertyTo(2, root, "funny");
3044 : CcTest::CollectGarbage(NEW_SPACE);
3045 :
3046 : // Count number of live transitions after marking. Note that one transition
3047 : // is left, because 'o' still holds an instance of one transition target.
3048 : int transitions_after =
3049 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3050 : CHECK_EQ(1, transitions_after);
3051 : }
3052 :
3053 :
3054 : TEST(TransitionArrayShrinksDuringAllocToOne) {
3055 : FLAG_stress_compaction = false;
3056 : FLAG_stress_incremental_marking = false;
3057 : FLAG_allow_natives_syntax = true;
3058 : CcTest::InitializeVM();
3059 : i::Isolate* i_isolate = CcTest::i_isolate();
3060 : v8::HandleScope scope(CcTest::isolate());
3061 : static const int transitions_count = 10;
3062 : CompileRun("function F() {}");
3063 : AddTransitions(transitions_count);
3064 : CompileRun("var root = new F;");
3065 : Handle<JSObject> root = GetByName("root");
3066 :
3067 : // Count number of live transitions before marking.
3068 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3069 : CHECK_EQ(transitions_count, transitions_before);
3070 :
3071 : root = GetByName("root");
3072 : AddPropertyTo(2, root, "funny");
3073 : CcTest::CollectGarbage(NEW_SPACE);
3074 :
3075 : // Count number of live transitions after marking. Two transitions remain:
3076 : // presumably the target still held live by 'o' plus the newly added
3077 : // 'funny' transition -- TODO(review): confirm against TransitionsAccessor.
3078 : int transitions_after =
3079 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3080 : CHECK_EQ(2, transitions_after);
3081 : }
3081 :
3082 :
3083 : TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3084 : FLAG_stress_compaction = false;
3085 : FLAG_stress_incremental_marking = false;
3086 : FLAG_allow_natives_syntax = true;
3087 : CcTest::InitializeVM();
3088 : i::Isolate* i_isolate = CcTest::i_isolate();
3089 : v8::HandleScope scope(CcTest::isolate());
3090 : static const int transitions_count = 10;
3091 : CompileRun("function F() {}");
3092 : AddTransitions(transitions_count);
3093 : CompileRun("var root = new F;");
3094 : Handle<JSObject> root = GetByName("root");
3095 :
3096 : // Count number of live transitions before marking.
3097 : int transitions_before = CountMapTransitions(i_isolate, root->map());
3098 : CHECK_EQ(transitions_count, transitions_before);
3099 :
3100 : root = GetByName("root");
3101 : AddPropertyTo(0, root, "prop9");
3102 : CcTest::CollectGarbage(OLD_SPACE);
3103 :
3104 : // Count number of live transitions after marking. One transition is
3105 : // expected to survive -- presumably the 'prop9' transition that was
3106 : // re-taken above.
3107 : int transitions_after =
3108 : CountMapTransitions(i_isolate, Map::cast(root->map()->GetBackPointer()));
3109 : CHECK_EQ(1, transitions_after);
3110 : }
3110 : #endif // DEBUG
3111 :
3112 :
           : // Checks that mostly-empty old-space pages left over after a burst of
           : // allocation are released back to the OS by successive full GCs.
3113 26068 : TEST(ReleaseOverReservedPages) {
3114 5 : if (FLAG_never_compact) return;
3115 5 : FLAG_trace_gc = true;
3116 : // The optimizer can allocate stuff, messing up the test.
3117 : #ifndef V8_LITE_MODE
3118 5 : FLAG_opt = false;
3119 5 : FLAG_always_opt = false;
3120 : #endif // V8_LITE_MODE
3121 : // - Parallel compaction increases fragmentation, depending on how existing
3122 : // memory is distributed. Since this is non-deterministic because of
3123 : // concurrent sweeping, we disable it for this test.
3124 : // - Concurrent sweeping adds non determinism, depending on when memory is
3125 : // available for further reuse.
3126 : // - Fast evacuation of pages may result in a different page count in old
3127 : // space.
3128 : ManualGCScope manual_gc_scope;
3129 5 : FLAG_page_promotion = false;
3130 5 : FLAG_parallel_compaction = false;
3131 5 : CcTest::InitializeVM();
3132 : Isolate* isolate = CcTest::i_isolate();
3133 : // If there's snapshot available, we don't know whether 20 small arrays will
3134 : // fit on the initial pages.
3135 5 : if (!isolate->snapshot_available()) return;
3136 : Factory* factory = isolate->factory();
3137 : Heap* heap = isolate->heap();
3138 10 : v8::HandleScope scope(CcTest::isolate());
3139 : // Ensure that the young generation is empty.
3140 5 : CcTest::CollectGarbage(NEW_SPACE);
3141 5 : CcTest::CollectGarbage(NEW_SPACE);
3142 : static const int number_of_test_pages = 20;
3143 :
3144 : // Prepare many pages with low live-bytes count.
3145 : PagedSpace* old_space = heap->old_space();
3146 5 : const int initial_page_count = old_space->CountTotalPages();
3147 5 : const int overall_page_count = number_of_test_pages + initial_page_count;
3148 205 : for (int i = 0; i < number_of_test_pages; i++) {
3149 : AlwaysAllocateScope always_allocate(isolate);
3150 100 : heap::SimulateFullSpace(old_space);
3151 100 : factory->NewFixedArray(1, AllocationType::kOld);
3152 : }
3153 5 : CHECK_EQ(overall_page_count, old_space->CountTotalPages());
3154 :
3155 : // Triggering one GC will cause a lot of garbage to be discovered but
3156 : // even spread across all allocated pages.
3157 5 : CcTest::CollectAllGarbage();
3158 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages());
3159 :
3160 : // Triggering subsequent GCs should cause at least half of the pages
3161 : // to be released to the OS after at most two cycles.
3162 5 : CcTest::CollectAllGarbage();
3163 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages());
3164 5 : CcTest::CollectAllGarbage();
3165 5 : CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
3166 :
3167 : // Triggering a last-resort GC should cause all pages to be released to the
3168 : // OS so that other processes can seize the memory. If we get a failure here
3169 : // where there are 2 pages left instead of 1, then we should increase the
3170 : // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3171 : // first page should be small in order to reduce memory used when the VM
3172 : // boots, but if the 20 small arrays don't fit on the first page then that's
3173 : // an indication that it is too small.
3174 5 : CcTest::CollectAllAvailableGarbage();
3175 5 : CHECK_GE(initial_page_count, old_space->CountTotalPages());
3176 : }
3177 :
           : // Number of kForcedGC use-counter notifications observed (see below).
3178 : static int forced_gc_counter = 0;
3179 :
           : // Use-counter callback that tallies forced GCs into forced_gc_counter.
3180 6 : void MockUseCounterCallback(v8::Isolate* isolate,
3181 : v8::Isolate::UseCounterFeature feature) {
3182 6 : isolate->GetCurrentContext();
3183 6 : if (feature == v8::Isolate::kForcedGC) {
3184 5 : forced_gc_counter++;
3185 : }
3186 6 : }
3187 :
3188 :
           : // Checks that a script-triggered gc() is reported via the kForcedGC
           : // use counter.
3189 26068 : TEST(CountForcedGC) {
3190 5 : FLAG_expose_gc = true;
3191 5 : CcTest::InitializeVM();
3192 : Isolate* isolate = CcTest::i_isolate();
3193 10 : v8::HandleScope scope(CcTest::isolate());
3194 :
3195 5 : isolate->SetUseCounterCallback(MockUseCounterCallback);
3196 :
3197 5 : forced_gc_counter = 0;
3198 : const char* source = "gc();";
3199 : CompileRun(source);
3200 5 : CHECK_GT(forced_gc_counter, 0);
3201 5 : }
3202 :
3203 :
3204 : #ifdef OBJECT_PRINT
3205 : TEST(PrintSharedFunctionInfo) {
3206 : CcTest::InitializeVM();
3207 : v8::HandleScope scope(CcTest::isolate());
3208 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3209 : const char* source = "f = function() { return 987654321; }\n"
3210 : "g = function() { return 123456789; }\n";
3211 : CompileRun(source);
3212 : i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
3213 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3214 : CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
3215 :
3216 : StdoutStream os;
3217 : g->shared()->Print(os);
3218 : os << std::endl;
3219 : }
3220 : #endif // OBJECT_PRINT
3221 :
3222 :
           : // Checks that weak call-IC feedback (two weak closure slots) survives
           : // incremental marking followed by a full GC.
3223 26068 : TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3224 6 : if (!FLAG_use_ic) return;
3225 5 : if (!FLAG_incremental_marking) return;
3226 5 : if (FLAG_always_opt) return;
3227 4 : CcTest::InitializeVM();
3228 8 : v8::HandleScope scope(CcTest::isolate());
3229 : v8::Local<v8::Value> fun1, fun2;
3230 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3231 : {
3232 : CompileRun("function fun() {};");
3233 16 : fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3234 : }
3235 :
3236 : {
3237 : CompileRun("function fun() {};");
3238 16 : fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3239 : }
3240 :
3241 : // Prepare function f that contains type feedback for the two closures.
3242 16 : CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3243 16 : CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
3244 : CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");
3245 :
3246 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3247 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3248 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3249 :
3250 8 : Handle<FeedbackVector> feedback_vector(f->feedback_vector(), f->GetIsolate());
3251 4 : FeedbackVectorHelper feedback_helper(feedback_vector);
3252 :
3253 : int expected_slots = 2;
3254 4 : CHECK_EQ(expected_slots, feedback_helper.slot_count());
3255 : int slot1 = 0;
3256 : int slot2 = 1;
3257 4 : CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3258 4 : CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3259 :
3260 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3261 4 : CcTest::CollectAllGarbage();
3262 :
           : // The weak references must not have been cleared by the GC.
3263 4 : CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3264 4 : CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3265 : }
3266 :
3267 :
3268 24 : static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3269 : InlineCacheState desired_state) {
3270 : Handle<FeedbackVector> vector =
3271 48 : Handle<FeedbackVector>(f->feedback_vector(), f->GetIsolate());
3272 24 : FeedbackVectorHelper helper(vector);
3273 : FeedbackSlot slot = helper.slot(slot_index);
3274 : FeedbackNexus nexus(vector, slot);
3275 24 : CHECK(nexus.ic_state() == desired_state);
3276 24 : }
3277 :
// Checks that the weak reference held by a monomorphic construct IC survives
// incremental marking plus a full GC: the feedback slot for "new o()" must
// remain weak-or-cleared (i.e. still a weak slot) after the collection.
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
  if (FLAG_lite_mode) return;  // Lite mode has no feedback vectors.
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun(
      "function fun() { this.x = 1; };"
      "function f(o) { return new o(); } f(fun); f(fun);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<FeedbackVector> vector(f->feedback_vector(), f->GetIsolate());
  CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // The slot must still be a weak slot after the GC.
  CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());
}
3302 :
// Checks that a monomorphic load IC stays MONOMORPHIC across incremental
// marking followed by a full GC.
TEST(IncrementalMarkingPreservesMonomorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for object
  // originating from the same native context.
  CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
             "function f(o) { return o.x; } f(obj); f(obj);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, MONOMORPHIC);

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // The IC state must be preserved by the GC.
  CheckVectorIC(f, 0, MONOMORPHIC);
}
3325 :
// Checks that a polymorphic load IC — fed with objects from two different
// native contexts — stays POLYMORPHIC across incremental marking followed
// by a full GC.
TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    // Create obj1 in a fresh native context.
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    // Create obj2 in another fresh native context.
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Simulate marking and collect; unlike ContextDisposeDoesntClear... below,
  // no context dispose notification is fired here.
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}
3364 :
// Same setup as IncrementalMarkingPreservesPolymorphicIC, but additionally
// fires a context-dispose notification before the GC; the polymorphic IC
// must still not be cleared.
TEST(ContextDisposeDoesntClearPolymorphicIC) {
  if (!FLAG_use_ic) return;
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    // Create obj1 in a fresh native context.
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    // Create obj2 in another fresh native context.
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}
3404 :
3405 :
3406 48 : class SourceResource : public v8::String::ExternalOneByteStringResource {
3407 : public:
3408 : explicit SourceResource(const char* data)
3409 24 : : data_(data), length_(strlen(data)) { }
3410 :
3411 24 : void Dispose() override {
3412 24 : i::DeleteArray(data_);
3413 24 : data_ = nullptr;
3414 24 : }
3415 :
3416 288 : const char* data() const override { return data_; }
3417 :
3418 96 : size_t length() const override { return length_; }
3419 :
3420 48 : bool IsDisposed() { return data_ == nullptr; }
3421 :
3422 : private:
3423 : const char* data_;
3424 : size_t length_;
3425 : };
3426 :
3427 :
// Runs |source| (which stores an error in the global "error"), then runs
// |accessor| (which touches error.stack) and verifies that the externalized
// script source is released by the subsequent GC.
void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
                               const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use external string
  // to check whether the data is being released since the external string
  // resource's callback is fired when the external string is GC'ed.
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  v8::HandleScope scope(isolate);
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(isolate);
    v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
    v8::Local<v8::String> source_string =
        v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
    i_isolate->heap()->CollectAllAvailableGarbage(
        i::GarbageCollectionReason::kTesting);
    v8::Script::Compile(ctx, source_string)
        .ToLocalChecked()
        ->Run(ctx)
        .ToLocalChecked();
    // While the script is alive the external source must not be released.
    CHECK(!resource->IsDisposed());
  }
  // i_isolate->heap()->CollectAllAvailableGarbage();
  CHECK(!resource->IsDisposed());

  // Touching the stack accessor should drop the retained source data ...
  CompileRun(accessor);
  i_isolate->heap()->CollectAllAvailableGarbage(
      i::GarbageCollectionReason::kTesting);

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}
3461 :
3462 :
// Drives ReleaseStackTraceDataTest over four error-producing scripts and
// both stack accessors (getter and setter) on a dedicated isolate, so that
// IC and concurrent-recompilation flags can be forced off for the whole run.
UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
#ifndef V8_LITE_MODE
  // ICs retain objects.
  FLAG_use_ic = false;
#endif  // V8_LITE_MODE
  FLAG_concurrent_recompilation = false;
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 = "var error = null;            "
    /* Normal Error */           "try {                        "
                                 "  throw new Error();         "
                                 "} catch (e) {                "
                                 "  error = e;                 "
                                 "}                            ";
    static const char* source2 = "var error = null;            "
    /* Stack overflow */         "try {                        "
                                 "  (function f() { f(); })(); "
                                 "} catch (e) {                "
                                 "  error = e;                 "
                                 "}                            ";
    static const char* source3 = "var error = null;            "
    /* Normal Error */           "try {                        "
    /* as prototype */           "  throw new Error();         "
                                 "} catch (e) {                "
                                 "  error = {};                "
                                 "  error.__proto__ = e;       "
                                 "}                            ";
    static const char* source4 = "var error = null;            "
    /* Stack overflow */         "try {                        "
    /* as prototype */           "  (function f() { f(); })(); "
                                 "} catch (e) {                "
                                 "  error = {};                "
                                 "  error.__proto__ = e;       "
                                 "}                            ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}
3523 :
3524 : // TODO(mmarchini) also write tests for async/await and Promise.all
// TODO(mmarchini) also write tests for async/await and Promise.all
// Runs |src| (which must throw), extracts the detailed stack trace stored on
// the exception under the stack-trace symbol, and hands the resulting
// FrameArray to |test| for inspection.
void DetailedErrorStackTraceTest(const char* src,
                                 std::function<void(Handle<FrameArray>)> test) {
  FLAG_detailed_error_stack_trace = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun(src);

  // The script is expected to throw.
  CHECK(try_catch.HasCaught());
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());

  Isolate* isolate = CcTest::i_isolate();
  Handle<Name> key = isolate->factory()->stack_trace_symbol();

  // The raw stack trace is stored on the exception object as a JSArray whose
  // elements are a FrameArray.
  Handle<FrameArray> stack_trace(
      FrameArray::cast(
          Handle<JSArray>::cast(
              Object::GetProperty(isolate, exception, key).ToHandleChecked())
              ->elements()),
      isolate);

  test(stack_trace);
}
3549 :
3550 : // * Test interpreted function error
// * Test interpreted function error
// Verifies that the detailed stack trace captures the actual arguments of
// each interpreted frame: func1(42), func2(foo, false), main(foo).
TEST(DetailedErrorStackTrace) {
  static const char* source =
      "function func1(arg1) {       "
      "  let err = new Error();     "
      "  throw err;                 "
      "}                            "
      "function func2(arg1, arg2) { "
      "  func1(42);                 "
      "}                            "
      "class Foo {};                "
      "function main(arg1, arg2) {  "
      "  func2(arg1, false);        "
      "}                            "
      "var foo = new Foo();         "
      "main(foo);                   ";

  DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
    // Innermost frame: func1 was called with the Smi 42.
    FixedArray foo_parameters = stack_trace->Parameters(0);
    CHECK_EQ(foo_parameters->length(), 1);
    CHECK(foo_parameters->get(0)->IsSmi());
    CHECK_EQ(Smi::ToInt(foo_parameters->get(0)), 42);

    // Middle frame: func2 received (foo, false).
    FixedArray bar_parameters = stack_trace->Parameters(1);
    CHECK_EQ(bar_parameters->length(), 2);
    CHECK(bar_parameters->get(0)->IsJSObject());
    CHECK(bar_parameters->get(1)->IsBoolean());
    Handle<Object> foo = Handle<Object>::cast(GetByName("foo"));
    CHECK_EQ(bar_parameters->get(0), *foo);
    CHECK(!bar_parameters->get(1)->BooleanValue(CcTest::i_isolate()));

    // Outermost frame: main received (foo, undefined) — arg2 was omitted.
    FixedArray main_parameters = stack_trace->Parameters(2);
    CHECK_EQ(main_parameters->length(), 2);
    CHECK(main_parameters->get(0)->IsJSObject());
    CHECK(main_parameters->get(1)->IsUndefined());
    CHECK_EQ(main_parameters->get(0), *foo);
  });
}
3588 :
3589 : // * Test optimized function with inline frame error
// * Test optimized function with inline frame error
// Verifies that the detailed stack trace reports correct arguments for an
// inlined frame: add(42) inlined into optimized foo(41).
TEST(DetailedErrorStackTraceInline) {
  FLAG_allow_natives_syntax = true;
  static const char* source =
      "function add(x) {                     "
      " if (x == 42)                         "
      "  throw new Error();                  "
      " return x + x;                        "
      "}                                     "
      "add(0);                               "
      "add(1);                               "
      "function foo(x) {                     "
      " return add(x + 1)                    "
      "}                                     "
      "foo(40);                              "
      "%OptimizeFunctionOnNextCall(foo);     "
      "foo(41);                              ";

  DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
    // Inlined frame: add was (logically) called with 42.
    FixedArray parameters_add = stack_trace->Parameters(0);
    CHECK_EQ(parameters_add->length(), 1);
    CHECK(parameters_add->get(0)->IsSmi());
    CHECK_EQ(Smi::ToInt(parameters_add->get(0)), 42);

    // Optimized caller frame: foo was called with 41.
    FixedArray parameters_foo = stack_trace->Parameters(1);
    CHECK_EQ(parameters_foo->length(), 1);
    CHECK(parameters_foo->get(0)->IsSmi());
    CHECK_EQ(Smi::ToInt(parameters_foo->get(0)), 41);
  });
}
3619 :
3620 : // * Test builtin exit error
// * Test builtin exit error
// Verifies the parameters captured for a builtin-exit frame: toFixed throws
// a RangeError for 9999 digits, and the frame records the receiver plus the
// argument (hence length 2, with the Smi at index 0 per the layout checked
// below).
TEST(DetailedErrorStackTraceBuiltinExit) {
  static const char* source =
      "function test(arg1) {           "
      "  (new Number()).toFixed(arg1); "
      "}                               "
      "test(9999);                     ";

  DetailedErrorStackTraceTest(source, [](Handle<FrameArray> stack_trace) {
    FixedArray parameters = stack_trace->Parameters(0);

    CHECK_EQ(parameters->length(), 2);
    CHECK(parameters->get(0)->IsSmi());
    CHECK_EQ(Smi::ToInt(parameters->get(0)), 9999);
  });
}
3636 :
// Regression test for crbug.com/169928: carefully lays out new space so that
// an array literal's backing store is allocated directly before a filler the
// size of an AllocationMemento, then runs code that would previously read
// past the object and crash with a protection violation.
TEST(Regress169928) {
  FLAG_allow_natives_syntax = true;
#ifndef V8_LITE_MODE
  FLAG_opt = false;
#endif  // V8_LITE_MODE
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             "    literal[0] = value;"
             "    return literal;"
             "}"
             "function get_standard_literal() {"
             "    var literal = [1, 2, 3];"
             "    return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CHECK(CcTest::global()
            ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
            .FromJust());

  // First make sure we flip spaces
  CcTest::CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data =
      factory->NewFixedArray(2, AllocationType::kYoung);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Fill new space so that the next allocations land at a precisely known
  // distance from the top of the space.
  heap::AllocateAllButNBytes(
      CcTest::heap()->new_space(),
      JSArray::kSize + AllocationMemento::kSize + kTaggedSize);

  Handle<JSArray> array =
      factory->NewJSArrayWithElements(array_data, PACKED_SMI_ELEMENTS);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasSmiOrObjectElements());

  // We need filler the size of AllocationMemento object, plus an extra
  // fill pointer value.
  HeapObject obj;
  AllocationResult allocation =
      CcTest::heap()->new_space()->AllocateRawUnaligned(
          AllocationMemento::kSize + kTaggedSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(addr_obj,
                                       AllocationMemento::kSize + kTaggedSize,
                                       ClearRecordedSlots::kNo);

  // Give the array a name, making sure not to allocate strings.
  v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(env.local(), mote_code_string)
      .ToLocalChecked()
      ->Run(env.local())
      .ToLocalChecked();
}
3719 :
// Checks that the write barrier records slots inside a large object pointing
// at an evacuation candidate, so those slots get updated when the candidate
// is moved during compaction.
TEST(LargeObjectSlotRecording) {
  if (!FLAG_incremental_marking) return;
  if (FLAG_never_compact) return;
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create an object on an evacuation candidate.
  heap::SimulateFullSpace(heap->old_space());
  Handle<FixedArray> lit =
      isolate->factory()->NewFixedArray(4, AllocationType::kOld);
  Page* evac_page = Page::FromHeapObject(*lit);
  heap::ForceEvacuationCandidate(evac_page);
  FixedArray old_location = *lit;

  // Allocate a large object.
  int size = Max(1000000, kMaxRegularHeapObjectSize + KB);
  CHECK_LT(kMaxRegularHeapObjectSize, size);
  Handle<FixedArray> lo =
      isolate->factory()->NewFixedArray(size, AllocationType::kOld);
  CHECK(heap->lo_space()->Contains(*lo));

  // Start incremental marking to activate the write barrier.
  heap::SimulateIncrementalMarking(heap, false);

  // Create references from the large object to the object on the evacuation
  // candidate.
  const int kStep = size / 10;
  for (int i = 0; i < size; i += kStep) {
    lo->set(i, *lit);
    CHECK(lo->get(i) == old_location);
  }

  heap::SimulateIncrementalMarking(heap, true);

  // Move the evacuation candidate object.
  CcTest::CollectAllGarbage();

  // Verify that the pointers in the large object got updated.
  for (int i = 0; i < size; i += kStep) {
    CHECK_EQ(lo->get(i), *lit);
    CHECK(lo->get(i) != old_location);
  }
}
3767 :
// Root visitor that ignores everything; used only to exercise the handle
// iteration machinery in the DeferredHandles test below.
class DummyVisitor : public RootVisitor {
 public:
  void VisitRootPointers(Root root, const char* description,
                         FullObjectSlot start, FullObjectSlot end) override {}
};
3773 :
3774 :
// Fills the current handle block completely, then checks that creating and
// iterating a DeferredHandleScope at the block boundary works (regression
// guard for handle-block bookkeeping).
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(ReadOnlyRoots(heap).empty_string(), isolate);
  // Allocate handles until the current block is exactly full.
  while (data->next < data->limit) {
    Handle<Object> obj(ReadOnlyRoots(heap).empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  CHECK(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  // Detach() transfers ownership of the deferred handles; free them.
  delete deferred.Detach();
}
3794 :
3795 :
// Checks that simulated incremental marking steps make enough progress to
// (nearly) finish marking even when the heap contains a very large array.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  // Allocate a ~10M-element array to mark.
  CompileRun("function f(n) {"
             "    var a = new Array(n);"
             "    for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking(
        i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
  }
  heap::SimulateIncrementalMarking(CcTest::heap());
  // Marking must be complete or at least ready to finalize.
  CHECK(marking->IsComplete() ||
        marking->IsReadyToOverApproximateWeakClosure());
}
3815 :
3816 :
// Checks that optimized code keeps working when inline allocation is
// disabled and later re-enabled on the heap.
TEST(DisableInlineAllocation) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             "  var x = [];"
             "  for (var i = 0; i < 10; i++) {"
             "    x[i] = [ {}, [1,2,3], [1,x,3] ];  "
             "  }"
             "}"
             "function run() {"
             "  %OptimizeFunctionOnNextCall(test);"
             "  test();"
             "  %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
3844 :
3845 :
3846 266 : static int AllocationSitesCount(Heap* heap) {
3847 : int count = 0;
3848 2852 : for (Object site = heap->allocation_sites_list(); site->IsAllocationSite();) {
3849 1293 : AllocationSite cur = AllocationSite::cast(site);
3850 1293 : CHECK(cur->HasWeakNext());
3851 : site = cur->weak_next();
3852 1293 : count++;
3853 : }
3854 266 : return count;
3855 : }
3856 :
3857 260 : static int SlimAllocationSiteCount(Heap* heap) {
3858 : int count = 0;
3859 2840 : for (Object weak_list = heap->allocation_sites_list();
3860 : weak_list->IsAllocationSite();) {
3861 : AllocationSite weak_cur = AllocationSite::cast(weak_list);
3862 3420 : for (Object site = weak_cur->nested_site(); site->IsAllocationSite();) {
3863 1065 : AllocationSite cur = AllocationSite::cast(site);
3864 1065 : CHECK(!cur->HasWeakNext());
3865 : site = cur->nested_site();
3866 1065 : count++;
3867 : }
3868 : weak_list = weak_cur->weak_next();
3869 : }
3870 260 : return count;
3871 : }
3872 :
// Checks that the dependent-code list of an AllocationSite holds weak
// references to optimized code: after the optimized function dies, GC must
// clear the weak entries even though the site itself is kept alive.
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_optimizer()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    // Keep the site alive across the GCs below via a global handle.
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()
                ->Get(context.local(), v8_str("bar"))
                .ToLocalChecked())));

    // Walk the site's dependent-code list; each entry must weakly reference
    // bar's optimized code.
    int dependency_group_count = 0;
    DependentCode dependency = site->dependent_code();
    while (dependency != ReadOnlyRoots(heap).empty_weak_fixed_array()) {
      CHECK(dependency->group() ==
                DependentCode::kAllocationSiteTransitionChangedGroup ||
            dependency->group() ==
                DependentCode::kAllocationSiteTenuringChangedGroup);
      CHECK_EQ(1, dependency->count());
      CHECK(dependency->object_at(0)->IsWeak());
      Code function_bar =
          Code::cast(dependency->object_at(0)->GetHeapObjectAssumeWeak());
      CHECK_EQ(bar_handle->code(), function_bar);
      dependency = dependency->next_link();
      dependency_group_count++;
    }
    // Expect a dependent code object for transitioning and pretenuring.
    CHECK_EQ(2, dependency_group_count);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  CHECK(site->dependent_code()->object_at(0)->IsCleared());
}
3939 :
3940 130 : void CheckNumberOfAllocations(Heap* heap, const char* source,
3941 : int expected_full_alloc,
3942 : int expected_slim_alloc) {
3943 130 : int prev_fat_alloc_count = AllocationSitesCount(heap);
3944 130 : int prev_slim_alloc_count = SlimAllocationSiteCount(heap);
3945 :
3946 : CompileRun(source);
3947 :
3948 130 : int fat_alloc_sites = AllocationSitesCount(heap) - prev_fat_alloc_count;
3949 130 : int slim_alloc_sites = SlimAllocationSiteCount(heap) - prev_slim_alloc_count;
3950 :
3951 130 : CHECK_EQ(expected_full_alloc, fat_alloc_sites);
3952 130 : CHECK_EQ(expected_slim_alloc, slim_alloc_sites);
3953 130 : }
3954 :
// Exercises when AllocationSites are created (eagerly vs. lazily vs. not at
// all) for array and object literals in functions, IIFEs, top-level code,
// and loops, using CheckNumberOfAllocations to count full and slim sites.
TEST(AllocationSiteCreation) {
  // No feedback vectors and hence no allocation sites.
  if (FLAG_lite_mode) return;
  FLAG_always_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  i::FLAG_enable_one_shot_optimization = true;

  // Array literals.
  CheckNumberOfAllocations(heap, "function f1() { return []; }; f1()", 1, 0);
  CheckNumberOfAllocations(heap, "function f2() { return [1, 2]; }; f2()", 1,
                           0);
  CheckNumberOfAllocations(heap, "function f3() { return [[1], [2]]; }; f3()",
                           1, 2);

  CheckNumberOfAllocations(heap,
                           "function f4() { "
                           "return [0, [1, 1.1, 1.2, "
                           "], 1.5, [2.1, 2.2], 3];"
                           "}; f4();",
                           1, 2);

  // No allocation sites within IIFE/top-level
  CheckNumberOfAllocations(heap,
                           R"(
                           (function f4() {
                           return [ 0, [ 1, 1.1, 1.2,], 1.5, [2.1, 2.2], 3 ];
                           })();
                           )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                           l = [ 1, 2, 3, 4];
                           )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                           a = [];
                           )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                           (function f4() {
                           return [];
                           })();
                           )",
                           0, 0);

  // Object literals have lazy AllocationSites
  CheckNumberOfAllocations(heap, "function f5() { return {}; }; f5(); ", 0, 0);

  // No AllocationSites are created for the empty object literal.
  for (int i = 0; i < 5; i++) {
    CheckNumberOfAllocations(heap, "f5(); ", 0, 0);
  }

  CheckNumberOfAllocations(heap, "function f6() { return {a:1}; }; f6(); ", 0,
                           0);

  // The lazy site materializes on the second invocation.
  CheckNumberOfAllocations(heap, "f6(); ", 1, 0);

  CheckNumberOfAllocations(heap, "function f7() { return {a:1, b:2}; }; f7(); ",
                           0, 0);
  CheckNumberOfAllocations(heap, "f7(); ", 1, 0);

  // No Allocation sites are created for object subliterals
  CheckNumberOfAllocations(heap,
                           "function f8() {"
                           "return {a:{}, b:{ a:2, c:{ d:{f:{}}} } }; "
                           "}; f8(); ",
                           0, 0);
  CheckNumberOfAllocations(heap, "f8(); ", 1, 0);

  // We currently eagerly create allocation sites if there are sub-arrays.
  // Allocation sites are created only for array subliterals
  CheckNumberOfAllocations(heap,
                           "function f9() {"
                           "return {a:[1, 2, 3], b:{ a:2, c:{ d:{f:[]} } }}; "
                           "}; f9(); ",
                           1, 2);

  // No new AllocationSites created on the second invocation.
  CheckNumberOfAllocations(heap, "f9(); ", 0, 0);

  // No allocation sites for literals in an iife/top level code even if it has
  // array subliterals
  CheckNumberOfAllocations(heap,
                           R"(
                           (function f10() {
                           return {a: [1], b: [2]};
                           })();
                           )",
                           0, 0);

  CheckNumberOfAllocations(heap,
                           R"(
                           l = {
                           a: 1,
                           b: {
                           c: [5],
                           }
                           };
                           )",
                           0, 0);

  // Eagerly create allocation sites for literals within a loop of iife or
  // top-level code
  CheckNumberOfAllocations(heap,
                           R"(
                           (function f11() {
                           while(true) {
                           return {a: [1], b: [2]};
                           }
                           })();
                           )",
                           1, 2);

  CheckNumberOfAllocations(heap,
                           R"(
                           for (i = 0; i < 1; ++i) {
                           l = {
                           a: 1,
                           b: {
                           c: [5],
                           }
                           };
                           }
                           )",
                           1, 1);
}
4090 :
// Checks that property cells embedded in optimized code are weak: once the
// optimized closure is only reachable through its code, GC must mark the
// code for deoptimization and clear its embedded objects.
TEST(CellsInOptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    // bar closes over foo (a cell reference) and gets optimized.
    CompileRun(
        "bar = (function() {"
        "  function bar() {"
        "    return foo(1);"
        "  };"
        "  var foo = function(x) { with (x) { return 1 + x; } };"
        "  %NeverOptimizeFunction(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  bar(foo);"
        "  %OptimizeFunctionOnNextCall(bar);"
        "  bar(foo);"
        "  return bar;})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Keep only the code alive beyond this scope, not the function.
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4134 :
4135 :
// Same weakness invariant as CellsInOptimizedCodeAreWeak, but for plain
// global functions instead of a closure created inside an IIFE.
TEST(ObjectsInOptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "function bar() {"
        "  return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Escape only the optimized Code; dropping |context| lets the function
    // and everything it embeds become garbage.
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4177 :
// Checks that optimized code referencing new-space objects survives their
// promotion to old space (scavenges must not invalidate the code), and that
// the references are still weak once the functions die.
TEST(NewSpaceObjectsInOptimizedCode) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(isolate);
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(isolate);

    CompileRun(
        "var foo;"
        "var bar;"
        "(function() {"
        "  function foo_func(x) { with (x) { return 1 + x; } };"
        "  %NeverOptimizeFunction(foo_func);"
        "  function bar_func() {"
        "    return foo(1);"
        "  };"
        "  bar = bar_func;"
        "  foo = foo_func;"
        "  bar_func();"
        "  bar_func();"
        "  bar_func();"
        "  %OptimizeFunctionOnNextCall(bar_func);"
        "  bar_func();"
        "})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));

    Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("foo"))
                                           .ToLocalChecked())));

    // Two scavenges promote |foo| out of the young generation; the optimized
    // code embedding it must stay valid across the move.
    CHECK(Heap::InYoungGeneration(*foo));
    CcTest::CollectGarbage(NEW_SPACE);
    CcTest::CollectGarbage(NEW_SPACE);
    CHECK(!Heap::InYoungGeneration(*foo));
#ifdef VERIFY_HEAP
    CcTest::heap()->Verify();
#endif
    CHECK(!bar->code()->marked_for_deoptimization());
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4238 :
// Like ObjectsInOptimizedCodeAreWeak, but the function is eagerly
// deoptimized via %DeoptimizeFunction before GC; embedded objects must still
// be cleared for the already-deoptimized code.
TEST(ObjectsInEagerlyDeoptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "function bar() {"
        "  return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();"
        "%DeoptimizeFunction(bar);");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
  }

  // The explicit %DeoptimizeFunction above already marked the code.
  CHECK(code->marked_for_deoptimization());

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
  CHECK(code->embedded_objects_cleared());
}
4283 :
// Defines a global function |name| that returns 0, warms it up with two
// calls, force-optimizes it, and returns the resulting JSFunction handle.
static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
                                                const char* name) {
  EmbeddedVector<char, 256> source;
  // %% escapes to a literal '%' so the natives-syntax intrinsic survives
  // SNPrintF formatting.
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();", name, name, name, name, name);
  CompileRun(source.start());
  i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(isolate->GetCurrentContext(), v8_str(name))
              .ToLocalChecked())));
  return fun;
}
4300 :
4301 8 : static int GetCodeChainLength(Code code) {
4302 : int result = 0;
4303 28 : while (code->next_code_link()->IsCode()) {
4304 4 : result++;
4305 4 : code = Code::cast(code->next_code_link());
4306 : }
4307 8 : return result;
4308 : }
4309 :
4310 :
// The next_code_link field of optimized Code is a weak link: when a code
// object on the chain dies, the chain is shortened by one during GC.
TEST(NextCodeLinkIsWeak) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  CcTest::CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal =
        OptimizeDummyFunction(CcTest::isolate(), "mortal");
    Handle<JSFunction> immortal =
        OptimizeDummyFunction(CcTest::isolate(), "immortal");
    // The freshest optimized code is prepended, so immortal links to mortal.
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code(), isolate));
    CompileRun("mortal = null; immortal = null;");
  }
  CcTest::CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}
4340 :
// When a Code object dies, the next_code_link stored in its (surviving)
// CodeDataContainer must be reset to undefined rather than left dangling.
TEST(NextCodeLinkInCodeDataContainerIsCleared) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<CodeDataContainer> code_data_container;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal1 =
        OptimizeDummyFunction(CcTest::isolate(), "mortal1");
    Handle<JSFunction> mortal2 =
        OptimizeDummyFunction(CcTest::isolate(), "mortal2");
    CHECK_EQ(mortal2->code()->next_code_link(), mortal1->code());
    // Keep only the container alive; both code objects become garbage.
    code_data_container = scope.CloseAndEscape(Handle<CodeDataContainer>(
        mortal2->code()->code_data_container(), isolate));
    CompileRun("mortal1 = null; mortal2 = null;");
  }
  CcTest::CollectAllAvailableGarbage();
  CHECK(code_data_container->next_code_link()->IsUndefined(isolate));
}
4365 :
// Assembles a trivial Code object (push undefined twice, drop both) tagged
// as OPTIMIZED_FUNCTION, so tests can build next_code_link chains without
// running the optimizing compiler.
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
                      ExternalAssemblerBuffer(buffer, sizeof(buffer)));
  CodeDesc desc;
  masm.Push(isolate->factory()->undefined_value());
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(2);
  masm.GetCode(isolate, &desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::OPTIMIZED_FUNCTION, masm.CodeObject());
  CHECK(code->IsCode());
  return code;
}
4380 :
4381 :
// Same weak-link property as NextCodeLinkIsWeak, but the chain is built by
// hand on the native context's OPTIMIZED_CODE_LIST using dummy code objects.
TEST(NextCodeLinkIsWeak2) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  CcTest::CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    // Chain: immortal -> mortal -> old_head; only |immortal| escapes.
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    new_head = scope.CloseAndEscape(immortal);
  }
  CcTest::CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}
4407 :
4408 :
// Set by ClearWeakIC when the weak-handle callback fires; tests reset it to
// false before forcing a GC and then CHECK that it became true.
static bool weak_ic_cleared = false;
4410 :
// Weak-handle callback for the "weak IC" tests: records that the callback
// fired and disposes the persistent handle as second-pass cleanup requires.
static void ClearWeakIC(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  printf("clear weak is called\n");
  weak_ic_cleared = true;
  data.GetParameter()->Reset();
}
4417 :
4418 :
// The constructor feedback recorded by `new obj()` must hold the constructor
// weakly: after the function dies, its feedback slot is cleared, and a fresh
// constructor can re-establish a monomorphic (weak) entry.
TEST(WeakFunctionInConstructor) {
  if (FLAG_lite_mode) return;
  if (FLAG_always_opt) return;
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  CompileRun(
      "function createObj(obj) {"
      "  return new obj();"
      "}");
  i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(env.local(), v8_str("createObj"))
              .ToLocalChecked())));

  v8::Persistent<v8::Object> garbage;
  {
    // Inner scope: |hat| is only reachable through the persistent handle,
    // which is made weak below.
    v8::HandleScope scope(isolate);
    const char* source =
        " (function() {"
        "   function hat() { this.x = 5; }"
        "   createObj(hat);"
        "   createObj(hat);"
        "   return hat;"
        "  })();";
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  CcTest::CollectAllGarbage();
  CHECK(weak_ic_cleared);

  // We've determined the constructor in createObj has had it's weak cell
  // cleared. Now, verify that one additional call with a new function
  // allows monomorphicity.
  Handle<FeedbackVector> feedback_vector =
      Handle<FeedbackVector>(createObj->feedback_vector(), CcTest::i_isolate());
  // Retry GCs until the slot reads as cleared (weak references may need more
  // than one cycle to be processed).
  for (int i = 0; i < 20; i++) {
    MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
    CHECK(slot_value->IsWeakOrCleared());
    if (slot_value->IsCleared()) break;
    CcTest::CollectAllGarbage();
  }

  MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
  CHECK(slot_value->IsCleared());
  CompileRun(
      "function coat() { this.x = 6; }"
      "createObj(coat);");
  slot_value = feedback_vector->Get(FeedbackSlot(0));
  CHECK(slot_value->IsWeak());
}
4478 :
4479 :
4480 : // Checks that the value returned by execution of the source is weak.
// Checks that the value returned by execution of the source is weak.
// The returned object is held only by a persistent handle that is made weak;
// a full GC must then invoke ClearWeakIC (observed via weak_ic_cleared),
// proving no IC in |source| retained the object strongly.
void CheckWeakness(const char* source) {
  // Stress modes interleave extra GCs that would perturb the single
  // collect-and-observe step below.
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  CcTest::CollectAllGarbage();
  CHECK(weak_ic_cleared);
}
4501 :
4502 :
4503 : // Each of the following "weak IC" tests creates an IC that embeds a map with
4504 : // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// Monomorphic load IC: the map embedded for obj.name must not keep |proto|
// alive.
TEST(WeakMapInMonomorphicLoadIC) {
  CheckWeakness("function loadIC(obj) {"
                "  return obj.name;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   loadIC(obj);"
                "   return proto;"
                " })();");
}
4518 :
4519 :
// Polymorphic load IC (two receiver maps): neither embedded map may keep
// |proto| alive.
TEST(WeakMapInPolymorphicLoadIC) {
  CheckWeakness(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   loadIC(obj);"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   loadIC(poly);"
      "   return proto;"
      " })();");
}
4537 :
4538 :
// Monomorphic keyed load IC: same weakness requirement for obj[field].
TEST(WeakMapInMonomorphicKeyedLoadIC) {
  CheckWeakness("function keyedLoadIC(obj, field) {"
                "  return obj[field];"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   keyedLoadIC(obj, 'name');"
                "   return proto;"
                " })();");
}
4552 :
4553 :
// Polymorphic keyed load IC: maps for both receiver shapes must be weak.
TEST(WeakMapInPolymorphicKeyedLoadIC) {
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      "  return obj[field];"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   keyedLoadIC(obj, 'name');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedLoadIC(poly, 'name');"
      "   return proto;"
      " })();");
}
4571 :
4572 :
// Monomorphic store IC: the map embedded for obj.name = v must be weak.
TEST(WeakMapInMonomorphicStoreIC) {
  CheckWeakness("function storeIC(obj, value) {"
                "  obj.name = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   storeIC(obj, 'x');"
                "   return proto;"
                " })();");
}
4586 :
4587 :
// Polymorphic store IC: maps for both receiver shapes must be weak.
TEST(WeakMapInPolymorphicStoreIC) {
  CheckWeakness(
      "function storeIC(obj, value) {"
      "  obj.name = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   storeIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   storeIC(poly, 'x');"
      "   return proto;"
      " })();");
}
4605 :
4606 :
// Monomorphic keyed store IC. NOTE(review): the calls pass no |value|
// argument, so undefined is stored — presumably irrelevant to the map
// weakness being tested; confirm this is intentional.
TEST(WeakMapInMonomorphicKeyedStoreIC) {
  CheckWeakness("function keyedStoreIC(obj, field, value) {"
                "  obj[field] = value;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   keyedStoreIC(obj, 'x');"
                "   return proto;"
                " })();");
}
4620 :
4621 :
// Polymorphic keyed store IC: maps for both receiver shapes must be weak.
TEST(WeakMapInPolymorphicKeyedStoreIC) {
  CheckWeakness(
      "function keyedStoreIC(obj, field, value) {"
      "  obj[field] = value;"
      "}"
      " (function() {"
      "   var proto = {'name' : 'weak'};"
      "   var obj = Object.create(proto);"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   keyedStoreIC(obj, 'x');"
      "   var poly = Object.create(proto);"
      "   poly.x = true;"
      "   keyedStoreIC(poly, 'x');"
      "   return proto;"
      " })();");
}
4639 :
4640 :
// Compare-nil IC (obj == null): the recorded receiver map must be weak.
TEST(WeakMapInMonomorphicCompareNilIC) {
  CheckWeakness("function compareNilIC(obj) {"
                "  return obj == null;"
                "}"
                " (function() {"
                "   var proto = {'name' : 'weak'};"
                "   var obj = Object.create(proto);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   compareNilIC(obj);"
                "   return proto;"
                " })();");
}
4654 :
4655 :
4656 8 : Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4657 8 : Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4658 : Handle<Object> obj =
4659 24 : Object::GetProperty(isolate, isolate->global_object(), str)
4660 : .ToHandleChecked();
4661 8 : return Handle<JSFunction>::cast(obj);
4662 : }
4663 :
4664 16 : void CheckIC(Handle<JSFunction> function, int slot_index,
4665 : InlineCacheState state) {
4666 16 : FeedbackVector vector = function->feedback_vector();
4667 : FeedbackSlot slot(slot_index);
4668 : FeedbackNexus nexus(vector, slot);
4669 16 : CHECK_EQ(nexus.ic_state(), state);
4670 16 : }
4671 :
// A monomorphic load IC must survive a full GC without degrading: re-running
// the same code afterwards keeps the slot MONOMORPHIC.
TEST(MonomorphicStaysMonomorphicAfterGC) {
  if (!FLAG_use_ic) return;
  if (FLAG_always_opt) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope so the objects created by testIC become garbage.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CcTest::CollectAllGarbage();
  CheckIC(loadIC, 0, MONOMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC, 0, MONOMORPHIC);
}
4704 :
4705 :
// A polymorphic load IC (two receiver shapes) must survive a full GC without
// degrading: the slot stays POLYMORPHIC after collection and re-execution.
TEST(PolymorphicStaysPolymorphicAfterGC) {
  if (!FLAG_use_ic) return;
  if (FLAG_always_opt) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  var poly = Object.create(proto);"
      "  poly.x = true;"
      "  loadIC(poly);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope so the objects created by testIC become garbage.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CcTest::CollectAllGarbage();
  CheckIC(loadIC, 0, POLYMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC, 0, POLYMORPHIC);
}
4741 :
4742 : #ifdef DEBUG
// DEBUG-only: with a 1-allocation gc_interval, a GC can strike in the middle
// of an optimized add whose deopt stores into a just-allocated object; this
// must not corrupt the heap (regression coverage for new-space promotion).
TEST(AddInstructionChangesNewSpacePromotion) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  FLAG_stress_compaction = true;
  // Force a GC after (almost) every allocation.
  FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  if (!FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  LocalContext env;
  CompileRun(
      "function add(a, b) {"
      "  return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      "  var object = {a: null, b: null};"
      "  var result = add(1.5, x | 0);"
      "  object.a = result;"
      "  oldSpaceObject = object;"
      "  return object;"
      "}"
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Local<v8::Object> global = CcTest::global();
  v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
      global->Get(env.local(), v8_str("crash")).ToLocalChecked());
  v8::Local<v8::Value> args1[] = {v8_num(1)};
  // Make the very next allocation trigger a GC.
  heap->DisableInlineAllocation();
  heap->set_allocation_timeout(1);
  g->Call(env.local(), global, 1, args1).ToLocalChecked();
  CcTest::CollectAllGarbage();
}
4783 :
4784 :
// Fatal-error handler for CEntryStubOOM: treats an OOM raised at
// CALL_AND_RETRY_LAST as the expected outcome.
void OnFatalErrorExpectOOM(const char* location, const char* message) {
  // Exit with 0 if the location matches our expectation.
  exit(strcmp(location, "CALL_AND_RETRY_LAST"));
}
4789 :
4790 :
// DEBUG-only: runs array code under an aggressive allocation timeout; either
// it completes and returns a number, or the OOM fatal error is intercepted
// by OnFatalErrorExpectOOM (which exits 0 on the expected location).
TEST(CEntryStubOOM) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Local<v8::Value> result = CompileRun(
      "%SetAllocationTimeout(1, 1);"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  CHECK(result->IsNumber());
}
4805 :
4806 : #endif // DEBUG
4807 :
4808 :
// No-op interrupt callback; requesting the interrupt is what matters for
// TEST(Regress357137) below, not the callback's body.
static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
4810 :
4811 :
4812 5 : static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
4813 5 : CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, nullptr);
4814 5 : }
4815 :
// Regression test: compaction must abort gracefully (not OOM) when the heap
// is near its small configured limits and many pages are forced to be
// evacuation candidates.
HEAP_TEST(Regress538257) {
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  v8::Isolate::CreateParams create_params;
  // Set heap limits.
  create_params.constraints.set_max_semi_space_size_in_kb(1024);
#ifdef DEBUG
  create_params.constraints.set_max_old_space_size(20);
#else
  create_params.constraints.set_max_old_space_size(6);
#endif
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  isolate->Enter();
  {
    i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
    Heap* heap = i_isolate->heap();
    HandleScope handle_scope(i_isolate);
    PagedSpace* old_space = heap->old_space();
    const int kMaxObjects = 10000;
    const int kFixedArrayLen = 512;
    Handle<FixedArray> objects[kMaxObjects];
    // Fill old space until it can no longer expand, marking every touched
    // page as an evacuation candidate.
    for (int i = 0; (i < kMaxObjects) &&
                    heap->CanExpandOldGeneration(old_space->AreaSize());
         i++) {
      objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen,
                                                       AllocationType::kOld);
      heap::ForceEvacuationCandidate(Page::FromHeapObject(*objects[i]));
    }
    heap::SimulateFullSpace(old_space);
    CcTest::CollectAllGarbage();
    // If we get this far, we've successfully aborted compaction. Any further
    // allocations might trigger OOM.
  }
  isolate->Exit();
  isolate->Dispose();
}
4853 :
4854 :
// Regression test for crbug 357137: an interrupt delivered while compiling a
// function with many locals must not corrupt execution; f()() still yields
// the correct value.
TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // Expose a JS-callable interrupt() that queues InterruptCallback357137.
  global->Set(
      v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
  CHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
}
4876 :
4877 :
// Regression test: shrinking an object while a filtering heap iterator is
// live leaves a filler that shares mark bits with the next object; iteration
// must still visit only valid objects.
TEST(Regress507979) {
  const int kFixedArrayLen = 10;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  CHECK(Heap::InYoungGeneration(*o1));
  CHECK(Heap::InYoungGeneration(*o2));

  HeapIterator it(isolate->heap(), i::HeapIterator::kFilterUnreachable);

  // Replace parts of an object placed before a live object with a filler. This
  // way the filler object shares the mark bits with the following live object.
  o1->Shrink(isolate, kFixedArrayLen - 1);

  for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
    // Let's not optimize the loop away.
    CHECK_NE(obj->address(), kNullAddress);
  }
}
4900 :
// Regression test for crbug 388880: migrating a map for an object placed
// exactly at the end of a page must not crash while incremental marking is
// active (live-byte accounting must handle the page boundary).
TEST(Regress388880) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_incremental_marking = false;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<String> name = factory->NewStringFromStaticChars("foo");
  name = factory->InternalizeString(name);
  // map2 is map1 plus one tagged field "foo" (no transition recorded).
  Handle<Map> map2 =
      Map::CopyWithField(isolate, map1, name, FieldType::Any(isolate), NONE,
                         PropertyConstness::kMutable, Representation::Tagged(),
                         OMIT_TRANSITION)
          .ToHandleChecked();

  size_t desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate padding objects in old pointer space so, that object allocated
  // afterwards would end at the end of the page.
  heap::SimulateFullSpace(heap->old_space());
  size_t padding_size =
      desired_offset - MemoryChunkLayout::ObjectStartOffsetInDataPage();
  heap::CreatePadding(heap, static_cast<int>(padding_size),
                      AllocationType::kOld);

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, AllocationType::kOld);
  o->set_raw_properties_or_hash(*factory->empty_fixed_array());

  // Ensure that the object allocated where we need it.
  Page* page = Page::FromHeapObject(*o);
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}
4951 :
4952 :
// Regression test for issue 3631: growing a WeakMap's backing store while
// the old store is partially marked must not lose marking state (the old
// table is kept alive via a handle while new entries force a reallocation).
TEST(Regress3631) {
  if (!FLAG_incremental_marking) return;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  v8::Local<v8::Value> result = CompileRun(
      "var weak_map = new WeakMap();"
      "var future_keys = [];"
      "for (var i = 0; i < 50; i++) {"
      "  var key = {'k' : i + 0.1};"
      "  weak_map.set(key, 1);"
      "  future_keys.push({'x' : i + 0.2});"
      "}"
      "weak_map");
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking(
        i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
  }
  // Incrementally mark the backing store.
  Handle<JSReceiver> obj =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Handle<JSWeakCollection> weak_map(JSWeakCollection::cast(*obj), isolate);
  SimulateIncrementalMarking(heap);
  // Stash the backing store in a handle.
  Handle<Object> save(weak_map->table(), isolate);
  // The following line will update the backing store.
  CompileRun(
      "for (var i = 0; i < 50; i++) {"
      "  weak_map.set(future_keys[i], i);"
      "}");
  heap->incremental_marking()->set_should_hurry(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
4989 :
4990 :
// Regression test for crbug 442710: Array.prototype.shift followed by an
// old-space GC must not corrupt the heap.
TEST(Regress442710) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  Handle<JSGlobalObject> global(CcTest::i_isolate()->context()->global_object(),
                                isolate);
  Handle<JSArray> array = factory->NewJSArray(2);

  Handle<String> name = factory->InternalizeUtf8String("testArray");
  Object::SetProperty(isolate, global, name, array).Check();
  CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
  CcTest::CollectGarbage(OLD_SPACE);
}
5006 :
5007 :
// Test that the number-string cache has not been resized in the snapshot.
HEAP_TEST(NumberStringCacheSize) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  // Without a snapshot the cache may legitimately have grown; nothing to check.
  if (!isolate->snapshot_available()) return;
  Heap* heap = isolate->heap();
  // The cache stores (number, string) pairs, hence length == 2 * capacity.
  CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
           heap->number_string_cache()->length());
}
5017 :
5018 :
// Regression test for issue 3877: a prototype must stay alive while a map
// still points at it, and must become collectable once no live map uses it.
TEST(Regress3877) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  CompileRun("function cls() { this.x = 10; }");
  // Observe the prototype's liveness through a weak slot.
  Handle<WeakFixedArray> weak_prototype_holder = factory->NewWeakFixedArray(1);
  {
    HandleScope inner_scope(isolate);
    v8::Local<v8::Value> result = CompileRun("cls.prototype");
    Handle<JSReceiver> proto =
        v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
    weak_prototype_holder->Set(0, HeapObjectReference::Weak(*proto));
  }
  CHECK(!weak_prototype_holder->Get(0)->IsCleared());
  CompileRun(
      "var a = { };"
      "a.x = new cls();"
      "cls.prototype = null;");
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }
  // The map of a.x keeps prototype alive
  CHECK(!weak_prototype_holder->Get(0)->IsCleared());
  // Change the map of a.x and make the previous map garbage collectable.
  CompileRun("a.x.__proto__ = {};");
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }
  CHECK(weak_prototype_holder->Get(0)->IsCleared());
}
5050 :
5051 20 : Handle<WeakFixedArray> AddRetainedMap(Isolate* isolate, Heap* heap) {
5052 : HandleScope inner_scope(isolate);
5053 20 : Handle<Map> map = Map::Create(isolate, 1);
5054 : v8::Local<v8::Value> result =
5055 : CompileRun("(function () { return {x : 10}; })();");
5056 : Handle<JSReceiver> proto =
5057 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5058 20 : Map::SetPrototype(isolate, map, proto);
5059 20 : heap->AddRetainedMap(map);
5060 20 : Handle<WeakFixedArray> array = isolate->factory()->NewWeakFixedArray(1);
5061 40 : array->Set(0, HeapObjectReference::Weak(*map));
5062 40 : return inner_scope.CloseAndEscape(array);
5063 : }
5064 :
5065 :
5066 20 : void CheckMapRetainingFor(int n) {
5067 20 : FLAG_retain_maps_for_n_gc = n;
5068 : Isolate* isolate = CcTest::i_isolate();
5069 : Heap* heap = isolate->heap();
5070 20 : Handle<WeakFixedArray> array_with_map = AddRetainedMap(isolate, heap);
5071 20 : CHECK(array_with_map->Get(0)->IsWeak());
5072 120 : for (int i = 0; i < n; i++) {
5073 50 : heap::SimulateIncrementalMarking(heap);
5074 50 : CcTest::CollectGarbage(OLD_SPACE);
5075 : }
5076 20 : CHECK(array_with_map->Get(0)->IsWeak());
5077 20 : heap::SimulateIncrementalMarking(heap);
5078 20 : CcTest::CollectGarbage(OLD_SPACE);
5079 20 : CHECK(array_with_map->Get(0)->IsCleared());
5080 20 : }
5081 :
5082 :
5083 26068 : TEST(MapRetaining) {
5084 5 : if (!FLAG_incremental_marking) return;
5085 : ManualGCScope manual_gc_scope;
5086 5 : CcTest::InitializeVM();
5087 10 : v8::HandleScope scope(CcTest::isolate());
5088 5 : CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5089 5 : CheckMapRetainingFor(0);
5090 5 : CheckMapRetainingFor(1);
5091 5 : CheckMapRetainingFor(7);
5092 : }
5093 :
5094 26063 : TEST(PreprocessStackTrace) {
5095 : // Do not automatically trigger early GC.
5096 0 : FLAG_gc_interval = -1;
5097 0 : CcTest::InitializeVM();
5098 0 : v8::HandleScope scope(CcTest::isolate());
5099 0 : v8::TryCatch try_catch(CcTest::isolate());
5100 : CompileRun("throw new Error();");
5101 0 : CHECK(try_catch.HasCaught());
5102 : Isolate* isolate = CcTest::i_isolate();
5103 0 : Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
5104 : Handle<Name> key = isolate->factory()->stack_trace_symbol();
5105 : Handle<Object> stack_trace =
5106 0 : Object::GetProperty(isolate, exception, key).ToHandleChecked();
5107 : Handle<Object> code =
5108 0 : Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5109 0 : CHECK(code->IsCode());
5110 :
5111 0 : CcTest::CollectAllAvailableGarbage();
5112 :
5113 : Handle<Object> pos =
5114 0 : Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5115 0 : CHECK(pos->IsSmi());
5116 :
5117 : Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
5118 : int array_length = Smi::ToInt(stack_trace_array->length());
5119 0 : for (int i = 0; i < array_length; i++) {
5120 : Handle<Object> element =
5121 0 : Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
5122 0 : CHECK(!element->IsCode());
5123 : }
5124 0 : }
5125 :
5126 :
5127 215 : void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
5128 215 : CHECK_LE(FixedArray::kHeaderSize, bytes);
5129 215 : CHECK(IsAligned(bytes, kTaggedSize));
5130 : Factory* factory = isolate->factory();
5131 : HandleScope scope(isolate);
5132 : AlwaysAllocateScope always_allocate(isolate);
5133 : int elements =
5134 215 : static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
5135 : Handle<FixedArray> array = factory->NewFixedArray(
5136 : elements,
5137 215 : space == NEW_SPACE ? AllocationType::kYoung : AllocationType::kOld);
5138 430 : CHECK((space == NEW_SPACE) == Heap::InYoungGeneration(*array));
5139 215 : CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
5140 215 : }
5141 :
5142 :
5143 26068 : TEST(NewSpaceAllocationCounter) {
5144 5 : CcTest::InitializeVM();
5145 10 : v8::HandleScope scope(CcTest::isolate());
5146 : Isolate* isolate = CcTest::i_isolate();
5147 : Heap* heap = isolate->heap();
5148 : size_t counter1 = heap->NewSpaceAllocationCounter();
5149 5 : CcTest::CollectGarbage(NEW_SPACE);
5150 5 : CcTest::CollectGarbage(NEW_SPACE); // Ensure new space is empty.
5151 : const size_t kSize = 1024;
5152 5 : AllocateInSpace(isolate, kSize, NEW_SPACE);
5153 : size_t counter2 = heap->NewSpaceAllocationCounter();
5154 5 : CHECK_EQ(kSize, counter2 - counter1);
5155 5 : CcTest::CollectGarbage(NEW_SPACE);
5156 : size_t counter3 = heap->NewSpaceAllocationCounter();
5157 5 : CHECK_EQ(0U, counter3 - counter2);
5158 : // Test counter overflow.
5159 : size_t max_counter = static_cast<size_t>(-1);
5160 : heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
5161 : size_t start = heap->NewSpaceAllocationCounter();
5162 205 : for (int i = 0; i < 20; i++) {
5163 100 : AllocateInSpace(isolate, kSize, NEW_SPACE);
5164 : size_t counter = heap->NewSpaceAllocationCounter();
5165 100 : CHECK_EQ(kSize, counter - start);
5166 : start = counter;
5167 : }
5168 5 : }
5169 :
5170 :
5171 26068 : TEST(OldSpaceAllocationCounter) {
5172 5 : CcTest::InitializeVM();
5173 10 : v8::HandleScope scope(CcTest::isolate());
5174 : Isolate* isolate = CcTest::i_isolate();
5175 : Heap* heap = isolate->heap();
5176 : size_t counter1 = heap->OldGenerationAllocationCounter();
5177 5 : CcTest::CollectGarbage(NEW_SPACE);
5178 5 : CcTest::CollectGarbage(NEW_SPACE);
5179 : const size_t kSize = 1024;
5180 5 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5181 : size_t counter2 = heap->OldGenerationAllocationCounter();
5182 : // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
5183 5 : CHECK_LE(kSize, counter2 - counter1);
5184 5 : CcTest::CollectGarbage(NEW_SPACE);
5185 : size_t counter3 = heap->OldGenerationAllocationCounter();
5186 5 : CHECK_EQ(0u, counter3 - counter2);
5187 5 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5188 5 : CcTest::CollectGarbage(OLD_SPACE);
5189 : size_t counter4 = heap->OldGenerationAllocationCounter();
5190 5 : CHECK_LE(kSize, counter4 - counter3);
5191 : // Test counter overflow.
5192 : size_t max_counter = static_cast<size_t>(-1);
5193 : heap->set_old_generation_allocation_counter_at_last_gc(max_counter -
5194 : 10 * kSize);
5195 : size_t start = heap->OldGenerationAllocationCounter();
5196 205 : for (int i = 0; i < 20; i++) {
5197 100 : AllocateInSpace(isolate, kSize, OLD_SPACE);
5198 : size_t counter = heap->OldGenerationAllocationCounter();
5199 100 : CHECK_LE(kSize, counter - start);
5200 : start = counter;
5201 : }
5202 5 : }
5203 :
5204 :
5205 20 : static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
5206 : Isolate* isolate = CcTest::i_isolate();
5207 : Object message(
5208 20 : *reinterpret_cast<Address*>(isolate->pending_message_obj_address()));
5209 20 : CHECK(message->IsTheHole(isolate));
5210 20 : }
5211 :
5212 :
5213 26068 : TEST(MessageObjectLeak) {
5214 5 : CcTest::InitializeVM();
5215 5 : v8::Isolate* isolate = CcTest::isolate();
5216 10 : v8::HandleScope scope(isolate);
5217 5 : v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5218 10 : global->Set(
5219 5 : v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
5220 : .ToLocalChecked(),
5221 5 : v8::FunctionTemplate::New(isolate, CheckLeak));
5222 5 : v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
5223 : v8::Context::Scope cscope(context);
5224 :
5225 : const char* test =
5226 : "try {"
5227 : " throw 'message 1';"
5228 : "} catch (e) {"
5229 : "}"
5230 : "check();"
5231 : "L: try {"
5232 : " throw 'message 2';"
5233 : "} finally {"
5234 : " break L;"
5235 : "}"
5236 : "check();";
5237 : CompileRun(test);
5238 :
5239 : const char* flag = "--turbo-filter=*";
5240 5 : FlagList::SetFlagsFromString(flag, StrLength(flag));
5241 5 : FLAG_always_opt = true;
5242 :
5243 : CompileRun(test);
5244 5 : }
5245 :
5246 :
5247 10 : static void CheckEqualSharedFunctionInfos(
5248 : const v8::FunctionCallbackInfo<v8::Value>& args) {
5249 : Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
5250 : Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
5251 : Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
5252 : Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
5253 10 : CHECK(fun1->shared() == fun2->shared());
5254 10 : }
5255 :
5256 :
5257 10 : static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
5258 : Isolate* isolate = CcTest::i_isolate();
5259 : Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
5260 : Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
5261 : // Bytecode is code too.
5262 10 : SharedFunctionInfo::DiscardCompiled(isolate, handle(fun->shared(), isolate));
5263 20 : fun->set_code(*BUILTIN_CODE(isolate, CompileLazy));
5264 10 : CcTest::CollectAllAvailableGarbage();
5265 10 : }
5266 :
5267 :
5268 26068 : TEST(CanonicalSharedFunctionInfo) {
5269 5 : CcTest::InitializeVM();
5270 5 : v8::Isolate* isolate = CcTest::isolate();
5271 10 : v8::HandleScope scope(isolate);
5272 5 : v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5273 10 : global->Set(isolate, "check", v8::FunctionTemplate::New(
5274 : isolate, CheckEqualSharedFunctionInfos));
5275 10 : global->Set(isolate, "remove",
5276 : v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
5277 5 : v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
5278 : v8::Context::Scope cscope(context);
5279 : CompileRun(
5280 : "function f() { return function g() {}; }"
5281 : "var g1 = f();"
5282 : "remove(f);"
5283 : "var g2 = f();"
5284 : "check(g1, g2);");
5285 :
5286 : CompileRun(
5287 : "function f() { return (function() { return function g() {}; })(); }"
5288 : "var g1 = f();"
5289 : "remove(f);"
5290 : "var g2 = f();"
5291 : "check(g1, g2);");
5292 5 : }
5293 :
5294 :
5295 26068 : TEST(ScriptIterator) {
5296 5 : CcTest::InitializeVM();
5297 10 : v8::HandleScope scope(CcTest::isolate());
5298 : Isolate* isolate = CcTest::i_isolate();
5299 5 : Heap* heap = CcTest::heap();
5300 5 : LocalContext context;
5301 :
5302 5 : CcTest::CollectAllGarbage();
5303 :
5304 : int script_count = 0;
5305 : {
5306 10 : HeapIterator it(heap);
5307 42401 : for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
5308 42396 : if (obj->IsScript()) script_count++;
5309 : }
5310 : }
5311 :
5312 : {
5313 5 : Script::Iterator iterator(isolate);
5314 40 : for (Script script = iterator.Next(); !script.is_null();
5315 : script = iterator.Next()) {
5316 15 : script_count--;
5317 : }
5318 : }
5319 :
5320 5 : CHECK_EQ(0, script_count);
5321 5 : }
5322 :
5323 :
5324 26068 : TEST(SharedFunctionInfoIterator) {
5325 5 : CcTest::InitializeVM();
5326 10 : v8::HandleScope scope(CcTest::isolate());
5327 : Isolate* isolate = CcTest::i_isolate();
5328 5 : Heap* heap = CcTest::heap();
5329 5 : LocalContext context;
5330 :
5331 5 : CcTest::CollectAllGarbage();
5332 5 : CcTest::CollectAllGarbage();
5333 :
5334 : int sfi_count = 0;
5335 : {
5336 10 : HeapIterator it(heap);
5337 41907 : for (HeapObject obj = it.next(); !obj.is_null(); obj = it.next()) {
5338 41902 : if (!obj->IsSharedFunctionInfo()) continue;
5339 3491 : sfi_count++;
5340 : }
5341 : }
5342 :
5343 : {
5344 5 : SharedFunctionInfo::GlobalIterator iterator(isolate);
5345 6992 : while (!iterator.Next().is_null()) sfi_count--;
5346 : }
5347 :
5348 5 : CHECK_EQ(0, sfi_count);
5349 5 : }
5350 :
5351 : // This is the same as Factory::NewByteArray, except it doesn't retry on
5352 : // allocation failure.
5353 50490 : AllocationResult HeapTester::AllocateByteArrayForTest(
5354 : Heap* heap, int length, AllocationType allocation_type) {
5355 : DCHECK(length >= 0 && length <= ByteArray::kMaxLength);
5356 : int size = ByteArray::SizeFor(length);
5357 : HeapObject result;
5358 : {
5359 50490 : AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
5360 50490 : if (!allocation.To(&result)) return allocation;
5361 : }
5362 :
5363 : result->set_map_after_allocation(ReadOnlyRoots(heap).byte_array_map(),
5364 : SKIP_WRITE_BARRIER);
5365 : ByteArray::cast(result)->set_length(length);
5366 50480 : ByteArray::cast(result)->clear_padding();
5367 50480 : return result;
5368 : }
5369 :
5370 26068 : HEAP_TEST(Regress587004) {
5371 : ManualGCScope manual_gc_scope;
5372 : #ifdef VERIFY_HEAP
5373 : FLAG_verify_heap = false;
5374 : #endif
5375 5 : CcTest::InitializeVM();
5376 10 : v8::HandleScope scope(CcTest::isolate());
5377 5 : Heap* heap = CcTest::heap();
5378 : Isolate* isolate = CcTest::i_isolate();
5379 : Factory* factory = isolate->factory();
5380 : const int N =
5381 : (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kTaggedSize;
5382 5 : Handle<FixedArray> array = factory->NewFixedArray(N, AllocationType::kOld);
5383 5 : CHECK(heap->old_space()->Contains(*array));
5384 5 : Handle<Object> number = factory->NewHeapNumber(1.0);
5385 5 : CHECK(Heap::InYoungGeneration(*number));
5386 327665 : for (int i = 0; i < N; i++) {
5387 163830 : array->set(i, *number);
5388 : }
5389 5 : CcTest::CollectGarbage(OLD_SPACE);
5390 5 : heap::SimulateFullSpace(heap->old_space());
5391 5 : heap->RightTrimFixedArray(*array, N - 1);
5392 5 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5393 : ByteArray byte_array;
5394 : const int M = 256;
5395 : // Don't allow old space expansion. The test works without this flag too,
5396 : // but becomes very slow.
5397 : heap->set_force_oom(true);
5398 5 : while (
5399 10 : AllocateByteArrayForTest(heap, M, AllocationType::kOld).To(&byte_array)) {
5400 0 : for (int j = 0; j < M; j++) {
5401 : byte_array->set(j, 0x31);
5402 : }
5403 : }
5404 : // Re-enable old space expansion to avoid OOM crash.
5405 : heap->set_force_oom(false);
5406 5 : CcTest::CollectGarbage(NEW_SPACE);
5407 5 : }
5408 :
5409 26068 : HEAP_TEST(Regress589413) {
5410 5 : if (!FLAG_incremental_marking) return;
5411 5 : FLAG_stress_compaction = true;
5412 5 : FLAG_manual_evacuation_candidates_selection = true;
5413 5 : FLAG_parallel_compaction = false;
5414 : ManualGCScope manual_gc_scope;
5415 5 : CcTest::InitializeVM();
5416 10 : v8::HandleScope scope(CcTest::isolate());
5417 5 : Heap* heap = CcTest::heap();
5418 : // Get the heap in clean state.
5419 5 : CcTest::CollectGarbage(OLD_SPACE);
5420 5 : CcTest::CollectGarbage(OLD_SPACE);
5421 : Isolate* isolate = CcTest::i_isolate();
5422 : Factory* factory = isolate->factory();
5423 : // Fill the new space with byte arrays with elements looking like pointers.
5424 : const int M = 256;
5425 : ByteArray byte_array;
5426 39650 : while (AllocateByteArrayForTest(heap, M, AllocationType::kYoung)
5427 : .To(&byte_array)) {
5428 10167660 : for (int j = 0; j < M; j++) {
5429 : byte_array->set(j, 0x31);
5430 : }
5431 : // Add the array in root set.
5432 : handle(byte_array, isolate);
5433 : }
5434 : // Make sure the byte arrays will be promoted on the next GC.
5435 5 : CcTest::CollectGarbage(NEW_SPACE);
5436 : // This number is close to large free list category threshold.
5437 : const int N = 0x3EEE;
5438 : {
5439 : std::vector<FixedArray> arrays;
5440 : std::set<Page*> pages;
5441 5 : FixedArray array;
5442 : // Fill all pages with fixed arrays.
5443 : heap->set_force_oom(true);
5444 10 : while (
5445 20 : AllocateFixedArrayForTest(heap, N, AllocationType::kOld).To(&array)) {
5446 5 : arrays.push_back(array);
5447 10 : pages.insert(Page::FromHeapObject(array));
5448 : // Add the array in root set.
5449 : handle(array, isolate);
5450 : }
5451 : // Expand and full one complete page with fixed arrays.
5452 : heap->set_force_oom(false);
5453 25 : while (
5454 50 : AllocateFixedArrayForTest(heap, N, AllocationType::kOld).To(&array)) {
5455 20 : arrays.push_back(array);
5456 40 : pages.insert(Page::FromHeapObject(array));
5457 : // Add the array in root set.
5458 : handle(array, isolate);
5459 : // Do not expand anymore.
5460 : heap->set_force_oom(true);
5461 : }
5462 : // Expand and mark the new page as evacuation candidate.
5463 : heap->set_force_oom(false);
5464 : {
5465 : AlwaysAllocateScope always_allocate(isolate);
5466 : Handle<HeapObject> ec_obj =
5467 5 : factory->NewFixedArray(5000, AllocationType::kOld);
5468 : Page* ec_page = Page::FromHeapObject(*ec_obj);
5469 5 : heap::ForceEvacuationCandidate(ec_page);
5470 : // Make all arrays point to evacuation candidate so that
5471 : // slots are recorded for them.
5472 55 : for (size_t j = 0; j < arrays.size(); j++) {
5473 25 : array = arrays[j];
5474 805525 : for (int i = 0; i < N; i++) {
5475 402750 : array->set(i, *ec_obj);
5476 : }
5477 : }
5478 : }
5479 5 : heap::SimulateIncrementalMarking(heap);
5480 55 : for (size_t j = 0; j < arrays.size(); j++) {
5481 25 : heap->RightTrimFixedArray(arrays[j], N - 1);
5482 : }
5483 : }
5484 : // Force allocation from the free list.
5485 : heap->set_force_oom(true);
5486 5 : CcTest::CollectGarbage(OLD_SPACE);
5487 : }
5488 :
5489 26068 : TEST(Regress598319) {
5490 5 : if (!FLAG_incremental_marking) return;
5491 : ManualGCScope manual_gc_scope;
5492 : // This test ensures that no white objects can cross the progress bar of large
5493 : // objects during incremental marking. It checks this by using Shift() during
5494 : // incremental marking.
5495 5 : CcTest::InitializeVM();
5496 10 : v8::HandleScope scope(CcTest::isolate());
5497 5 : Heap* heap = CcTest::heap();
5498 : Isolate* isolate = heap->isolate();
5499 :
5500 : // The size of the array should be larger than kProgressBarScanningChunk.
5501 : const int kNumberOfObjects = Max(FixedArray::kMaxRegularLength + 1, 128 * KB);
5502 :
5503 : struct Arr {
5504 5 : Arr(Isolate* isolate, int number_of_objects) {
5505 5 : root = isolate->factory()->NewFixedArray(1, AllocationType::kOld);
5506 : {
5507 : // Temporary scope to avoid getting any other objects into the root set.
5508 10 : v8::HandleScope scope(CcTest::isolate());
5509 : Handle<FixedArray> tmp = isolate->factory()->NewFixedArray(
5510 5 : number_of_objects, AllocationType::kOld);
5511 10 : root->set(0, *tmp);
5512 1310725 : for (int i = 0; i < get()->length(); i++) {
5513 655360 : tmp = isolate->factory()->NewFixedArray(100, AllocationType::kOld);
5514 1310720 : get()->set(i, *tmp);
5515 : }
5516 : }
5517 5 : }
5518 :
5519 : FixedArray get() { return FixedArray::cast(root->get(0)); }
5520 :
5521 : Handle<FixedArray> root;
5522 5 : } arr(isolate, kNumberOfObjects);
5523 :
5524 5 : CHECK_EQ(arr.get()->length(), kNumberOfObjects);
5525 5 : CHECK(heap->lo_space()->Contains(arr.get()));
5526 : LargePage* page = LargePage::FromHeapObject(arr.get());
5527 5 : CHECK_NOT_NULL(page);
5528 :
5529 : // GC to cleanup state
5530 5 : CcTest::CollectGarbage(OLD_SPACE);
5531 : MarkCompactCollector* collector = heap->mark_compact_collector();
5532 5 : if (collector->sweeping_in_progress()) {
5533 5 : collector->EnsureSweepingCompleted();
5534 : }
5535 :
5536 5 : CHECK(heap->lo_space()->Contains(arr.get()));
5537 : IncrementalMarking* marking = heap->incremental_marking();
5538 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5539 5 : CHECK(marking_state->IsWhite(arr.get()));
5540 1310725 : for (int i = 0; i < arr.get()->length(); i++) {
5541 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5542 655360 : CHECK(marking_state->IsWhite(arr_value));
5543 : }
5544 :
5545 : // Start incremental marking.
5546 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5547 5 : if (marking->IsStopped()) {
5548 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5549 5 : i::GarbageCollectionReason::kTesting);
5550 : }
5551 5 : CHECK(marking->IsMarking());
5552 :
5553 : // Check that we have not marked the interesting array during root scanning.
5554 1310725 : for (int i = 0; i < arr.get()->length(); i++) {
5555 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5556 655360 : CHECK(marking_state->IsWhite(arr_value));
5557 : }
5558 :
5559 : // Now we search for a state where we are in incremental marking and have
5560 : // only partially marked the large object.
5561 : const double kSmallStepSizeInMs = 0.1;
5562 20 : while (!marking->IsComplete()) {
5563 : marking->V8Step(kSmallStepSizeInMs,
5564 : i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5565 20 : StepOrigin::kV8);
5566 40 : if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->ProgressBar() > 0) {
5567 10 : CHECK_NE(page->ProgressBar(), arr.get()->Size());
5568 : {
5569 : // Shift by 1, effectively moving one white object across the progress
5570 : // bar, meaning that we will miss marking it.
5571 10 : v8::HandleScope scope(CcTest::isolate());
5572 : Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(
5573 5 : Handle<FixedArray>(arr.get(), isolate));
5574 5 : js_array->GetElementsAccessor()->Shift(js_array);
5575 : }
5576 5 : break;
5577 : }
5578 : }
5579 :
5580 : // Finish marking with bigger steps to speed up test.
5581 : const double kLargeStepSizeInMs = 1000;
5582 4120 : while (!marking->IsComplete()) {
5583 : marking->V8Step(kLargeStepSizeInMs,
5584 : i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5585 4115 : StepOrigin::kV8);
5586 4115 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5587 5 : marking->FinalizeIncrementally();
5588 : }
5589 : }
5590 5 : CHECK(marking->IsComplete());
5591 :
5592 : // All objects need to be black after marking. If a white object crossed the
5593 : // progress bar, we would fail here.
5594 1310725 : for (int i = 0; i < arr.get()->length(); i++) {
5595 : HeapObject arr_value = HeapObject::cast(arr.get()->get(i));
5596 655360 : CHECK(marking_state->IsBlack(arr_value));
5597 : }
5598 : }
5599 :
5600 10 : Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
5601 : // Make sure there is no garbage and the compilation cache is empty.
5602 110 : for (int i = 0; i < 5; i++) {
5603 50 : CcTest::CollectAllGarbage();
5604 : }
5605 10 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5606 10 : size_t size_before_allocation = heap->SizeOfObjects();
5607 : Handle<FixedArray> array =
5608 10 : heap->isolate()->factory()->NewFixedArray(length, AllocationType::kOld);
5609 10 : size_t size_after_allocation = heap->SizeOfObjects();
5610 10 : CHECK_EQ(size_after_allocation, size_before_allocation + array->Size());
5611 10 : array->Shrink(heap->isolate(), 1);
5612 10 : size_t size_after_shrinking = heap->SizeOfObjects();
5613 : // Shrinking does not change the space size immediately.
5614 10 : CHECK_EQ(size_after_allocation, size_after_shrinking);
5615 : // GC and sweeping updates the size to acccount for shrinking.
5616 10 : CcTest::CollectAllGarbage();
5617 10 : heap->mark_compact_collector()->EnsureSweepingCompleted();
5618 10 : intptr_t size_after_gc = heap->SizeOfObjects();
5619 20 : CHECK_EQ(size_after_gc, size_before_allocation + array->Size());
5620 10 : return array;
5621 : }
5622 :
5623 26068 : TEST(Regress609761) {
5624 5 : CcTest::InitializeVM();
5625 10 : v8::HandleScope scope(CcTest::isolate());
5626 5 : Heap* heap = CcTest::heap();
5627 : int length = kMaxRegularHeapObjectSize / kTaggedSize + 1;
5628 5 : Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, length);
5629 5 : CHECK(heap->lo_space()->Contains(*array));
5630 5 : }
5631 :
5632 26068 : TEST(LiveBytes) {
5633 5 : CcTest::InitializeVM();
5634 10 : v8::HandleScope scope(CcTest::isolate());
5635 5 : Heap* heap = CcTest::heap();
5636 5 : Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, 2000);
5637 5 : CHECK(heap->old_space()->Contains(*array));
5638 5 : }
5639 :
5640 26068 : TEST(Regress615489) {
5641 5 : if (!FLAG_incremental_marking) return;
5642 5 : CcTest::InitializeVM();
5643 10 : v8::HandleScope scope(CcTest::isolate());
5644 5 : Heap* heap = CcTest::heap();
5645 : Isolate* isolate = heap->isolate();
5646 5 : CcTest::CollectAllGarbage();
5647 :
5648 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5649 : i::IncrementalMarking* marking = heap->incremental_marking();
5650 5 : if (collector->sweeping_in_progress()) {
5651 5 : collector->EnsureSweepingCompleted();
5652 : }
5653 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5654 5 : if (marking->IsStopped()) {
5655 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5656 5 : i::GarbageCollectionReason::kTesting);
5657 : }
5658 5 : CHECK(marking->IsMarking());
5659 : marking->StartBlackAllocationForTesting();
5660 : {
5661 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
5662 10 : v8::HandleScope inner(CcTest::isolate());
5663 10 : isolate->factory()->NewFixedArray(500, AllocationType::kOld)->Size();
5664 : }
5665 : const double kStepSizeInMs = 100;
5666 23 : while (!marking->IsComplete()) {
5667 : marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5668 18 : StepOrigin::kV8);
5669 18 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5670 5 : marking->FinalizeIncrementally();
5671 : }
5672 : }
5673 5 : CHECK(marking->IsComplete());
5674 5 : intptr_t size_before = heap->SizeOfObjects();
5675 5 : CcTest::CollectAllGarbage();
5676 5 : intptr_t size_after = heap->SizeOfObjects();
5677 : // Live size does not increase after garbage collection.
5678 5 : CHECK_LE(size_after, size_before);
5679 : }
5680 :
5681 : class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
5682 : public:
5683 5 : explicit StaticOneByteResource(const char* data) : data_(data) {}
5684 :
5685 10 : ~StaticOneByteResource() override = default;
5686 :
5687 0 : const char* data() const override { return data_; }
5688 :
5689 5 : size_t length() const override { return strlen(data_); }
5690 :
5691 : private:
5692 : const char* data_;
5693 : };
5694 :
5695 26068 : TEST(Regress631969) {
5696 5 : if (!FLAG_incremental_marking) return;
5697 5 : FLAG_manual_evacuation_candidates_selection = true;
5698 5 : FLAG_parallel_compaction = false;
5699 : ManualGCScope manual_gc_scope;
5700 5 : CcTest::InitializeVM();
5701 10 : v8::HandleScope scope(CcTest::isolate());
5702 5 : Heap* heap = CcTest::heap();
5703 : // Get the heap in clean state.
5704 5 : CcTest::CollectGarbage(OLD_SPACE);
5705 5 : CcTest::CollectGarbage(OLD_SPACE);
5706 : Isolate* isolate = CcTest::i_isolate();
5707 : Factory* factory = isolate->factory();
5708 : // Allocate two strings in a fresh page and mark the page as evacuation
5709 : // candidate.
5710 5 : heap::SimulateFullSpace(heap->old_space());
5711 : Handle<String> s1 =
5712 5 : factory->NewStringFromStaticChars("123456789", AllocationType::kOld);
5713 : Handle<String> s2 =
5714 5 : factory->NewStringFromStaticChars("01234", AllocationType::kOld);
5715 5 : heap::ForceEvacuationCandidate(Page::FromHeapObject(*s1));
5716 :
5717 5 : heap::SimulateIncrementalMarking(heap, false);
5718 :
5719 : // Allocate a cons string and promote it to a fresh page in the old space.
5720 5 : heap::SimulateFullSpace(heap->old_space());
5721 : Handle<String> s3;
5722 10 : factory->NewConsString(s1, s2).ToHandle(&s3);
5723 5 : CcTest::CollectGarbage(NEW_SPACE);
5724 5 : CcTest::CollectGarbage(NEW_SPACE);
5725 :
5726 : // Finish incremental marking.
5727 : const double kStepSizeInMs = 100;
5728 : IncrementalMarking* marking = heap->incremental_marking();
5729 30 : while (!marking->IsComplete()) {
5730 : marking->V8Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5731 25 : StepOrigin::kV8);
5732 25 : if (marking->IsReadyToOverApproximateWeakClosure()) {
5733 5 : marking->FinalizeIncrementally();
5734 : }
5735 : }
5736 :
5737 : {
5738 : StaticOneByteResource external_string("12345678901234");
5739 5 : s3->MakeExternal(&external_string);
5740 5 : CcTest::CollectGarbage(OLD_SPACE);
5741 : // This avoids the GC from trying to free stack allocated resources.
5742 10 : i::Handle<i::ExternalOneByteString>::cast(s3)->SetResource(isolate,
5743 5 : nullptr);
5744 : }
5745 : }
5746 :
5747 26068 : TEST(LeftTrimFixedArrayInBlackArea) {
5748 5 : if (!FLAG_incremental_marking) return;
5749 5 : CcTest::InitializeVM();
5750 10 : v8::HandleScope scope(CcTest::isolate());
5751 5 : Heap* heap = CcTest::heap();
5752 : Isolate* isolate = heap->isolate();
5753 5 : CcTest::CollectAllGarbage();
5754 :
5755 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5756 : i::IncrementalMarking* marking = heap->incremental_marking();
5757 5 : if (collector->sweeping_in_progress()) {
5758 5 : collector->EnsureSweepingCompleted();
5759 : }
5760 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5761 5 : if (marking->IsStopped()) {
5762 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5763 5 : i::GarbageCollectionReason::kTesting);
5764 : }
5765 5 : CHECK(marking->IsMarking());
5766 : marking->StartBlackAllocationForTesting();
5767 :
5768 : // Ensure that we allocate a new page, set up a bump pointer area, and
5769 : // perform the allocation in a black area.
5770 5 : heap::SimulateFullSpace(heap->old_space());
5771 5 : isolate->factory()->NewFixedArray(4, AllocationType::kOld);
5772 : Handle<FixedArray> array =
5773 5 : isolate->factory()->NewFixedArray(50, AllocationType::kOld);
5774 5 : CHECK(heap->old_space()->Contains(*array));
5775 : IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5776 5 : CHECK(marking_state->IsBlack(*array));
5777 :
5778 : // Now left trim the allocated black area. A filler has to be installed
5779 : // for the trimmed area and all mark bits of the trimmed area have to be
5780 : // cleared.
5781 5 : FixedArrayBase trimmed = heap->LeftTrimFixedArray(*array, 10);
5782 5 : CHECK(marking_state->IsBlack(trimmed));
5783 :
5784 5 : heap::GcAndSweep(heap, OLD_SPACE);
5785 : }
5786 :
5787 26068 : TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
5788 5 : if (!FLAG_incremental_marking) return;
5789 5 : CcTest::InitializeVM();
5790 10 : v8::HandleScope scope(CcTest::isolate());
5791 5 : Heap* heap = CcTest::heap();
5792 : Isolate* isolate = heap->isolate();
5793 5 : CcTest::CollectAllGarbage();
5794 :
5795 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5796 : i::IncrementalMarking* marking = heap->incremental_marking();
5797 5 : if (collector->sweeping_in_progress()) {
5798 5 : collector->EnsureSweepingCompleted();
5799 : }
5800 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5801 5 : if (marking->IsStopped()) {
5802 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5803 5 : i::GarbageCollectionReason::kTesting);
5804 : }
5805 5 : CHECK(marking->IsMarking());
5806 : marking->StartBlackAllocationForTesting();
5807 :
5808 : // Ensure that we allocate a new page, set up a bump pointer area, and
5809 : // perform the allocation in a black area.
5810 5 : heap::SimulateFullSpace(heap->old_space());
5811 5 : isolate->factory()->NewFixedArray(10, AllocationType::kOld);
5812 :
5813 : // Allocate the fixed array that will be trimmed later.
5814 : Handle<FixedArray> array =
5815 5 : isolate->factory()->NewFixedArray(100, AllocationType::kOld);
5816 : Address start_address = array->address();
5817 5 : Address end_address = start_address + array->Size();
5818 : Page* page = Page::FromAddress(start_address);
5819 : IncrementalMarking::NonAtomicMarkingState* marking_state =
5820 : marking->non_atomic_marking_state();
5821 5 : CHECK(marking_state->IsBlack(*array));
5822 10 : CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
5823 : page->AddressToMarkbitIndex(start_address),
5824 : page->AddressToMarkbitIndex(end_address)));
5825 5 : CHECK(heap->old_space()->Contains(*array));
5826 :
5827 : FixedArrayBase previous = *array;
5828 : FixedArrayBase trimmed;
5829 :
5830 : // First trim in one word steps.
5831 105 : for (int i = 0; i < 10; i++) {
5832 50 : trimmed = heap->LeftTrimFixedArray(previous, 1);
5833 : HeapObject filler = HeapObject::FromAddress(previous->address());
5834 50 : CHECK(filler->IsFiller());
5835 50 : CHECK(marking_state->IsBlack(trimmed));
5836 50 : CHECK(marking_state->IsBlack(previous));
5837 : previous = trimmed;
5838 : }
5839 :
5840 : // Then trim in two and three word steps.
5841 25 : for (int i = 2; i <= 3; i++) {
5842 210 : for (int j = 0; j < 10; j++) {
5843 100 : trimmed = heap->LeftTrimFixedArray(previous, i);
5844 : HeapObject filler = HeapObject::FromAddress(previous->address());
5845 100 : CHECK(filler->IsFiller());
5846 100 : CHECK(marking_state->IsBlack(trimmed));
5847 100 : CHECK(marking_state->IsBlack(previous));
5848 : previous = trimmed;
5849 : }
5850 : }
5851 :
5852 5 : heap::GcAndSweep(heap, OLD_SPACE);
5853 : }
5854 :
5855 26068 : TEST(ContinuousRightTrimFixedArrayInBlackArea) {
5856 5 : if (!FLAG_incremental_marking) return;
5857 5 : CcTest::InitializeVM();
5858 10 : v8::HandleScope scope(CcTest::isolate());
5859 5 : Heap* heap = CcTest::heap();
5860 : Isolate* isolate = CcTest::i_isolate();
5861 5 : CcTest::CollectAllGarbage();
5862 :
5863 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
5864 : i::IncrementalMarking* marking = heap->incremental_marking();
5865 5 : if (collector->sweeping_in_progress()) {
5866 5 : collector->EnsureSweepingCompleted();
5867 : }
5868 5 : CHECK(marking->IsMarking() || marking->IsStopped());
5869 5 : if (marking->IsStopped()) {
5870 : heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5871 5 : i::GarbageCollectionReason::kTesting);
5872 : }
5873 5 : CHECK(marking->IsMarking());
5874 : marking->StartBlackAllocationForTesting();
5875 :
5876 : // Ensure that we allocate a new page, set up a bump pointer area, and
5877 : // perform the allocation in a black area.
5878 5 : heap::SimulateFullSpace(heap->old_space());
5879 5 : isolate->factory()->NewFixedArray(10, AllocationType::kOld);
5880 :
5881 : // Allocate the fixed array that will be trimmed later.
5882 : Handle<FixedArray> array =
5883 5 : CcTest::i_isolate()->factory()->NewFixedArray(100, AllocationType::kOld);
5884 : Address start_address = array->address();
5885 5 : Address end_address = start_address + array->Size();
5886 : Page* page = Page::FromAddress(start_address);
5887 : IncrementalMarking::NonAtomicMarkingState* marking_state =
5888 : marking->non_atomic_marking_state();
5889 5 : CHECK(marking_state->IsBlack(*array));
5890 :
5891 10 : CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
5892 : page->AddressToMarkbitIndex(start_address),
5893 : page->AddressToMarkbitIndex(end_address)));
5894 5 : CHECK(heap->old_space()->Contains(*array));
5895 :
5896 : // Trim it once by one word to make checking for white marking color uniform.
5897 5 : Address previous = end_address - kTaggedSize;
5898 10 : isolate->heap()->RightTrimFixedArray(*array, 1);
5899 :
5900 : HeapObject filler = HeapObject::FromAddress(previous);
5901 5 : CHECK(filler->IsFiller());
5902 5 : CHECK(marking_state->IsImpossible(filler));
5903 :
5904 : // Trim 10 times by one, two, and three word.
5905 35 : for (int i = 1; i <= 3; i++) {
5906 315 : for (int j = 0; j < 10; j++) {
5907 150 : previous -= kTaggedSize * i;
5908 150 : isolate->heap()->RightTrimFixedArray(*array, i);
5909 : HeapObject filler = HeapObject::FromAddress(previous);
5910 150 : CHECK(filler->IsFiller());
5911 150 : CHECK(marking_state->IsWhite(filler));
5912 : }
5913 : }
5914 :
5915 5 : heap::GcAndSweep(heap, OLD_SPACE);
5916 : }
5917 :
5918 26068 : TEST(Regress618958) {
5919 5 : if (!FLAG_incremental_marking) return;
5920 5 : CcTest::InitializeVM();
5921 10 : v8::HandleScope scope(CcTest::isolate());
5922 5 : Heap* heap = CcTest::heap();
5923 : bool isolate_is_locked = true;
5924 5 : CcTest::isolate()->AdjustAmountOfExternalAllocatedMemory(100 * MB);
5925 : int mark_sweep_count_before = heap->ms_count();
5926 : heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
5927 5 : isolate_is_locked);
5928 : int mark_sweep_count_after = heap->ms_count();
5929 5 : int mark_sweeps_performed = mark_sweep_count_after - mark_sweep_count_before;
5930 : // The memory pressuer handler either performed two GCs or performed one and
5931 : // started incremental marking.
5932 5 : CHECK(mark_sweeps_performed == 2 ||
5933 : (mark_sweeps_performed == 1 &&
5934 : !heap->incremental_marking()->IsStopped()));
5935 : }
5936 :
5937 26068 : TEST(YoungGenerationLargeObjectAllocationScavenge) {
5938 10 : if (FLAG_minor_mc) return;
5939 5 : FLAG_young_generation_large_objects = true;
5940 5 : CcTest::InitializeVM();
5941 5 : v8::HandleScope scope(CcTest::isolate());
5942 5 : Heap* heap = CcTest::heap();
5943 : Isolate* isolate = heap->isolate();
5944 10 : if (!isolate->serializer_enabled()) return;
5945 :
5946 : // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
5947 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
5948 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
5949 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
5950 0 : CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
5951 0 : CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
5952 :
5953 0 : Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
5954 0 : array_small->set(0, *number);
5955 :
5956 0 : CcTest::CollectGarbage(NEW_SPACE);
5957 :
5958 : // After the first young generation GC array_small will be in the old
5959 : // generation large object space.
5960 : chunk = MemoryChunk::FromHeapObject(*array_small);
5961 0 : CHECK_EQ(LO_SPACE, chunk->owner()->identity());
5962 0 : CHECK(!chunk->InYoungGeneration());
5963 :
5964 0 : CcTest::CollectAllAvailableGarbage();
5965 : }
5966 :
5967 26068 : TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
5968 10 : if (FLAG_minor_mc) return;
5969 5 : FLAG_young_generation_large_objects = true;
5970 5 : CcTest::InitializeVM();
5971 5 : v8::HandleScope scope(CcTest::isolate());
5972 5 : Heap* heap = CcTest::heap();
5973 : Isolate* isolate = heap->isolate();
5974 10 : if (!isolate->serializer_enabled()) return;
5975 :
5976 : // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
5977 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
5978 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
5979 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
5980 0 : CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
5981 0 : CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
5982 :
5983 0 : Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
5984 0 : array_small->set(0, *number);
5985 :
5986 0 : CcTest::CollectGarbage(OLD_SPACE);
5987 :
5988 : // After the first full GC array_small will be in the old generation
5989 : // large object space.
5990 : chunk = MemoryChunk::FromHeapObject(*array_small);
5991 0 : CHECK_EQ(LO_SPACE, chunk->owner()->identity());
5992 0 : CHECK(!chunk->InYoungGeneration());
5993 :
5994 0 : CcTest::CollectAllAvailableGarbage();
5995 : }
5996 :
5997 26068 : TEST(YoungGenerationLargeObjectAllocationReleaseScavenger) {
5998 10 : if (FLAG_minor_mc) return;
5999 5 : FLAG_young_generation_large_objects = true;
6000 5 : CcTest::InitializeVM();
6001 5 : v8::HandleScope scope(CcTest::isolate());
6002 5 : Heap* heap = CcTest::heap();
6003 : Isolate* isolate = heap->isolate();
6004 10 : if (!isolate->serializer_enabled()) return;
6005 :
6006 : {
6007 : HandleScope scope(isolate);
6008 0 : for (int i = 0; i < 10; i++) {
6009 0 : Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(20000);
6010 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
6011 0 : CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
6012 0 : CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
6013 : }
6014 : }
6015 :
6016 0 : CcTest::CollectGarbage(NEW_SPACE);
6017 0 : CHECK(isolate->heap()->new_lo_space()->IsEmpty());
6018 0 : CHECK_EQ(0, isolate->heap()->new_lo_space()->Size());
6019 0 : CHECK_EQ(0, isolate->heap()->new_lo_space()->SizeOfObjects());
6020 0 : CHECK(isolate->heap()->lo_space()->IsEmpty());
6021 0 : CHECK_EQ(0, isolate->heap()->lo_space()->Size());
6022 0 : CHECK_EQ(0, isolate->heap()->lo_space()->SizeOfObjects());
6023 : }
6024 :
6025 26068 : TEST(UncommitUnusedLargeObjectMemory) {
6026 5 : CcTest::InitializeVM();
6027 10 : v8::HandleScope scope(CcTest::isolate());
6028 5 : Heap* heap = CcTest::heap();
6029 : Isolate* isolate = heap->isolate();
6030 :
6031 : Handle<FixedArray> array =
6032 5 : isolate->factory()->NewFixedArray(200000, AllocationType::kOld);
6033 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
6034 5 : CHECK(chunk->owner()->identity() == LO_SPACE);
6035 :
6036 5 : intptr_t size_before = array->Size();
6037 5 : size_t committed_memory_before = chunk->CommittedPhysicalMemory();
6038 :
6039 5 : array->Shrink(isolate, 1);
6040 5 : CHECK(array->Size() < size_before);
6041 :
6042 5 : CcTest::CollectAllGarbage();
6043 5 : CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
6044 10 : size_t shrinked_size = RoundUp(
6045 15 : (array->address() - chunk->address()) + array->Size(), CommitPageSize());
6046 5 : CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory());
6047 5 : }
6048 :
6049 26068 : TEST(RememberedSetRemoveRange) {
6050 5 : CcTest::InitializeVM();
6051 10 : v8::HandleScope scope(CcTest::isolate());
6052 5 : Heap* heap = CcTest::heap();
6053 : Isolate* isolate = heap->isolate();
6054 :
6055 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(
6056 5 : Page::kPageSize / kTaggedSize, AllocationType::kOld);
6057 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
6058 5 : CHECK(chunk->owner()->identity() == LO_SPACE);
6059 5 : Address start = array->address();
6060 : // Maps slot to boolean indicator of whether the slot should be in the set.
6061 : std::map<Address, bool> slots;
6062 5 : slots[start + 0] = true;
6063 5 : slots[start + kTaggedSize] = true;
6064 5 : slots[start + Page::kPageSize - kTaggedSize] = true;
6065 5 : slots[start + Page::kPageSize] = true;
6066 5 : slots[start + Page::kPageSize + kTaggedSize] = true;
6067 5 : slots[chunk->area_end() - kTaggedSize] = true;
6068 :
6069 30 : for (auto x : slots) {
6070 25 : RememberedSet<OLD_TO_NEW>::Insert(chunk, x.first);
6071 : }
6072 :
6073 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6074 50 : [&slots](MaybeObjectSlot slot) {
6075 50 : CHECK(slots[slot.address()]);
6076 25 : return KEEP_SLOT;
6077 : },
6078 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6079 :
6080 5 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kTaggedSize,
6081 5 : SlotSet::FREE_EMPTY_BUCKETS);
6082 5 : slots[start] = false;
6083 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6084 40 : [&slots](MaybeObjectSlot slot) {
6085 40 : CHECK(slots[slot.address()]);
6086 20 : return KEEP_SLOT;
6087 : },
6088 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6089 :
6090 5 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kTaggedSize,
6091 : start + Page::kPageSize,
6092 5 : SlotSet::FREE_EMPTY_BUCKETS);
6093 5 : slots[start + kTaggedSize] = false;
6094 5 : slots[start + Page::kPageSize - kTaggedSize] = false;
6095 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6096 20 : [&slots](MaybeObjectSlot slot) {
6097 20 : CHECK(slots[slot.address()]);
6098 10 : return KEEP_SLOT;
6099 : },
6100 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6101 :
6102 5 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start,
6103 : start + Page::kPageSize + kTaggedSize,
6104 5 : SlotSet::FREE_EMPTY_BUCKETS);
6105 5 : slots[start + Page::kPageSize] = false;
6106 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6107 10 : [&slots](MaybeObjectSlot slot) {
6108 10 : CHECK(slots[slot.address()]);
6109 5 : return KEEP_SLOT;
6110 : },
6111 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6112 :
6113 5 : RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, chunk->area_end() - kTaggedSize,
6114 : chunk->area_end(),
6115 5 : SlotSet::FREE_EMPTY_BUCKETS);
6116 5 : slots[chunk->area_end() - kTaggedSize] = false;
6117 : RememberedSet<OLD_TO_NEW>::Iterate(chunk,
6118 0 : [&slots](MaybeObjectSlot slot) {
6119 0 : CHECK(slots[slot.address()]);
6120 0 : return KEEP_SLOT;
6121 : },
6122 5 : SlotSet::PREFREE_EMPTY_BUCKETS);
6123 5 : }
6124 :
6125 26068 : HEAP_TEST(Regress670675) {
6126 5 : if (!FLAG_incremental_marking) return;
6127 5 : FLAG_stress_incremental_marking = false;
6128 5 : CcTest::InitializeVM();
6129 10 : v8::HandleScope scope(CcTest::isolate());
6130 5 : Heap* heap = CcTest::heap();
6131 : Isolate* isolate = heap->isolate();
6132 : i::MarkCompactCollector* collector = heap->mark_compact_collector();
6133 5 : CcTest::CollectAllGarbage();
6134 :
6135 5 : if (collector->sweeping_in_progress()) {
6136 5 : collector->EnsureSweepingCompleted();
6137 : }
6138 5 : i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
6139 5 : if (marking->IsStopped()) {
6140 5 : marking->Start(i::GarbageCollectionReason::kTesting);
6141 : }
6142 : size_t array_length = 128 * KB;
6143 5 : size_t n = heap->OldGenerationSpaceAvailable() / array_length;
6144 825 : for (size_t i = 0; i < n + 40; i++) {
6145 : {
6146 : HandleScope inner_scope(isolate);
6147 : isolate->factory()->NewFixedArray(static_cast<int>(array_length),
6148 415 : AllocationType::kOld);
6149 : }
6150 415 : if (marking->IsStopped()) break;
6151 410 : double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
6152 : marking->AdvanceWithDeadline(
6153 410 : deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
6154 : }
6155 : DCHECK(marking->IsStopped());
6156 : }
6157 :
6158 : namespace {
6159 10 : Handle<Code> GenerateDummyImmovableCode(Isolate* isolate) {
6160 40 : Assembler assm(AssemblerOptions{});
6161 :
6162 : const int kNumberOfNops = 1 << 10;
6163 20490 : for (int i = 0; i < kNumberOfNops; i++) {
6164 10240 : assm.nop(); // supported on all architectures
6165 : }
6166 :
6167 10 : CodeDesc desc;
6168 : assm.GetCode(isolate, &desc);
6169 : Handle<Code> code = isolate->factory()->NewCode(
6170 : desc, Code::STUB, Handle<Code>(), Builtins::kNoBuiltinId,
6171 30 : MaybeHandle<ByteArray>(), DeoptimizationData::Empty(isolate), kImmovable);
6172 10 : CHECK(code->IsCode());
6173 :
6174 20 : return code;
6175 : }
6176 : } // namespace
6177 :
6178 26068 : HEAP_TEST(Regress5831) {
6179 5 : CcTest::InitializeVM();
6180 5 : Heap* heap = CcTest::heap();
6181 : Isolate* isolate = CcTest::i_isolate();
6182 : HandleScope handle_scope(isolate);
6183 :
6184 : // Used to ensure that the generated code is not collected.
6185 : const int kInitialSize = 32;
6186 5 : Handle<FixedArray> array = isolate->factory()->NewFixedArray(kInitialSize);
6187 :
6188 : // Ensure that all immovable code space pages are full and we overflow into
6189 : // LO_SPACE.
6190 : const int kMaxIterations = 1 << 16;
6191 : bool overflowed_into_lospace = false;
6192 5 : for (int i = 0; i < kMaxIterations; i++) {
6193 5 : Handle<Code> code = GenerateDummyImmovableCode(isolate);
6194 5 : array = FixedArray::SetAndGrow(isolate, array, i, code);
6195 10 : CHECK(heap->code_space()->Contains(code->address()) ||
6196 : heap->code_lo_space()->Contains(*code));
6197 5 : if (heap->code_lo_space()->Contains(*code)) {
6198 : overflowed_into_lospace = true;
6199 : break;
6200 : }
6201 : }
6202 :
6203 5 : CHECK(overflowed_into_lospace);
6204 :
6205 : // Fake a serializer run.
6206 5 : isolate->serializer_enabled_ = true;
6207 :
6208 : // Generate the code.
6209 5 : Handle<Code> code = GenerateDummyImmovableCode(isolate);
6210 5 : CHECK_GE(i::kMaxRegularHeapObjectSize, code->Size());
6211 5 : CHECK(!heap->code_space()->first_page()->Contains(code->address()));
6212 :
6213 : // Ensure it's not in large object space.
6214 : MemoryChunk* chunk = MemoryChunk::FromHeapObject(*code);
6215 5 : CHECK(chunk->owner()->identity() != LO_SPACE);
6216 5 : CHECK(chunk->NeverEvacuate());
6217 5 : }
6218 :
6219 26068 : TEST(Regress6800) {
6220 5 : CcTest::InitializeVM();
6221 : Isolate* isolate = CcTest::i_isolate();
6222 : HandleScope handle_scope(isolate);
6223 :
6224 : const int kRootLength = 1000;
6225 : Handle<FixedArray> root =
6226 5 : isolate->factory()->NewFixedArray(kRootLength, AllocationType::kOld);
6227 : {
6228 : HandleScope inner_scope(isolate);
6229 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
6230 10005 : for (int i = 0; i < kRootLength; i++) {
6231 10000 : root->set(i, *new_space_array);
6232 : }
6233 10005 : for (int i = 0; i < kRootLength; i++) {
6234 15000 : root->set(i, ReadOnlyRoots(CcTest::heap()).undefined_value());
6235 : }
6236 : }
6237 5 : CcTest::CollectGarbage(NEW_SPACE);
6238 5 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
6239 : MemoryChunk::FromHeapObject(*root)));
6240 5 : }
6241 :
6242 26068 : TEST(Regress6800LargeObject) {
6243 5 : CcTest::InitializeVM();
6244 : Isolate* isolate = CcTest::i_isolate();
6245 : HandleScope handle_scope(isolate);
6246 :
6247 : const int kRootLength = i::kMaxRegularHeapObjectSize / kTaggedSize;
6248 : Handle<FixedArray> root =
6249 5 : isolate->factory()->NewFixedArray(kRootLength, AllocationType::kOld);
6250 5 : CcTest::heap()->lo_space()->Contains(*root);
6251 : {
6252 : HandleScope inner_scope(isolate);
6253 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
6254 327685 : for (int i = 0; i < kRootLength; i++) {
6255 327680 : root->set(i, *new_space_array);
6256 : }
6257 327685 : for (int i = 0; i < kRootLength; i++) {
6258 491520 : root->set(i, ReadOnlyRoots(CcTest::heap()).undefined_value());
6259 : }
6260 : }
6261 5 : CcTest::CollectGarbage(OLD_SPACE);
6262 5 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
6263 : MemoryChunk::FromHeapObject(*root)));
6264 5 : }
6265 :
6266 26068 : HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
6267 5 : if (!FLAG_incremental_marking) return;
6268 : ManualGCScope manual_gc_scope;
6269 5 : CcTest::InitializeVM();
6270 10 : v8::HandleScope scope(CcTest::isolate());
6271 5 : Heap* heap = CcTest::heap();
6272 : Isolate* isolate = heap->isolate();
6273 5 : CcTest::CollectAllGarbage();
6274 5 : heap::SimulateIncrementalMarking(heap, false);
6275 : Handle<Map> map;
6276 : {
6277 : AlwaysAllocateScope always_allocate(isolate);
6278 5 : map = isolate->factory()->NewMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
6279 : }
6280 : heap->incremental_marking()->StartBlackAllocationForTesting();
6281 : Handle<HeapObject> object;
6282 : {
6283 : AlwaysAllocateScope always_allocate(isolate);
6284 : object = handle(isolate->factory()->NewForTest(map, AllocationType::kOld),
6285 : isolate);
6286 : }
6287 : // The object is black. If Factory::New sets the map without write-barrier,
6288 : // then the map is white and will be freed prematurely.
6289 5 : heap::SimulateIncrementalMarking(heap, true);
6290 5 : CcTest::CollectAllGarbage();
6291 : MarkCompactCollector* collector = heap->mark_compact_collector();
6292 5 : if (collector->sweeping_in_progress()) {
6293 5 : collector->EnsureSweepingCompleted();
6294 : }
6295 5 : CHECK(object->map()->IsMap());
6296 : }
6297 :
6298 26068 : HEAP_TEST(MarkCompactEpochCounter) {
6299 : ManualGCScope manual_gc_scope;
6300 5 : CcTest::InitializeVM();
6301 10 : v8::HandleScope scope(CcTest::isolate());
6302 5 : Heap* heap = CcTest::heap();
6303 : unsigned epoch0 = heap->mark_compact_collector()->epoch();
6304 5 : CcTest::CollectGarbage(OLD_SPACE);
6305 : unsigned epoch1 = heap->mark_compact_collector()->epoch();
6306 5 : CHECK_EQ(epoch0 + 1, epoch1);
6307 5 : heap::SimulateIncrementalMarking(heap, true);
6308 5 : CcTest::CollectGarbage(OLD_SPACE);
6309 : unsigned epoch2 = heap->mark_compact_collector()->epoch();
6310 5 : CHECK_EQ(epoch1 + 1, epoch2);
6311 5 : CcTest::CollectGarbage(NEW_SPACE);
6312 : unsigned epoch3 = heap->mark_compact_collector()->epoch();
6313 5 : CHECK_EQ(epoch2, epoch3);
6314 5 : }
6315 :
6316 26068 : UNINITIALIZED_TEST(ReinitializeStringHashSeed) {
6317 : // Enable rehashing and create an isolate and context.
6318 5 : i::FLAG_rehash_snapshot = true;
6319 25 : for (int i = 1; i < 3; i++) {
6320 10 : i::FLAG_hash_seed = 1337 * i;
6321 : v8::Isolate::CreateParams create_params;
6322 10 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6323 10 : v8::Isolate* isolate = v8::Isolate::New(create_params);
6324 : {
6325 : v8::Isolate::Scope isolate_scope(isolate);
6326 10 : CHECK_EQ(static_cast<uint64_t>(1337 * i),
6327 : HashSeed(reinterpret_cast<i::Isolate*>(isolate)));
6328 20 : v8::HandleScope handle_scope(isolate);
6329 10 : v8::Local<v8::Context> context = v8::Context::New(isolate);
6330 10 : CHECK(!context.IsEmpty());
6331 : v8::Context::Scope context_scope(context);
6332 : }
6333 10 : isolate->Dispose();
6334 : }
6335 5 : }
6336 :
6337 : const int kHeapLimit = 100 * MB;
6338 : Isolate* oom_isolate = nullptr;
6339 :
6340 0 : void OOMCallback(const char* location, bool is_heap_oom) {
6341 0 : Heap* heap = oom_isolate->heap();
6342 : size_t kSlack = heap->new_space()->Capacity();
6343 0 : CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
6344 0 : CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack);
6345 0 : base::OS::ExitProcess(0);
6346 0 : }
6347 :
6348 26063 : UNINITIALIZED_TEST(OutOfMemory) {
6349 0 : if (FLAG_stress_incremental_marking) return;
6350 : #ifdef VERIFY_HEAP
6351 : if (FLAG_verify_heap) return;
6352 : #endif
6353 0 : FLAG_max_old_space_size = kHeapLimit / MB;
6354 : v8::Isolate::CreateParams create_params;
6355 0 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6356 0 : v8::Isolate* isolate = v8::Isolate::New(create_params);
6357 : Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
6358 0 : oom_isolate = i_isolate;
6359 0 : isolate->SetOOMErrorHandler(OOMCallback);
6360 : {
6361 : Factory* factory = i_isolate->factory();
6362 : HandleScope handle_scope(i_isolate);
6363 : while (true) {
6364 0 : factory->NewFixedArray(100);
6365 : }
6366 : }
6367 : }
6368 :
6369 26064 : UNINITIALIZED_TEST(OutOfMemoryIneffectiveGC) {
6370 2 : if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
6371 1 : if (FLAG_stress_incremental_marking) return;
6372 : #ifdef VERIFY_HEAP
6373 : if (FLAG_verify_heap) return;
6374 : #endif
6375 :
6376 0 : FLAG_max_old_space_size = kHeapLimit / MB;
6377 : v8::Isolate::CreateParams create_params;
6378 0 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6379 0 : v8::Isolate* isolate = v8::Isolate::New(create_params);
6380 : Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
6381 0 : oom_isolate = i_isolate;
6382 0 : isolate->SetOOMErrorHandler(OOMCallback);
6383 : Factory* factory = i_isolate->factory();
6384 : Heap* heap = i_isolate->heap();
6385 0 : heap->CollectAllGarbage(Heap::kNoGCFlags, GarbageCollectionReason::kTesting);
6386 : {
6387 : HandleScope scope(i_isolate);
6388 0 : while (heap->OldGenerationSizeOfObjects() <
6389 0 : heap->MaxOldGenerationSize() * 0.9) {
6390 0 : factory->NewFixedArray(100, AllocationType::kOld);
6391 : }
6392 : {
6393 : int initial_ms_count = heap->ms_count();
6394 : int ineffective_ms_start = initial_ms_count;
6395 0 : while (heap->ms_count() < initial_ms_count + 10) {
6396 : HandleScope inner_scope(i_isolate);
6397 0 : factory->NewFixedArray(30000, AllocationType::kOld);
6398 0 : if (heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3) {
6399 0 : ineffective_ms_start = heap->ms_count() + 1;
6400 : }
6401 : }
6402 0 : int consecutive_ineffective_ms = heap->ms_count() - ineffective_ms_start;
6403 0 : CHECK_IMPLIES(
6404 : consecutive_ineffective_ms >= 4,
6405 : heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3);
6406 : }
6407 : }
6408 0 : isolate->Dispose();
6409 : }
6410 :
6411 26068 : HEAP_TEST(Regress779503) {
6412 : // The following regression test ensures that the Scavenger does not allocate
6413 : // over invalid slots. More specific, the Scavenger should not sweep a page
6414 : // that it currently processes because it might allocate over the currently
6415 : // processed slot.
6416 : const int kArraySize = 2048;
6417 5 : CcTest::InitializeVM();
6418 : Isolate* isolate = CcTest::i_isolate();
6419 5 : Heap* heap = CcTest::heap();
6420 5 : heap::SealCurrentObjects(heap);
6421 : {
6422 : HandleScope handle_scope(isolate);
6423 : // The byte array filled with kHeapObjectTag ensures that we cannot read
6424 : // from the slot again and interpret it as heap value. Doing so will crash.
6425 5 : Handle<ByteArray> byte_array = isolate->factory()->NewByteArray(kArraySize);
6426 5 : CHECK(Heap::InYoungGeneration(*byte_array));
6427 20485 : for (int i = 0; i < kArraySize; i++) {
6428 : byte_array->set(i, kHeapObjectTag);
6429 : }
6430 :
6431 : {
6432 : HandleScope handle_scope(isolate);
6433 : // The FixedArray in old space serves as space for slots.
6434 : Handle<FixedArray> fixed_array =
6435 5 : isolate->factory()->NewFixedArray(kArraySize, AllocationType::kOld);
6436 5 : CHECK(!Heap::InYoungGeneration(*fixed_array));
6437 20485 : for (int i = 0; i < kArraySize; i++) {
6438 20480 : fixed_array->set(i, *byte_array);
6439 : }
6440 : }
6441 : // Delay sweeper tasks to allow the scavenger to sweep the page it is
6442 : // currently scavenging.
6443 5 : heap->delay_sweeper_tasks_for_testing_ = true;
6444 5 : CcTest::CollectGarbage(OLD_SPACE);
6445 5 : CHECK(Heap::InYoungGeneration(*byte_array));
6446 : }
6447 : // Scavenging and sweeping the same page will crash as slots will be
6448 : // overridden.
6449 5 : CcTest::CollectGarbage(NEW_SPACE);
6450 5 : heap->delay_sweeper_tasks_for_testing_ = false;
6451 5 : }
6452 :
6453 : struct OutOfMemoryState {
6454 : Heap* heap;
6455 : bool oom_triggered;
6456 : size_t old_generation_capacity_at_oom;
6457 : size_t memory_allocator_size_at_oom;
6458 : size_t new_space_capacity_at_oom;
6459 : size_t new_lo_space_size_at_oom;
6460 : size_t current_heap_limit;
6461 : size_t initial_heap_limit;
6462 : };
6463 :
6464 12 : size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
6465 : size_t initial_heap_limit) {
6466 : OutOfMemoryState* state = static_cast<OutOfMemoryState*>(raw_state);
6467 12 : Heap* heap = state->heap;
6468 12 : state->oom_triggered = true;
6469 12 : state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
6470 12 : state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
6471 12 : state->new_space_capacity_at_oom = heap->new_space()->Capacity();
6472 12 : state->new_lo_space_size_at_oom = heap->new_lo_space()->Size();
6473 12 : state->current_heap_limit = current_heap_limit;
6474 12 : state->initial_heap_limit = initial_heap_limit;
6475 12 : return initial_heap_limit + 100 * MB;
6476 : }
6477 :
6478 0 : size_t MemoryAllocatorSizeFromHeapCapacity(size_t capacity) {
6479 : // Size to capacity factor.
6480 : double factor =
6481 4 : Page::kPageSize * 1.0 / MemoryChunkLayout::AllocatableMemoryInDataPage();
6482 : // Some tables (e.g. deoptimization table) are allocated directly with the
6483 : // memory allocator. Allow some slack to account for them.
6484 : size_t slack = 5 * MB;
6485 4 : return static_cast<size_t>(capacity * factor) + slack;
6486 : }
6487 :
6488 26063 : UNINITIALIZED_TEST(OutOfMemorySmallObjects) {
6489 0 : if (FLAG_stress_incremental_marking) return;
6490 : #ifdef VERIFY_HEAP
6491 : if (FLAG_verify_heap) return;
6492 : #endif
6493 : const size_t kOldGenerationLimit = 300 * MB;
6494 0 : FLAG_max_old_space_size = kOldGenerationLimit / MB;
6495 : v8::Isolate::CreateParams create_params;
6496 0 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6497 : Isolate* isolate =
6498 0 : reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6499 : Heap* heap = isolate->heap();
6500 : Factory* factory = isolate->factory();
6501 : OutOfMemoryState state;
6502 0 : state.heap = heap;
6503 0 : state.oom_triggered = false;
6504 0 : heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6505 : {
6506 : HandleScope handle_scope(isolate);
6507 0 : while (!state.oom_triggered) {
6508 0 : factory->NewFixedArray(100);
6509 : }
6510 : }
6511 0 : CHECK_LE(state.old_generation_capacity_at_oom,
6512 : kOldGenerationLimit + state.new_space_capacity_at_oom);
6513 0 : CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
6514 : state.new_space_capacity_at_oom);
6515 0 : CHECK_LE(
6516 : state.memory_allocator_size_at_oom,
6517 : MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
6518 : 2 * state.new_space_capacity_at_oom));
6519 0 : reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6520 : }
6521 :
6522 26068 : UNINITIALIZED_TEST(OutOfMemoryLargeObjects) {
6523 6 : if (FLAG_stress_incremental_marking) return;
6524 : #ifdef VERIFY_HEAP
6525 : if (FLAG_verify_heap) return;
6526 : #endif
6527 : const size_t kOldGenerationLimit = 300 * MB;
6528 4 : FLAG_max_old_space_size = kOldGenerationLimit / MB;
6529 : v8::Isolate::CreateParams create_params;
6530 4 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6531 : Isolate* isolate =
6532 4 : reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6533 : Heap* heap = isolate->heap();
6534 : Factory* factory = isolate->factory();
6535 : OutOfMemoryState state;
6536 4 : state.heap = heap;
6537 4 : state.oom_triggered = false;
6538 4 : heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6539 : const int kFixedArrayLength = 1000000;
6540 : {
6541 : HandleScope handle_scope(isolate);
6542 644 : while (!state.oom_triggered) {
6543 320 : factory->NewFixedArray(kFixedArrayLength);
6544 : }
6545 : }
6546 4 : CHECK_LE(state.old_generation_capacity_at_oom, kOldGenerationLimit);
6547 4 : CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
6548 : state.new_space_capacity_at_oom +
6549 : state.new_lo_space_size_at_oom +
6550 : FixedArray::SizeFor(kFixedArrayLength));
6551 8 : CHECK_LE(
6552 : state.memory_allocator_size_at_oom,
6553 : MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
6554 : 2 * state.new_space_capacity_at_oom +
6555 : state.new_lo_space_size_at_oom));
6556 4 : reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6557 : }
6558 :
6559 26068 : UNINITIALIZED_TEST(RestoreHeapLimit) {
6560 6 : if (FLAG_stress_incremental_marking) return;
6561 : #ifdef VERIFY_HEAP
6562 : if (FLAG_verify_heap) return;
6563 : #endif
6564 : ManualGCScope manual_gc_scope;
6565 : const size_t kOldGenerationLimit = 300 * MB;
6566 4 : FLAG_max_old_space_size = kOldGenerationLimit / MB;
6567 : v8::Isolate::CreateParams create_params;
6568 4 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6569 : Isolate* isolate =
6570 4 : reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6571 : Heap* heap = isolate->heap();
6572 : Factory* factory = isolate->factory();
6573 : OutOfMemoryState state;
6574 4 : state.heap = heap;
6575 4 : state.oom_triggered = false;
6576 4 : heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6577 4 : heap->AutomaticallyRestoreInitialHeapLimit(0.5);
6578 : const int kFixedArrayLength = 1000000;
6579 : {
6580 : HandleScope handle_scope(isolate);
6581 644 : while (!state.oom_triggered) {
6582 320 : factory->NewFixedArray(kFixedArrayLength);
6583 : }
6584 : }
6585 4 : heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
6586 4 : state.oom_triggered = false;
6587 : {
6588 : HandleScope handle_scope(isolate);
6589 628 : while (!state.oom_triggered) {
6590 312 : factory->NewFixedArray(kFixedArrayLength);
6591 : }
6592 : }
6593 4 : CHECK_EQ(state.current_heap_limit, state.initial_heap_limit);
6594 4 : reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6595 : }
6596 :
// Test helper: forwards to Heap::UncommitFromSpace and then waits for the
// memory allocator's unmapper to finish, so that committed-memory
// measurements taken afterwards do not race the background unmapping work.
void HeapTester::UncommitFromSpace(Heap* heap) {
  heap->UncommitFromSpace();
  heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted();
}
6601 :
// Helper for the Managed<T> regression test below: a trivial native object
// whose Deleter matches the void(*)(void*) finalizer signature.
class DeleteNative {
 public:
  // Finalizer callback: reclaims the instance passed as an opaque pointer.
  // static_cast is the correct named cast for void* -> T*; reinterpret_cast
  // was unnecessarily strong here.
  static void Deleter(void* arg) {
    delete static_cast<DeleteNative*>(arg);
  }
};
6608 :
6609 26068 : TEST(Regress8014) {
6610 : Isolate* isolate = CcTest::InitIsolateOnce();
6611 : Heap* heap = isolate->heap();
6612 : {
6613 : HandleScope scope(isolate);
6614 100005 : for (int i = 0; i < 10000; i++) {
6615 : auto handle = Managed<DeleteNative>::FromRawPtr(isolate, 1000000,
6616 50000 : new DeleteNative());
6617 : USE(handle);
6618 : }
6619 : }
6620 : int ms_count = heap->ms_count();
6621 5 : heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
6622 : // Several GCs can be triggred by the above call.
6623 : // The bad case triggers 10000 GCs.
6624 10 : CHECK_LE(heap->ms_count(), ms_count + 10);
6625 5 : }
6626 :
// Regression test for v8:8617: the marking write barrier updated a descriptor
// array's marked-descriptor count without re-coloring the array, so a slot
// pointing at an evacuated function could survive unrecorded and go stale.
// The exact ordering of the GC steps below reproduces that scenario.
TEST(Regress8617) {
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  LocalContext env;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);
  heap::SimulateFullSpace(heap->old_space());
  // Step 1. Create a function and ensure that it is in the old space.
  Handle<Object> foo =
      v8::Utils::OpenHandle(*CompileRun("function foo() { return 42; };"
                                        "foo;"));
  if (heap->InYoungGeneration(*foo)) {
    // Two young-generation GCs promote a surviving object to old space.
    CcTest::CollectGarbage(NEW_SPACE);
    CcTest::CollectGarbage(NEW_SPACE);
  }
  // Step 2. Create an object with a reference to foo in the descriptor array.
  CompileRun(
      "var obj = {};"
      "obj.method = foo;"
      "obj;");
  // Step 3. Make sure that foo moves during Mark-Compact.
  Page* ec_page = Page::FromAddress(foo->ptr());
  heap::ForceEvacuationCandidate(ec_page);
  // Step 4. Start incremental marking.
  heap::SimulateIncrementalMarking(heap, false);
  CHECK(ec_page->IsEvacuationCandidate());
  // Step 5. Install a new descriptor array on the map of the object.
  // This runs the marking barrier for the descriptor array.
  // In the bad case it sets the number of marked descriptors but does not
  // change the color of the descriptor array.
  CompileRun("obj.bar = 10;");
  // Step 6. Promote the descriptor array to old space. During promotion
  // the Scavenger will not record the slot of foo in the descriptor array.
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  // Step 7. Complete the Mark-Compact.
  CcTest::CollectAllGarbage();
  // Step 8. Use the descriptor for foo, which contains a stale pointer.
  CompileRun("obj.method()");
}
6668 :
6669 26068 : HEAP_TEST(MemoryReducerActivationForSmallHeaps) {
6670 : ManualGCScope manual_gc_scope;
6671 5 : LocalContext env;
6672 : Isolate* isolate = CcTest::i_isolate();
6673 : Heap* heap = isolate->heap();
6674 5 : CHECK_EQ(heap->memory_reducer()->state_.action, MemoryReducer::Action::kDone);
6675 : HandleScope scope(isolate);
6676 : const size_t kActivationThreshold = 1 * MB;
6677 5 : size_t initial_capacity = heap->OldGenerationCapacity();
6678 4543 : while (heap->OldGenerationCapacity() <
6679 1516 : initial_capacity + kActivationThreshold) {
6680 1511 : isolate->factory()->NewFixedArray(1 * KB, AllocationType::kOld);
6681 : }
6682 5 : CHECK_EQ(heap->memory_reducer()->state_.action, MemoryReducer::Action::kWait);
6683 5 : }
6684 :
6685 : } // namespace heap
6686 : } // namespace internal
6687 78189 : } // namespace v8
6688 :
6689 : #undef __
|