Line data Source code
1 : // Copyright 2012 the V8 project authors. All rights reserved.
2 : // Redistribution and use in source and binary forms, with or without
3 : // modification, are permitted provided that the following conditions are
4 : // met:
5 : //
6 : // * Redistributions of source code must retain the above copyright
7 : // notice, this list of conditions and the following disclaimer.
8 : // * Redistributions in binary form must reproduce the above
9 : // copyright notice, this list of conditions and the following
10 : // disclaimer in the documentation and/or other materials provided
11 : // with the distribution.
12 : // * Neither the name of Google Inc. nor the names of its
13 : // contributors may be used to endorse or promote products derived
14 : // from this software without specific prior written permission.
15 : //
16 : // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 : // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 : // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 : // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 : // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 : // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 : // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 : // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 : // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 : // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 : // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 :
28 : #include <stdlib.h>
29 : #include <utility>
30 :
31 : #include "src/api.h"
32 : #include "src/assembler-inl.h"
33 : #include "src/code-stubs.h"
34 : #include "src/compilation-cache.h"
35 : #include "src/debug/debug.h"
36 : #include "src/deoptimizer.h"
37 : #include "src/elements.h"
38 : #include "src/execution.h"
39 : #include "src/factory.h"
40 : #include "src/field-type.h"
41 : #include "src/global-handles.h"
42 : #include "src/heap/gc-tracer.h"
43 : #include "src/heap/incremental-marking.h"
44 : #include "src/heap/mark-compact.h"
45 : #include "src/heap/memory-reducer.h"
46 : #include "src/ic/ic.h"
47 : #include "src/macro-assembler-inl.h"
48 : #include "src/objects-inl.h"
49 : #include "src/regexp/jsregexp.h"
50 : #include "src/snapshot/snapshot.h"
51 : #include "src/transitions.h"
52 : #include "test/cctest/cctest.h"
53 : #include "test/cctest/heap/heap-tester.h"
54 : #include "test/cctest/heap/heap-utils.h"
55 : #include "test/cctest/test-feedback-vector.h"
56 : #include "test/cctest/test-transitions.h"
57 :
58 : namespace v8 {
59 : namespace internal {
60 : namespace heap {
61 :
62 : // We only start allocation-site tracking with the second instantiation.
63 : static const int kPretenureCreationCount =
64 : AllocationSite::kPretenureMinimumCreated + 1;
65 :
66 25 : static void CheckMap(Map* map, int type, int instance_size) {
67 25 : CHECK(map->IsHeapObject());
68 : #ifdef DEBUG
69 : CHECK(CcTest::heap()->Contains(map));
70 : #endif
71 25 : CHECK_EQ(CcTest::heap()->meta_map(), map->map());
72 25 : CHECK_EQ(type, map->instance_type());
73 25 : CHECK_EQ(instance_size, map->instance_size());
74 25 : }
75 :
76 :
77 23723 : TEST(HeapMaps) {
78 5 : CcTest::InitializeVM();
79 25 : Heap* heap = CcTest::heap();
80 5 : CheckMap(heap->meta_map(), MAP_TYPE, Map::kSize);
81 5 : CheckMap(heap->heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
82 5 : CheckMap(heap->fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
83 5 : CheckMap(heap->hash_table_map(), HASH_TABLE_TYPE, kVariableSizeSentinel);
84 5 : CheckMap(heap->string_map(), STRING_TYPE, kVariableSizeSentinel);
85 5 : }
86 :
87 15 : static void VerifyStoredPrototypeMap(Isolate* isolate,
88 : int stored_map_context_index,
89 : int stored_ctor_context_index) {
90 15 : Handle<Context> context = isolate->native_context();
91 :
92 : Handle<Map> this_map(Map::cast(context->get(stored_map_context_index)));
93 :
94 : Handle<JSFunction> fun(
95 : JSFunction::cast(context->get(stored_ctor_context_index)));
96 : Handle<JSObject> proto(JSObject::cast(fun->initial_map()->prototype()));
97 : Handle<Map> that_map(proto->map());
98 :
99 15 : CHECK(proto->HasFastProperties());
100 15 : CHECK_EQ(*this_map, *that_map);
101 15 : }
102 :
103 : // Checks that critical maps stored on the context (mostly used for fast-path
104 : // checks) are unchanged after initialization.
105 23723 : TEST(ContextMaps) {
106 5 : CcTest::InitializeVM();
107 : Isolate* isolate = CcTest::i_isolate();
108 : HandleScope handle_scope(isolate);
109 :
110 : VerifyStoredPrototypeMap(isolate,
111 : Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX,
112 5 : Context::STRING_FUNCTION_INDEX);
113 : VerifyStoredPrototypeMap(isolate, Context::REGEXP_PROTOTYPE_MAP_INDEX,
114 5 : Context::REGEXP_FUNCTION_INDEX);
115 : VerifyStoredPrototypeMap(isolate, Context::PROMISE_PROTOTYPE_MAP_INDEX,
116 5 : Context::PROMISE_FUNCTION_INDEX);
117 5 : }
118 :
119 23723 : TEST(InitialObjects) {
120 5 : LocalContext env;
121 : HandleScope scope(CcTest::i_isolate());
122 : Handle<Context> context = v8::Utils::OpenHandle(*env);
123 : // Initial ArrayIterator prototype.
124 5 : CHECK_EQ(
125 : context->initial_array_iterator_prototype(),
126 : *v8::Utils::OpenHandle(*CompileRun("[][Symbol.iterator]().__proto__")));
127 : // Initial ArrayIterator prototype map.
128 5 : CHECK_EQ(context->initial_array_iterator_prototype_map(),
129 : context->initial_array_iterator_prototype()->map());
130 : // Initial Array prototype.
131 5 : CHECK_EQ(context->initial_array_prototype(),
132 : *v8::Utils::OpenHandle(*CompileRun("Array.prototype")));
133 : // Initial Generator prototype.
134 5 : CHECK_EQ(context->initial_generator_prototype(),
135 : *v8::Utils::OpenHandle(
136 : *CompileRun("(function*(){}).__proto__.prototype")));
137 : // Initial Iterator prototype.
138 5 : CHECK_EQ(context->initial_iterator_prototype(),
139 : *v8::Utils::OpenHandle(
140 : *CompileRun("[][Symbol.iterator]().__proto__.__proto__")));
141 : // Initial Object prototype.
142 5 : CHECK_EQ(context->initial_object_prototype(),
143 5 : *v8::Utils::OpenHandle(*CompileRun("Object.prototype")));
144 5 : }
145 :
146 20 : static void CheckOddball(Isolate* isolate, Object* obj, const char* string) {
147 20 : CHECK(obj->IsOddball());
148 : Handle<Object> handle(obj, isolate);
149 40 : Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
150 20 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
151 20 : }
152 :
153 :
154 15 : static void CheckSmi(Isolate* isolate, int value, const char* string) {
155 : Handle<Object> handle(Smi::FromInt(value), isolate);
156 30 : Object* print_string = *Object::ToString(isolate, handle).ToHandleChecked();
157 15 : CHECK(String::cast(print_string)->IsUtf8EqualTo(CStrVector(string)));
158 15 : }
159 :
160 :
161 5 : static void CheckNumber(Isolate* isolate, double value, const char* string) {
162 5 : Handle<Object> number = isolate->factory()->NewNumber(value);
163 5 : CHECK(number->IsNumber());
164 : Handle<Object> print_string =
165 10 : Object::ToString(isolate, number).ToHandleChecked();
166 5 : CHECK(String::cast(*print_string)->IsUtf8EqualTo(CStrVector(string)));
167 5 : }
168 :
169 10 : void CheckEmbeddedObjectsAreEqual(Handle<Code> lhs, Handle<Code> rhs) {
170 : int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
171 10 : RelocIterator lhs_it(*lhs, mode_mask);
172 10 : RelocIterator rhs_it(*rhs, mode_mask);
173 30 : while (!lhs_it.done() && !rhs_it.done()) {
174 30 : CHECK(lhs_it.rinfo()->target_object() == rhs_it.rinfo()->target_object());
175 :
176 10 : lhs_it.next();
177 10 : rhs_it.next();
178 : }
179 10 : CHECK(lhs_it.done() == rhs_it.done());
180 10 : }
181 :
182 23723 : HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
183 5 : CcTest::InitializeVM();
184 : Isolate* isolate = CcTest::i_isolate();
185 : Factory* factory = isolate->factory();
186 5 : Heap* heap = isolate->heap();
187 : HandleScope sc(isolate);
188 :
189 : Handle<HeapNumber> value = factory->NewHeapNumber(1.000123);
190 5 : CHECK(heap->InNewSpace(*value));
191 :
192 : i::byte buffer[i::Assembler::kMinimalBufferSize];
193 : MacroAssembler masm(isolate, buffer, sizeof(buffer),
194 5 : v8::internal::CodeObjectRequired::kYes);
195 : // Add a new-space reference to the code.
196 5 : masm.Push(value);
197 :
198 : CodeDesc desc;
199 5 : masm.GetCode(isolate, &desc);
200 : Handle<Code> code =
201 5 : isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
202 :
203 : Code* tmp = nullptr;
204 10 : heap->CopyCode(*code).To(&tmp);
205 : Handle<Code> copy(tmp);
206 :
207 5 : CheckEmbeddedObjectsAreEqual(code, copy);
208 5 : CcTest::CollectAllAvailableGarbage();
209 5 : CheckEmbeddedObjectsAreEqual(code, copy);
210 5 : }
211 :
212 5 : static void CheckFindCodeObject(Isolate* isolate) {
213 : // Test FindCodeObject
214 : #define __ assm.
215 :
216 5 : Assembler assm(isolate, nullptr, 0);
217 :
218 5 : __ nop(); // supported on all architectures
219 :
220 : CodeDesc desc;
221 5 : assm.GetCode(isolate, &desc);
222 : Handle<Code> code =
223 5 : isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
224 5 : CHECK(code->IsCode());
225 :
226 : HeapObject* obj = HeapObject::cast(*code);
227 5 : Address obj_addr = obj->address();
228 :
229 85 : for (int i = 0; i < obj->Size(); i += kPointerSize) {
230 80 : Object* found = isolate->FindCodeObject(obj_addr + i);
231 80 : CHECK_EQ(*code, found);
232 : }
233 :
234 : Handle<Code> copy =
235 5 : isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
236 : HeapObject* obj_copy = HeapObject::cast(*copy);
237 5 : Object* not_right = isolate->FindCodeObject(obj_copy->address() +
238 5 : obj_copy->Size() / 2);
239 5 : CHECK(not_right != *code);
240 5 : }
241 :
242 :
243 23723 : TEST(HandleNull) {
244 5 : CcTest::InitializeVM();
245 : Isolate* isolate = CcTest::i_isolate();
246 : HandleScope outer_scope(isolate);
247 10 : LocalContext context;
248 : Handle<Object> n(static_cast<Object*>(nullptr), isolate);
249 5 : CHECK(!n.is_null());
250 5 : }
251 :
252 :
253 23723 : TEST(HeapObjects) {
254 5 : CcTest::InitializeVM();
255 : Isolate* isolate = CcTest::i_isolate();
256 : Factory* factory = isolate->factory();
257 20 : Heap* heap = isolate->heap();
258 :
259 : HandleScope sc(isolate);
260 5 : Handle<Object> value = factory->NewNumber(1.000123);
261 5 : CHECK(value->IsHeapNumber());
262 5 : CHECK(value->IsNumber());
263 5 : CHECK_EQ(1.000123, value->Number());
264 :
265 5 : value = factory->NewNumber(1.0);
266 5 : CHECK(value->IsSmi());
267 5 : CHECK(value->IsNumber());
268 5 : CHECK_EQ(1.0, value->Number());
269 :
270 5 : value = factory->NewNumberFromInt(1024);
271 5 : CHECK(value->IsSmi());
272 5 : CHECK(value->IsNumber());
273 5 : CHECK_EQ(1024.0, value->Number());
274 :
275 5 : value = factory->NewNumberFromInt(Smi::kMinValue);
276 5 : CHECK(value->IsSmi());
277 5 : CHECK(value->IsNumber());
278 5 : CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
279 :
280 5 : value = factory->NewNumberFromInt(Smi::kMaxValue);
281 5 : CHECK(value->IsSmi());
282 5 : CHECK(value->IsNumber());
283 5 : CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
284 :
285 : #if !defined(V8_TARGET_ARCH_64_BIT)
286 : // TODO(lrn): We need a NumberFromIntptr function in order to test this.
287 : value = factory->NewNumberFromInt(Smi::kMinValue - 1);
288 : CHECK(value->IsHeapNumber());
289 : CHECK(value->IsNumber());
290 : CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
291 : #endif
292 :
293 5 : value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
294 5 : CHECK(value->IsHeapNumber());
295 5 : CHECK(value->IsNumber());
296 5 : CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
297 : value->Number());
298 :
299 5 : value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
300 5 : CHECK(value->IsHeapNumber());
301 5 : CHECK(value->IsNumber());
302 5 : CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
303 : value->Number());
304 :
305 : // nan oddball checks
306 5 : CHECK(factory->nan_value()->IsNumber());
307 5 : CHECK(std::isnan(factory->nan_value()->Number()));
308 :
309 5 : Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
310 5 : CHECK(s->IsString());
311 5 : CHECK_EQ(10, s->length());
312 :
313 : Handle<String> object_string = Handle<String>::cast(factory->Object_string());
314 : Handle<JSGlobalObject> global(
315 5 : CcTest::i_isolate()->context()->global_object());
316 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
317 :
318 : // Check ToString for oddballs
319 5 : CheckOddball(isolate, heap->true_value(), "true");
320 5 : CheckOddball(isolate, heap->false_value(), "false");
321 5 : CheckOddball(isolate, heap->null_value(), "null");
322 5 : CheckOddball(isolate, heap->undefined_value(), "undefined");
323 :
324 : // Check ToString for Smis
325 5 : CheckSmi(isolate, 0, "0");
326 5 : CheckSmi(isolate, 42, "42");
327 5 : CheckSmi(isolate, -42, "-42");
328 :
329 : // Check ToString for Numbers
330 5 : CheckNumber(isolate, 1.1, "1.1");
331 :
332 5 : CheckFindCodeObject(isolate);
333 5 : }
334 :
335 23723 : TEST(Tagging) {
336 5 : CcTest::InitializeVM();
337 : int request = 24;
338 : CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
339 : CHECK(Smi::FromInt(42)->IsSmi());
340 : CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
341 : CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
342 5 : }
343 :
344 :
345 23723 : TEST(GarbageCollection) {
346 5 : CcTest::InitializeVM();
347 : Isolate* isolate = CcTest::i_isolate();
348 : Factory* factory = isolate->factory();
349 :
350 : HandleScope sc(isolate);
351 : // Check GC.
352 5 : CcTest::CollectGarbage(NEW_SPACE);
353 :
354 : Handle<JSGlobalObject> global(
355 5 : CcTest::i_isolate()->context()->global_object());
356 5 : Handle<String> name = factory->InternalizeUtf8String("theFunction");
357 5 : Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
358 5 : Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
359 5 : Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
360 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
361 : Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
362 :
363 : {
364 : HandleScope inner_scope(isolate);
365 : // Allocate a function and keep it in global object's property.
366 5 : Handle<JSFunction> function = factory->NewFunction(name);
367 5 : JSReceiver::SetProperty(global, name, function, LanguageMode::kSloppy)
368 10 : .Check();
369 : // Allocate an object. Unrooted after leaving the scope.
370 5 : Handle<JSObject> obj = factory->NewJSObject(function);
371 5 : JSReceiver::SetProperty(obj, prop_name, twenty_three, LanguageMode::kSloppy)
372 10 : .Check();
373 5 : JSReceiver::SetProperty(obj, prop_namex, twenty_four, LanguageMode::kSloppy)
374 10 : .Check();
375 :
376 10 : CHECK_EQ(Smi::FromInt(23),
377 : *Object::GetProperty(obj, prop_name).ToHandleChecked());
378 10 : CHECK_EQ(Smi::FromInt(24),
379 : *Object::GetProperty(obj, prop_namex).ToHandleChecked());
380 : }
381 :
382 5 : CcTest::CollectGarbage(NEW_SPACE);
383 :
384 : // Function should be alive.
385 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
386 : // Check function is retained.
387 : Handle<Object> func_value =
388 10 : Object::GetProperty(global, name).ToHandleChecked();
389 5 : CHECK(func_value->IsJSFunction());
390 5 : Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
391 :
392 : {
393 : HandleScope inner_scope(isolate);
394 : // Allocate another object, make it reachable from global.
395 5 : Handle<JSObject> obj = factory->NewJSObject(function);
396 5 : JSReceiver::SetProperty(global, obj_name, obj, LanguageMode::kSloppy)
397 10 : .Check();
398 5 : JSReceiver::SetProperty(obj, prop_name, twenty_three, LanguageMode::kSloppy)
399 10 : .Check();
400 : }
401 :
402 : // After gc, it should survive.
403 5 : CcTest::CollectGarbage(NEW_SPACE);
404 :
405 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
406 : Handle<Object> obj =
407 10 : Object::GetProperty(global, obj_name).ToHandleChecked();
408 5 : CHECK(obj->IsJSObject());
409 10 : CHECK_EQ(Smi::FromInt(23),
410 : *Object::GetProperty(obj, prop_name).ToHandleChecked());
411 5 : }
412 :
413 :
414 25 : static void VerifyStringAllocation(Isolate* isolate, const char* string) {
415 : HandleScope scope(isolate);
416 : Handle<String> s = isolate->factory()->NewStringFromUtf8(
417 50 : CStrVector(string)).ToHandleChecked();
418 25 : CHECK_EQ(StrLength(string), s->length());
419 385 : for (int index = 0; index < s->length(); index++) {
420 180 : CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
421 : }
422 25 : }
423 :
424 :
425 23723 : TEST(String) {
426 5 : CcTest::InitializeVM();
427 5 : Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
428 :
429 5 : VerifyStringAllocation(isolate, "a");
430 5 : VerifyStringAllocation(isolate, "ab");
431 5 : VerifyStringAllocation(isolate, "abc");
432 5 : VerifyStringAllocation(isolate, "abcd");
433 5 : VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
434 5 : }
435 :
436 :
437 23723 : TEST(LocalHandles) {
438 5 : CcTest::InitializeVM();
439 : Isolate* isolate = CcTest::i_isolate();
440 : Factory* factory = isolate->factory();
441 :
442 5 : v8::HandleScope scope(CcTest::isolate());
443 : const char* name = "Kasper the spunky";
444 5 : Handle<String> string = factory->NewStringFromAsciiChecked(name);
445 5 : CHECK_EQ(StrLength(name), string->length());
446 5 : }
447 :
448 :
449 23723 : TEST(GlobalHandles) {
450 5 : CcTest::InitializeVM();
451 5 : Isolate* isolate = CcTest::i_isolate();
452 : Factory* factory = isolate->factory();
453 : GlobalHandles* global_handles = isolate->global_handles();
454 :
455 : Handle<Object> h1;
456 : Handle<Object> h2;
457 : Handle<Object> h3;
458 : Handle<Object> h4;
459 :
460 : {
461 : HandleScope scope(isolate);
462 :
463 5 : Handle<Object> i = factory->NewStringFromStaticChars("fisk");
464 5 : Handle<Object> u = factory->NewNumber(1.12344);
465 :
466 5 : h1 = global_handles->Create(*i);
467 5 : h2 = global_handles->Create(*u);
468 5 : h3 = global_handles->Create(*i);
469 5 : h4 = global_handles->Create(*u);
470 : }
471 :
472 : // after gc, it should survive
473 5 : CcTest::CollectGarbage(NEW_SPACE);
474 :
475 5 : CHECK((*h1)->IsString());
476 5 : CHECK((*h2)->IsHeapNumber());
477 5 : CHECK((*h3)->IsString());
478 5 : CHECK((*h4)->IsHeapNumber());
479 :
480 5 : CHECK_EQ(*h3, *h1);
481 5 : GlobalHandles::Destroy(h1.location());
482 5 : GlobalHandles::Destroy(h3.location());
483 :
484 5 : CHECK_EQ(*h4, *h2);
485 5 : GlobalHandles::Destroy(h2.location());
486 5 : GlobalHandles::Destroy(h4.location());
487 5 : }
488 :
489 :
490 : static bool WeakPointerCleared = false;
491 :
492 15 : static void TestWeakGlobalHandleCallback(
493 15 : const v8::WeakCallbackInfo<void>& data) {
494 : std::pair<v8::Persistent<v8::Value>*, int>* p =
495 : reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
496 : data.GetParameter());
497 15 : if (p->second == 1234) WeakPointerCleared = true;
498 15 : p->first->Reset();
499 15 : }
500 :
501 :
502 23723 : TEST(WeakGlobalHandlesScavenge) {
503 5 : FLAG_stress_compaction = false;
504 5 : FLAG_stress_incremental_marking = false;
505 5 : CcTest::InitializeVM();
506 5 : Isolate* isolate = CcTest::i_isolate();
507 : Factory* factory = isolate->factory();
508 : GlobalHandles* global_handles = isolate->global_handles();
509 :
510 5 : WeakPointerCleared = false;
511 :
512 : Handle<Object> h1;
513 : Handle<Object> h2;
514 :
515 : {
516 : HandleScope scope(isolate);
517 :
518 5 : Handle<Object> i = factory->NewStringFromStaticChars("fisk");
519 5 : Handle<Object> u = factory->NewNumber(1.12344);
520 :
521 5 : h1 = global_handles->Create(*i);
522 5 : h2 = global_handles->Create(*u);
523 : }
524 :
525 : std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
526 : GlobalHandles::MakeWeak(
527 : h2.location(), reinterpret_cast<void*>(&handle_and_id),
528 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
529 :
530 : // Scavenge treats weak pointers as normal roots.
531 5 : CcTest::CollectGarbage(NEW_SPACE);
532 :
533 5 : CHECK((*h1)->IsString());
534 5 : CHECK((*h2)->IsHeapNumber());
535 :
536 5 : CHECK(!WeakPointerCleared);
537 5 : CHECK(!global_handles->IsNearDeath(h2.location()));
538 5 : CHECK(!global_handles->IsNearDeath(h1.location()));
539 :
540 5 : GlobalHandles::Destroy(h1.location());
541 5 : GlobalHandles::Destroy(h2.location());
542 5 : }
543 :
544 23723 : TEST(WeakGlobalUnmodifiedApiHandlesScavenge) {
545 5 : CcTest::InitializeVM();
546 5 : Isolate* isolate = CcTest::i_isolate();
547 5 : LocalContext context;
548 : Factory* factory = isolate->factory();
549 : GlobalHandles* global_handles = isolate->global_handles();
550 :
551 5 : WeakPointerCleared = false;
552 :
553 : Handle<Object> h1;
554 : Handle<Object> h2;
555 :
556 : {
557 : HandleScope scope(isolate);
558 :
559 : // Create an Api object that is unmodified.
560 10 : auto function = FunctionTemplate::New(context->GetIsolate())
561 15 : ->GetFunction(context.local())
562 5 : .ToLocalChecked();
563 5 : auto i = function->NewInstance(context.local()).ToLocalChecked();
564 5 : Handle<Object> u = factory->NewNumber(1.12344);
565 :
566 5 : h1 = global_handles->Create(*u);
567 5 : h2 = global_handles->Create(*(reinterpret_cast<internal::Object**>(*i)));
568 : }
569 :
570 : std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
571 : GlobalHandles::MakeWeak(
572 : h2.location(), reinterpret_cast<void*>(&handle_and_id),
573 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
574 :
575 5 : CcTest::CollectGarbage(NEW_SPACE);
576 :
577 5 : CHECK((*h1)->IsHeapNumber());
578 5 : CHECK(WeakPointerCleared);
579 5 : CHECK(!global_handles->IsNearDeath(h1.location()));
580 :
581 5 : GlobalHandles::Destroy(h1.location());
582 5 : }
583 :
584 23723 : TEST(WeakGlobalApiHandleModifiedMapScavenge) {
585 5 : CcTest::InitializeVM();
586 5 : Isolate* isolate = CcTest::i_isolate();
587 5 : LocalContext context;
588 : GlobalHandles* global_handles = isolate->global_handles();
589 :
590 5 : WeakPointerCleared = false;
591 :
592 : Handle<Object> h1;
593 :
594 : {
595 : HandleScope scope(isolate);
596 :
597 : // Create an API object which does not have the same map as constructor.
598 5 : auto function_template = FunctionTemplate::New(context->GetIsolate());
599 5 : auto instance_t = function_template->InstanceTemplate();
600 : instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "a",
601 5 : NewStringType::kNormal)
602 : .ToLocalChecked(),
603 15 : v8::Number::New(context->GetIsolate(), 10));
604 : auto function =
605 5 : function_template->GetFunction(context.local()).ToLocalChecked();
606 5 : auto i = function->NewInstance(context.local()).ToLocalChecked();
607 :
608 5 : h1 = global_handles->Create(*(reinterpret_cast<internal::Object**>(*i)));
609 : }
610 :
611 : std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
612 : GlobalHandles::MakeWeak(
613 : h1.location(), reinterpret_cast<void*>(&handle_and_id),
614 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
615 :
616 5 : CcTest::CollectGarbage(NEW_SPACE);
617 :
618 5 : CHECK(!WeakPointerCleared);
619 5 : CHECK(!global_handles->IsNearDeath(h1.location()));
620 :
621 5 : GlobalHandles::Destroy(h1.location());
622 5 : }
623 :
624 23723 : TEST(WeakGlobalApiHandleWithElementsScavenge) {
625 5 : CcTest::InitializeVM();
626 5 : Isolate* isolate = CcTest::i_isolate();
627 5 : LocalContext context;
628 : GlobalHandles* global_handles = isolate->global_handles();
629 :
630 5 : WeakPointerCleared = false;
631 :
632 : Handle<Object> h1;
633 :
634 : {
635 : HandleScope scope(isolate);
636 :
637 : // Create an API object which has elements.
638 5 : auto function_template = FunctionTemplate::New(context->GetIsolate());
639 5 : auto instance_t = function_template->InstanceTemplate();
640 : instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "1",
641 5 : NewStringType::kNormal)
642 : .ToLocalChecked(),
643 15 : v8::Number::New(context->GetIsolate(), 10));
644 : instance_t->Set(v8::String::NewFromUtf8(context->GetIsolate(), "2",
645 5 : NewStringType::kNormal)
646 : .ToLocalChecked(),
647 15 : v8::Number::New(context->GetIsolate(), 10));
648 : auto function =
649 5 : function_template->GetFunction(context.local()).ToLocalChecked();
650 5 : auto i = function->NewInstance(context.local()).ToLocalChecked();
651 :
652 5 : h1 = global_handles->Create(*(reinterpret_cast<internal::Object**>(*i)));
653 : }
654 :
655 : std::pair<Handle<Object>*, int> handle_and_id(&h1, 1234);
656 : GlobalHandles::MakeWeak(
657 : h1.location(), reinterpret_cast<void*>(&handle_and_id),
658 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
659 :
660 5 : CcTest::CollectGarbage(NEW_SPACE);
661 :
662 5 : CHECK(!WeakPointerCleared);
663 5 : CHECK(!global_handles->IsNearDeath(h1.location()));
664 :
665 5 : GlobalHandles::Destroy(h1.location());
666 5 : }
667 :
668 23723 : TEST(WeakGlobalHandlesMark) {
669 5 : FLAG_stress_incremental_marking = false;
670 5 : CcTest::InitializeVM();
671 5 : Isolate* isolate = CcTest::i_isolate();
672 : Heap* heap = isolate->heap();
673 : Factory* factory = isolate->factory();
674 : GlobalHandles* global_handles = isolate->global_handles();
675 :
676 5 : WeakPointerCleared = false;
677 :
678 : Handle<Object> h1;
679 : Handle<Object> h2;
680 :
681 : {
682 : HandleScope scope(isolate);
683 :
684 5 : Handle<Object> i = factory->NewStringFromStaticChars("fisk");
685 5 : Handle<Object> u = factory->NewNumber(1.12344);
686 :
687 5 : h1 = global_handles->Create(*i);
688 5 : h2 = global_handles->Create(*u);
689 : }
690 :
691 : // Make sure the objects are promoted.
692 5 : CcTest::CollectGarbage(OLD_SPACE);
693 5 : CcTest::CollectGarbage(NEW_SPACE);
694 10 : CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
695 :
696 : std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
697 : GlobalHandles::MakeWeak(
698 : h2.location(), reinterpret_cast<void*>(&handle_and_id),
699 5 : &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
700 5 : CHECK(!GlobalHandles::IsNearDeath(h1.location()));
701 5 : CHECK(!GlobalHandles::IsNearDeath(h2.location()));
702 :
703 : // Incremental marking potentially marked handles before they turned weak.
704 5 : CcTest::CollectAllGarbage();
705 :
706 5 : CHECK((*h1)->IsString());
707 :
708 5 : CHECK(WeakPointerCleared);
709 5 : CHECK(!GlobalHandles::IsNearDeath(h1.location()));
710 :
711 5 : GlobalHandles::Destroy(h1.location());
712 5 : }
713 :
714 :
715 23723 : TEST(DeleteWeakGlobalHandle) {
716 5 : FLAG_stress_compaction = false;
717 5 : FLAG_stress_incremental_marking = false;
718 5 : CcTest::InitializeVM();
719 5 : Isolate* isolate = CcTest::i_isolate();
720 : Factory* factory = isolate->factory();
721 : GlobalHandles* global_handles = isolate->global_handles();
722 :
723 5 : WeakPointerCleared = false;
724 :
725 : Handle<Object> h;
726 :
727 : {
728 : HandleScope scope(isolate);
729 :
730 5 : Handle<Object> i = factory->NewStringFromStaticChars("fisk");
731 5 : h = global_handles->Create(*i);
732 : }
733 :
734 : std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
735 : GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
736 : &TestWeakGlobalHandleCallback,
737 5 : v8::WeakCallbackType::kParameter);
738 :
739 : // Scanvenge does not recognize weak reference.
740 5 : CcTest::CollectGarbage(NEW_SPACE);
741 :
742 5 : CHECK(!WeakPointerCleared);
743 :
744 : // Mark-compact treats weak reference properly.
745 5 : CcTest::CollectGarbage(OLD_SPACE);
746 :
747 5 : CHECK(WeakPointerCleared);
748 5 : }
749 :
750 23723 : TEST(BytecodeArray) {
751 10 : if (FLAG_never_compact) return;
752 : static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
753 : static const int kRawBytesSize = sizeof(kRawBytes);
754 : static const int kFrameSize = 32;
755 : static const int kParameterCount = 2;
756 :
757 5 : FLAG_concurrent_marking = false;
758 5 : FLAG_manual_evacuation_candidates_selection = true;
759 5 : FLAG_stress_incremental_marking = false;
760 5 : CcTest::InitializeVM();
761 : Isolate* isolate = CcTest::i_isolate();
762 5 : Heap* heap = isolate->heap();
763 : Factory* factory = isolate->factory();
764 : HandleScope scope(isolate);
765 :
766 5 : heap::SimulateFullSpace(heap->old_space());
767 5 : Handle<FixedArray> constant_pool = factory->NewFixedArray(5, TENURED);
768 30 : for (int i = 0; i < 5; i++) {
769 25 : Handle<Object> number = factory->NewHeapNumber(i);
770 25 : constant_pool->set(i, *number);
771 : }
772 :
773 : // Allocate and initialize BytecodeArray
774 : Handle<BytecodeArray> array = factory->NewBytecodeArray(
775 5 : kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
776 :
777 5 : CHECK(array->IsBytecodeArray());
778 5 : CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
779 5 : CHECK_EQ(array->frame_size(), kFrameSize);
780 5 : CHECK_EQ(array->parameter_count(), kParameterCount);
781 5 : CHECK_EQ(array->constant_pool(), *constant_pool);
782 5 : CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
783 10 : CHECK_GE(array->address() + array->BytecodeArraySize(),
784 : array->GetFirstBytecodeAddress() + array->length());
785 20 : for (int i = 0; i < kRawBytesSize; i++) {
786 20 : CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
787 20 : CHECK_EQ(array->get(i), kRawBytes[i]);
788 : }
789 :
790 : FixedArray* old_constant_pool_address = *constant_pool;
791 :
792 : // Perform a full garbage collection and force the constant pool to be on an
793 : // evacuation candidate.
794 5 : Page* evac_page = Page::FromAddress(constant_pool->address());
795 5 : heap::ForceEvacuationCandidate(evac_page);
796 5 : CcTest::CollectAllGarbage();
797 :
798 : // BytecodeArray should survive.
799 5 : CHECK_EQ(array->length(), kRawBytesSize);
800 5 : CHECK_EQ(array->frame_size(), kFrameSize);
801 20 : for (int i = 0; i < kRawBytesSize; i++) {
802 40 : CHECK_EQ(array->get(i), kRawBytes[i]);
803 20 : CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
804 : }
805 :
806 : // Constant pool should have been migrated.
807 5 : CHECK_EQ(array->constant_pool(), *constant_pool);
808 5 : CHECK_NE(array->constant_pool(), old_constant_pool_address);
809 : }
810 :
811 23723 : TEST(BytecodeArrayAging) {
812 : static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
813 : static const int kRawBytesSize = sizeof(kRawBytes);
814 : static const int kFrameSize = 32;
815 : static const int kParameterCount = 2;
816 5 : CcTest::InitializeVM();
817 : Isolate* isolate = CcTest::i_isolate();
818 : Factory* factory = isolate->factory();
819 : HandleScope scope(isolate);
820 :
821 : Handle<BytecodeArray> array =
822 : factory->NewBytecodeArray(kRawBytesSize, kRawBytes, kFrameSize,
823 5 : kParameterCount, factory->empty_fixed_array());
824 :
825 5 : CHECK_EQ(BytecodeArray::kFirstBytecodeAge, array->bytecode_age());
826 5 : array->MakeOlder();
827 5 : CHECK_EQ(BytecodeArray::kQuadragenarianBytecodeAge, array->bytecode_age());
828 : array->set_bytecode_age(BytecodeArray::kLastBytecodeAge);
829 5 : array->MakeOlder();
830 5 : CHECK_EQ(BytecodeArray::kLastBytecodeAge, array->bytecode_age());
831 5 : }
832 :
// A fixed list of strings (mostly reserved words) used to exercise the
// string table. The array is terminated by a nullptr sentinel; `0` was
// previously used as the null pointer constant, which is inconsistent with
// the rest of the file's use of nullptr.
static const char* not_so_random_string_table[] = {
    "abstract",   "boolean",      "break",      "byte",    "case",
    "catch",      "char",         "class",      "const",   "continue",
    "debugger",   "default",      "delete",     "do",      "double",
    "else",       "enum",         "export",     "extends", "false",
    "final",      "finally",      "float",      "for",     "function",
    "goto",       "if",           "implements", "import",  "in",
    "instanceof", "int",          "interface",  "long",    "native",
    "new",        "null",         "package",    "private", "protected",
    "public",     "return",       "short",      "static",  "super",
    "switch",     "synchronized", "this",       "throw",   "throws",
    "transient",  "true",         "try",        "typeof",  "var",
    "void",       "volatile",     "while",      "with",    nullptr};
895 :
896 :
897 10 : static void CheckInternalizedStrings(const char** strings) {
898 : Isolate* isolate = CcTest::i_isolate();
899 : Factory* factory = isolate->factory();
900 600 : for (const char* string = *strings; *strings != 0; string = *strings++) {
901 : HandleScope scope(isolate);
902 : Handle<String> a =
903 590 : isolate->factory()->InternalizeUtf8String(CStrVector(string));
904 : // InternalizeUtf8String may return a failure if a GC is needed.
905 590 : CHECK(a->IsInternalizedString());
906 590 : Handle<String> b = factory->InternalizeUtf8String(string);
907 590 : CHECK_EQ(*b, *a);
908 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
909 590 : b = isolate->factory()->InternalizeUtf8String(CStrVector(string));
910 590 : CHECK_EQ(*b, *a);
911 590 : CHECK(b->IsUtf8EqualTo(CStrVector(string)));
912 : }
913 10 : }
914 :
915 :
916 23723 : TEST(StringTable) {
917 5 : CcTest::InitializeVM();
918 :
919 5 : v8::HandleScope sc(CcTest::isolate());
920 5 : CheckInternalizedStrings(not_so_random_string_table);
921 5 : CheckInternalizedStrings(not_so_random_string_table);
922 5 : }
923 :
924 :
925 23723 : TEST(FunctionAllocation) {
926 5 : CcTest::InitializeVM();
927 : Isolate* isolate = CcTest::i_isolate();
928 : Factory* factory = isolate->factory();
929 :
930 5 : v8::HandleScope sc(CcTest::isolate());
931 5 : Handle<String> name = factory->InternalizeUtf8String("theFunction");
932 5 : Handle<JSFunction> function = factory->NewFunction(name);
933 :
934 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
935 : Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
936 :
937 5 : Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
938 5 : Handle<JSObject> obj = factory->NewJSObject(function);
939 5 : JSReceiver::SetProperty(obj, prop_name, twenty_three, LanguageMode::kSloppy)
940 10 : .Check();
941 10 : CHECK_EQ(Smi::FromInt(23),
942 : *Object::GetProperty(obj, prop_name).ToHandleChecked());
943 : // Check that we can add properties to function objects.
944 : JSReceiver::SetProperty(function, prop_name, twenty_four,
945 5 : LanguageMode::kSloppy)
946 10 : .Check();
947 10 : CHECK_EQ(Smi::FromInt(24),
948 5 : *Object::GetProperty(function, prop_name).ToHandleChecked());
949 5 : }
950 :
951 :
952 23723 : TEST(ObjectProperties) {
953 5 : CcTest::InitializeVM();
954 : Isolate* isolate = CcTest::i_isolate();
955 : Factory* factory = isolate->factory();
956 :
957 5 : v8::HandleScope sc(CcTest::isolate());
958 5 : Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
959 : Handle<Object> object = Object::GetProperty(
960 15 : CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
961 5 : Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
962 5 : Handle<JSObject> obj = factory->NewJSObject(constructor);
963 5 : Handle<String> first = factory->InternalizeUtf8String("first");
964 5 : Handle<String> second = factory->InternalizeUtf8String("second");
965 :
966 : Handle<Smi> one(Smi::FromInt(1), isolate);
967 : Handle<Smi> two(Smi::FromInt(2), isolate);
968 :
969 : // check for empty
970 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
971 :
972 : // add first
973 10 : JSReceiver::SetProperty(obj, first, one, LanguageMode::kSloppy).Check();
974 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
975 :
976 : // delete first
977 10 : CHECK(Just(true) ==
978 : JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
979 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
980 :
981 : // add first and then second
982 10 : JSReceiver::SetProperty(obj, first, one, LanguageMode::kSloppy).Check();
983 10 : JSReceiver::SetProperty(obj, second, two, LanguageMode::kSloppy).Check();
984 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
985 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
986 :
987 : // delete first and then second
988 10 : CHECK(Just(true) ==
989 : JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
990 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
991 10 : CHECK(Just(true) ==
992 : JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
993 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
994 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
995 :
996 : // add first and then second
997 10 : JSReceiver::SetProperty(obj, first, one, LanguageMode::kSloppy).Check();
998 10 : JSReceiver::SetProperty(obj, second, two, LanguageMode::kSloppy).Check();
999 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
1000 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
1001 :
1002 : // delete second and then first
1003 10 : CHECK(Just(true) ==
1004 : JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
1005 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
1006 10 : CHECK(Just(true) ==
1007 : JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
1008 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
1009 10 : CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
1010 :
1011 : // check string and internalized string match
1012 : const char* string1 = "fisk";
1013 5 : Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
1014 10 : JSReceiver::SetProperty(obj, s1, one, LanguageMode::kSloppy).Check();
1015 5 : Handle<String> s1_string = factory->InternalizeUtf8String(string1);
1016 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
1017 :
1018 : // check internalized string and string match
1019 : const char* string2 = "fugl";
1020 5 : Handle<String> s2_string = factory->InternalizeUtf8String(string2);
1021 10 : JSReceiver::SetProperty(obj, s2_string, one, LanguageMode::kSloppy).Check();
1022 5 : Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
1023 10 : CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
1024 5 : }
1025 :
1026 :
1027 23723 : TEST(JSObjectMaps) {
1028 5 : CcTest::InitializeVM();
1029 : Isolate* isolate = CcTest::i_isolate();
1030 : Factory* factory = isolate->factory();
1031 :
1032 5 : v8::HandleScope sc(CcTest::isolate());
1033 5 : Handle<String> name = factory->InternalizeUtf8String("theFunction");
1034 5 : Handle<JSFunction> function = factory->NewFunction(name);
1035 :
1036 5 : Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
1037 5 : Handle<JSObject> obj = factory->NewJSObject(function);
1038 : Handle<Map> initial_map(function->initial_map());
1039 :
1040 : // Set a propery
1041 : Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
1042 5 : JSReceiver::SetProperty(obj, prop_name, twenty_three, LanguageMode::kSloppy)
1043 10 : .Check();
1044 10 : CHECK_EQ(Smi::FromInt(23),
1045 : *Object::GetProperty(obj, prop_name).ToHandleChecked());
1046 :
1047 : // Check the map has changed
1048 5 : CHECK(*initial_map != obj->map());
1049 5 : }
1050 :
1051 :
// Checks JSArray length handling and the fast -> dictionary elements
// transition that happens when the length exceeds the Smi range.
TEST(JSArray) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<String> name = factory->InternalizeUtf8String("Array");
  // Fetch the global Array constructor.
  Handle<Object> fun_obj = Object::GetProperty(
      CcTest::i_isolate()->global_object(), name).ToHandleChecked();
  Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);

  // Allocate the object.
  Handle<Object> element;
  Handle<JSObject> object = factory->NewJSObject(function);
  Handle<JSArray> array = Handle<JSArray>::cast(object);
  // We just initialized the VM, no heap allocation failure yet.
  JSArray::Initialize(array, 0);

  // Set array length to 0.
  JSArray::SetLength(array, 0);
  CHECK_EQ(Smi::kZero, array->length());
  // Must be in fast mode.
  CHECK(array->HasSmiOrObjectElements());

  // array[length] = name. Writing at index 0 grows the length to 1.
  JSReceiver::SetElement(isolate, array, 0, name, LanguageMode::kSloppy)
      .Check();
  CHECK_EQ(Smi::FromInt(1), array->length());
  element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);

  // Set array length with larger than smi value.
  JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);

  uint32_t int_length = 0;
  CHECK(array->length()->ToArrayIndex(&int_length));
  CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
  CHECK(array->HasDictionaryElements());  // Must be in slow mode.

  // array[length] = name. Writing at the (huge) length index must work in
  // dictionary mode and bump the length by one.
  JSReceiver::SetElement(isolate, array, int_length, name,
                         LanguageMode::kSloppy)
      .Check();
  uint32_t new_int_length = 0;
  CHECK(array->length()->ToArrayIndex(&new_int_length));
  CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
  element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
  CHECK_EQ(*element, *name);
  // The element written while the array was in fast mode must survive the
  // transition to dictionary elements.
  element = Object::GetElement(isolate, array, 0).ToHandleChecked();
  CHECK_EQ(*element, *name);
}
1103 :
1104 :
1105 23723 : TEST(JSObjectCopy) {
1106 5 : CcTest::InitializeVM();
1107 : Isolate* isolate = CcTest::i_isolate();
1108 : Factory* factory = isolate->factory();
1109 :
1110 5 : v8::HandleScope sc(CcTest::isolate());
1111 5 : Handle<String> object_string(String::cast(CcTest::heap()->Object_string()));
1112 : Handle<Object> object = Object::GetProperty(
1113 15 : CcTest::i_isolate()->global_object(), object_string).ToHandleChecked();
1114 5 : Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
1115 5 : Handle<JSObject> obj = factory->NewJSObject(constructor);
1116 5 : Handle<String> first = factory->InternalizeUtf8String("first");
1117 5 : Handle<String> second = factory->InternalizeUtf8String("second");
1118 :
1119 : Handle<Smi> one(Smi::FromInt(1), isolate);
1120 : Handle<Smi> two(Smi::FromInt(2), isolate);
1121 :
1122 10 : JSReceiver::SetProperty(obj, first, one, LanguageMode::kSloppy).Check();
1123 10 : JSReceiver::SetProperty(obj, second, two, LanguageMode::kSloppy).Check();
1124 :
1125 10 : JSReceiver::SetElement(isolate, obj, 0, first, LanguageMode::kSloppy).Check();
1126 5 : JSReceiver::SetElement(isolate, obj, 1, second, LanguageMode::kSloppy)
1127 10 : .Check();
1128 :
1129 : // Make the clone.
1130 : Handle<Object> value1, value2;
1131 5 : Handle<JSObject> clone = factory->CopyJSObject(obj);
1132 5 : CHECK(!clone.is_identical_to(obj));
1133 :
1134 10 : value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1135 10 : value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1136 5 : CHECK_EQ(*value1, *value2);
1137 10 : value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1138 10 : value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1139 5 : CHECK_EQ(*value1, *value2);
1140 :
1141 10 : value1 = Object::GetProperty(obj, first).ToHandleChecked();
1142 10 : value2 = Object::GetProperty(clone, first).ToHandleChecked();
1143 5 : CHECK_EQ(*value1, *value2);
1144 10 : value1 = Object::GetProperty(obj, second).ToHandleChecked();
1145 10 : value2 = Object::GetProperty(clone, second).ToHandleChecked();
1146 5 : CHECK_EQ(*value1, *value2);
1147 :
1148 : // Flip the values.
1149 10 : JSReceiver::SetProperty(clone, first, two, LanguageMode::kSloppy).Check();
1150 10 : JSReceiver::SetProperty(clone, second, one, LanguageMode::kSloppy).Check();
1151 :
1152 5 : JSReceiver::SetElement(isolate, clone, 0, second, LanguageMode::kSloppy)
1153 10 : .Check();
1154 5 : JSReceiver::SetElement(isolate, clone, 1, first, LanguageMode::kSloppy)
1155 10 : .Check();
1156 :
1157 10 : value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
1158 10 : value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
1159 5 : CHECK_EQ(*value1, *value2);
1160 10 : value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
1161 10 : value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
1162 5 : CHECK_EQ(*value1, *value2);
1163 :
1164 10 : value1 = Object::GetProperty(obj, second).ToHandleChecked();
1165 10 : value2 = Object::GetProperty(clone, first).ToHandleChecked();
1166 5 : CHECK_EQ(*value1, *value2);
1167 10 : value1 = Object::GetProperty(obj, first).ToHandleChecked();
1168 10 : value2 = Object::GetProperty(clone, second).ToHandleChecked();
1169 5 : CHECK_EQ(*value1, *value2);
1170 5 : }
1171 :
1172 :
1173 23723 : TEST(StringAllocation) {
1174 5 : CcTest::InitializeVM();
1175 : Isolate* isolate = CcTest::i_isolate();
1176 : Factory* factory = isolate->factory();
1177 :
1178 : const unsigned char chars[] = { 0xe5, 0xa4, 0xa7 };
1179 510 : for (int length = 0; length < 100; length++) {
1180 500 : v8::HandleScope scope(CcTest::isolate());
1181 500 : char* non_one_byte = NewArray<char>(3 * length + 1);
1182 500 : char* one_byte = NewArray<char>(length + 1);
1183 500 : non_one_byte[3 * length] = 0;
1184 500 : one_byte[length] = 0;
1185 25250 : for (int i = 0; i < length; i++) {
1186 24750 : one_byte[i] = 'a';
1187 24750 : non_one_byte[3 * i] = chars[0];
1188 24750 : non_one_byte[3 * i + 1] = chars[1];
1189 24750 : non_one_byte[3 * i + 2] = chars[2];
1190 : }
1191 : Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1192 500 : Vector<const char>(non_one_byte, 3 * length));
1193 500 : CHECK_EQ(length, non_one_byte_sym->length());
1194 : Handle<String> one_byte_sym =
1195 500 : factory->InternalizeOneByteString(OneByteVector(one_byte, length));
1196 500 : CHECK_EQ(length, one_byte_sym->length());
1197 : Handle<String> non_one_byte_str =
1198 500 : factory->NewStringFromUtf8(Vector<const char>(non_one_byte, 3 * length))
1199 1000 : .ToHandleChecked();
1200 500 : non_one_byte_str->Hash();
1201 500 : CHECK_EQ(length, non_one_byte_str->length());
1202 : Handle<String> one_byte_str =
1203 500 : factory->NewStringFromUtf8(Vector<const char>(one_byte, length))
1204 1000 : .ToHandleChecked();
1205 500 : one_byte_str->Hash();
1206 500 : CHECK_EQ(length, one_byte_str->length());
1207 : DeleteArray(non_one_byte);
1208 : DeleteArray(one_byte);
1209 500 : }
1210 5 : }
1211 :
1212 :
1213 5 : static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1214 : // Count the number of objects found in the heap.
1215 : int found_count = 0;
1216 5 : HeapIterator iterator(heap);
1217 40010 : for (HeapObject* obj = iterator.next(); obj != nullptr;
1218 : obj = iterator.next()) {
1219 240030 : for (int i = 0; i < size; i++) {
1220 480060 : if (*objs[i] == obj) {
1221 30 : found_count++;
1222 : }
1223 : }
1224 : }
1225 5 : return found_count;
1226 : }
1227 :
1228 :
// Checks that a HeapIterator walk finds objects allocated in every space:
// new space, old space, and large object space.
TEST(Iteration) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Array of objects to scan heap for.
  const int objs_count = 6;
  Handle<Object> objs[objs_count];
  int next_objs_index = 0;

  // Allocate a JS array to OLD_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewJSArray(10);
  objs[next_objs_index++] = factory->NewJSArray(10, HOLEY_ELEMENTS, TENURED);

  // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
  objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
  objs[next_objs_index++] =
      factory->NewStringFromStaticChars("abcdefghij", TENURED);

  // Allocate a large string (for large object space).
  int large_size = kMaxRegularHeapObjectSize + 1;
  char* str = new char[large_size];
  for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
  str[large_size - 1] = '\0';
  objs[next_objs_index++] = factory->NewStringFromAsciiChecked(str, TENURED);
  delete[] str;

  // Add a Map object to look for.
  objs[next_objs_index++] = Handle<Map>(HeapObject::cast(*objs[0])->map());

  // Every tracked object must be found exactly once by the heap walk.
  CHECK_EQ(objs_count, next_objs_index);
  CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
}
1263 :
// Checks that installing an already-optimized function through the
// CompileLazy built-in while incremental marking is active goes through the
// incremental write barrier (rather than corrupting the marking state).
TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
  if (!FLAG_incremental_marking) return;
  // Turn off always_opt because it interferes with running the built-in for
  // the last call to g().
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());

  CompileRun(
      "function make_closure(x) {"
      "  return function() { return x + 3 };"
      "}"
      "var f = make_closure(5); f();"
      "var g = make_closure(5);");

  // Check f is compiled.
  Handle<String> f_name = factory->InternalizeUtf8String("f");
  Handle<Object> f_value =
      Object::GetProperty(isolate->global_object(), f_name).ToHandleChecked();
  Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
  CHECK(f_function->is_compiled());

  // Check g is not compiled.
  Handle<String> g_name = factory->InternalizeUtf8String("g");
  Handle<Object> g_value =
      Object::GetProperty(isolate->global_object(), g_name).ToHandleChecked();
  Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
  CHECK(!g_function->is_compiled());

  // Start incremental marking before triggering the lazy-compile path below.
  heap::SimulateIncrementalMarking(heap);
  CompileRun("%OptimizeFunctionOnNextCall(f); f();");

  // g should now have available an optimized function, unmarked by gc. The
  // CompileLazy built-in will discover it and install it in the closure, and
  // the incremental write barrier should be used.
  CompileRun("g();");
  CHECK(g_function->is_compiled());
}
1306 :
// Checks that a compiled script stays in the compilation cache across a GC,
// but is evicted once its bytecode has aged past the collection threshold.
TEST(CompilationCacheCachingBehavior) {
  // If we do not have the compilation cache turned off, this test is invalid.
  if (!FLAG_compilation_cache) {
    return;
  }
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  CompilationCache* compilation_cache = isolate->compilation_cache();
  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);

  v8::HandleScope scope(CcTest::isolate());
  const char* raw_source =
      "function foo() {"
      "  var x = 42;"
      "  var y = 42;"
      "  var z = x + y;"
      "};"
      "foo();";
  Handle<String> source = factory->InternalizeUtf8String(raw_source);
  Handle<Context> native_context = isolate->native_context();

  {
    // Compile inside a nested scope so no handles to the result survive.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun(raw_source);
  }

  // The script should be in the cache now.
  InfoVectorPair pair = compilation_cache->LookupScript(
      source, Handle<Object>(), 0, 0, v8::ScriptOriginOptions(true, false),
      native_context, language_mode);
  CHECK(pair.has_shared());

  // Check that the code cache entry survives at least on GC.
  // (Unless --optimize-for-size, in which case it might get collected
  // immediately.)
  if (!FLAG_optimize_for_size) {
    CcTest::CollectAllGarbage();
    pair = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                           v8::ScriptOriginOptions(true, false),
                                           native_context, language_mode);
    CHECK(pair.has_shared());
  }

  // Progress code age until it's old and ready for GC.
  const int kAgingThreshold = 6;
  for (int i = 0; i < kAgingThreshold; i++) {
    CHECK(pair.shared()->HasBytecodeArray());
    pair.shared()->bytecode_array()->MakeOlder();
  }

  CcTest::CollectAllGarbage();
  // Ensure code aging cleared the entry from the cache.
  pair = compilation_cache->LookupScript(source, Handle<Object>(), 0, 0,
                                         v8::ScriptOriginOptions(true, false),
                                         native_context, language_mode);
  CHECK(!pair.has_shared());
}
1365 :
1366 :
// Compiles an empty function named |name|, runs it twice to gather feedback,
// and forces optimization via the %OptimizeFunctionOnNextCall intrinsic
// (requires --allow-natives-syntax in the caller).
static void OptimizeEmptyFunction(const char* name) {
  HandleScope scope(CcTest::i_isolate());
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();",
           name, name, name, name, name);
  CompileRun(source.start());
}
1378 :
1379 :
1380 : // Count the number of native contexts in the weak list of native contexts.
1381 381 : int CountNativeContexts() {
1382 : int count = 0;
1383 381 : Object* object = CcTest::heap()->native_contexts_list();
1384 2732 : while (!object->IsUndefined(CcTest::i_isolate())) {
1385 1970 : count++;
1386 : object = Context::cast(object)->next_context_link();
1387 : }
1388 381 : return count;
1389 : }
1390 :
// Checks that native contexts are chained on the heap's weak list of native
// contexts, that the list grows as contexts are created, and that only
// mark-compact (not scavenge) drops entries once a context becomes garbage.
TEST(TestInternalWeakLists) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  v8::V8::Initialize();

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  FLAG_retain_maps_for_n_gc = 0;

  static const int kNumTestContexts = 10;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::Local<v8::Context> ctx[kNumTestContexts];
  if (!isolate->use_optimizer()) return;

  CHECK_EQ(0, CountNativeContexts());

  // Create a number of global contexts which get linked together.
  for (int i = 0; i < kNumTestContexts; i++) {
    ctx[i] = v8::Context::New(CcTest::isolate());

    // Collect garbage that might have been created by one of the
    // installed extensions.
    isolate->compilation_cache()->Clear();
    CcTest::CollectAllGarbage();

    CHECK_EQ(i + 1, CountNativeContexts());

    ctx[i]->Enter();

    // Create a handle scope so no function objects get stuck in the outer
    // handle scope.
    HandleScope scope(isolate);
    OptimizeEmptyFunction("f1");
    OptimizeEmptyFunction("f2");
    OptimizeEmptyFunction("f3");
    OptimizeEmptyFunction("f4");
    OptimizeEmptyFunction("f5");

    // Remove function f1, and
    CompileRun("f1=null");

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }

    // Mark compact handles the weak references.
    isolate->compilation_cache()->Clear();
    CcTest::CollectAllGarbage();

    // Get rid of f3 and f5 in the same way.
    CompileRun("f3=null");
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }
    CcTest::CollectAllGarbage();
    CompileRun("f5=null");
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(NEW_SPACE);
    }
    CcTest::CollectAllGarbage();

    ctx[i]->Exit();
  }

  // Force compilation cache cleanup.
  CcTest::heap()->NotifyContextDisposed(true);
  CcTest::CollectAllGarbage();

  // Dispose the native contexts one by one.
  for (int i = 0; i < kNumTestContexts; i++) {
    // TODO(dcarney): is there a better way to do this?
    // Overwrite the local handle's slot so nothing keeps the context alive.
    i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
    *unsafe = CcTest::heap()->undefined_value();
    ctx[i].Clear();

    // Scavenge treats these references as strong.
    for (int j = 0; j < 10; j++) {
      CcTest::CollectGarbage(i::NEW_SPACE);
      CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
    }

    // Mark compact handles the weak references.
    CcTest::CollectAllGarbage();
    CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
  }

  CHECK_EQ(0, CountNativeContexts());
}
1485 :
1486 :
// Checks heap-size behavior of compiled regexp code: a regexp above the
// "too large to optimize" threshold compiles to much less code than an
// optimized regexp of half its source size.
TEST(TestSizeOfRegExpCode) {
  if (!FLAG_regexp_optimization) return;

  v8::V8::Initialize();

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  LocalContext context;

  // Adjust source below and this check to match
  // RegExpImpl::kRegExpTooLargeToOptimize.
  CHECK_EQ(i::RegExpImpl::kRegExpTooLargeToOptimize, 20 * KB);

  // Compile a regexp that is much larger if we are using regexp optimizations.
  CompileRun(
      "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
      "var half_size_reg_exp;"
      "while (reg_exp_source.length < 20 * 1024) {"
      "  half_size_reg_exp = reg_exp_source;"
      "  reg_exp_source = reg_exp_source + reg_exp_source;"
      "}"
      // Flatten string.
      "reg_exp_source.match(/f/);");

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::CollectAllAvailableGarbage();
  MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile the oversized regexp and measure the heap growth.
  CompileRun("'foo'.match(reg_exp_source);");
  CcTest::CollectAllAvailableGarbage();
  int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());

  // Compile the half-size regexp, which is small enough to be optimized.
  CompileRun("'foo'.match(half_size_reg_exp);");
  CcTest::CollectAllAvailableGarbage();
  int size_with_optimized_regexp =
      static_cast<int>(CcTest::heap()->SizeOfObjects());

  int size_of_regexp_code = size_with_regexp - initial_size;

  // On some platforms the debug-code flag causes huge amounts of regexp code
  // to be emitted, breaking this test.
  if (!FLAG_debug_code) {
    CHECK_LE(size_of_regexp_code, 1 * MB);
  }

  // Small regexp is half the size, but compiles to more than twice the code
  // due to the optimization steps.
  CHECK_GE(size_with_optimized_regexp,
           size_with_regexp + size_of_regexp_code * 2);
}
1543 :
1544 :
// Checks that Heap::SizeOfObjects() tracks allocations exactly and returns
// to its baseline after a full GC, even while concurrent sweeping may still
// be in progress.
HEAP_TEST(TestSizeOfObjects) {
  v8::V8::Initialize();
  Heap* heap = CcTest::heap();
  MarkCompactCollector* collector = heap->mark_compact_collector();

  // Get initial heap size after several full GCs, which will stabilize
  // the heap size and return with sweeping finished completely.
  CcTest::CollectAllAvailableGarbage();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  int initial_size = static_cast<int>(heap->SizeOfObjects());

  {
    // Allocate objects on several different old-space pages so that
    // concurrent sweeper threads will be busy sweeping the old space on
    // subsequent GC runs.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
    for (int i = 1; i <= 100; i++) {
      heap->AllocateFixedArray(8192, TENURED).ToObjectChecked();
      // The reported size must grow by exactly one array per iteration.
      CHECK_EQ(initial_size + i * filler_size,
               static_cast<int>(heap->SizeOfObjects()));
    }
  }

  // The heap size should go back to initial size after a full GC, even
  // though sweeping didn't finish yet.
  CcTest::CollectAllGarbage();
  // Normally sweeping would not be complete here, but no guarantees.
  CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
  // Waiting for sweeper threads should not change heap size.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
}
1582 :
1583 :
1584 23723 : TEST(TestAlignmentCalculations) {
1585 : // Maximum fill amounts are consistent.
1586 : int maximum_double_misalignment = kDoubleSize - kPointerSize;
1587 5 : int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1588 5 : CHECK_EQ(0, max_word_fill);
1589 5 : int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1590 5 : CHECK_EQ(maximum_double_misalignment, max_double_fill);
1591 5 : int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1592 5 : CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1593 :
1594 : Address base = static_cast<Address>(nullptr);
1595 : int fill = 0;
1596 :
1597 : // Word alignment never requires fill.
1598 5 : fill = Heap::GetFillToAlign(base, kWordAligned);
1599 5 : CHECK_EQ(0, fill);
1600 5 : fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
1601 5 : CHECK_EQ(0, fill);
1602 :
1603 : // No fill is required when address is double aligned.
1604 5 : fill = Heap::GetFillToAlign(base, kDoubleAligned);
1605 5 : CHECK_EQ(0, fill);
1606 : // Fill is required if address is not double aligned.
1607 5 : fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
1608 5 : CHECK_EQ(maximum_double_misalignment, fill);
1609 : // kDoubleUnaligned has the opposite fill amounts.
1610 5 : fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
1611 5 : CHECK_EQ(maximum_double_misalignment, fill);
1612 5 : fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
1613 5 : CHECK_EQ(0, fill);
1614 5 : }
1615 :
1616 :
1617 : static HeapObject* NewSpaceAllocateAligned(int size,
1618 : AllocationAlignment alignment) {
1619 : Heap* heap = CcTest::heap();
1620 : AllocationResult allocation =
1621 : heap->new_space()->AllocateRawAligned(size, alignment);
1622 : HeapObject* obj = nullptr;
1623 : allocation.To(&obj);
1624 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1625 : return obj;
1626 : }
1627 :
1628 :
1629 : // Get new space allocation into the desired alignment.
// Get new space allocation into the desired alignment.
// If the current top is misaligned, burns the gap (plus the requested extra
// |offset|) with a word-aligned filler allocation, then returns the new top.
static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  int fill = Heap::GetFillToAlign(*top_addr, alignment);
  if (fill) {
    NewSpaceAllocateAligned(fill + offset, kWordAligned);
  }
  return *top_addr;
}
1638 :
1639 :
// Checks aligned new-space allocation: requests at already-aligned addresses
// consume no extra memory, while misaligned requests insert a one-word filler
// before the object. The body only runs on 32-bit targets, where
// kDoubleSize != kPointerSize.
TEST(TestAlignedAllocation) {
  // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
  const intptr_t double_misalignment = kDoubleSize - kPointerSize;
  Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
  Address start;
  HeapObject* obj;
  HeapObject* filler;
  if (double_misalignment) {
    // Allocate a pointer sized object that must be double aligned at an
    // aligned address.
    start = AlignNewSpace(kDoubleAligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is no filler.
    CHECK_EQ(kPointerSize, *top_addr - start);

    // Allocate a second pointer sized object that must be double aligned at an
    // unaligned address.
    start = AlignNewSpace(kDoubleAligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);

    // Similarly for kDoubleUnaligned.
    start = AlignNewSpace(kDoubleUnaligned, 0);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    CHECK_EQ(kPointerSize, *top_addr - start);
    start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
    obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
    CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
    // There is a filler object before the object.
    filler = HeapObject::FromAddress(start);
    CHECK(obj != filler && filler->IsFiller() &&
          filler->Size() == kPointerSize);
    CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
  }
}
1682 :
1683 :
1684 : static HeapObject* OldSpaceAllocateAligned(int size,
1685 : AllocationAlignment alignment) {
1686 : Heap* heap = CcTest::heap();
1687 : AllocationResult allocation =
1688 : heap->old_space()->AllocateRawAligned(size, alignment);
1689 : HeapObject* obj = nullptr;
1690 : allocation.To(&obj);
1691 : heap->CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
1692 : return obj;
1693 : }
1694 :
1695 :
1696 : // Get old space allocation into the desired alignment.
1697 : static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
1698 : Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
1699 : int fill = Heap::GetFillToAlign(*top_addr, alignment);
1700 : int allocation = fill + offset;
1701 : if (allocation) {
1702 : OldSpaceAllocateAligned(allocation, kWordAligned);
1703 : }
1704 : Address top = *top_addr;
1705 : // Now force the remaining allocation onto the free list.
1706 : CcTest::heap()->old_space()->EmptyAllocationInfo();
1707 : return top;
1708 : }
1709 :
1710 :
1711 : // Test the case where allocation must be done from the free list, so filler
1712 : // may precede or follow the object.
1713 23723 : TEST(TestAlignedOverAllocation) {
1714 10 : Heap* heap = CcTest::heap();
1715 : // Test checks for fillers before and behind objects and requires a fresh
1716 : // page and empty free list.
1717 5 : heap::AbandonCurrentlyFreeMemory(heap->old_space());
1718 : // Allocate a dummy object to properly set up the linear allocation info.
1719 : AllocationResult dummy =
1720 5 : heap->old_space()->AllocateRawUnaligned(kPointerSize);
1721 5 : CHECK(!dummy.IsRetry());
1722 : heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kPointerSize,
1723 5 : ClearRecordedSlots::kNo);
1724 :
1725 : // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
1726 : const intptr_t double_misalignment = kDoubleSize - kPointerSize;
1727 : Address start;
1728 : HeapObject* obj;
1729 : HeapObject* filler;
1730 : if (double_misalignment) {
1731 : start = AlignOldSpace(kDoubleAligned, 0);
1732 : obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
1733 : // The object is aligned.
1734 : CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
1735 : // Try the opposite alignment case.
1736 : start = AlignOldSpace(kDoubleAligned, kPointerSize);
1737 : obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
1738 : CHECK(IsAddressAligned(obj->address(), kDoubleAlignment));
1739 : filler = HeapObject::FromAddress(start);
1740 : CHECK(obj != filler);
1741 : CHECK(filler->IsFiller());
1742 : CHECK_EQ(kPointerSize, filler->Size());
1743 : CHECK(obj != filler && filler->IsFiller() &&
1744 : filler->Size() == kPointerSize);
1745 :
1746 : // Similarly for kDoubleUnaligned.
1747 : start = AlignOldSpace(kDoubleUnaligned, 0);
1748 : obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
1749 : // The object is aligned.
1750 : CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
1751 : // Try the opposite alignment case.
1752 : start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
1753 : obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
1754 : CHECK(IsAddressAligned(obj->address(), kDoubleAlignment, kPointerSize));
1755 : filler = HeapObject::FromAddress(start);
1756 : CHECK(obj != filler && filler->IsFiller() &&
1757 : filler->Size() == kPointerSize);
1758 : }
1759 5 : }
1760 :
1761 :
1762 23723 : TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
1763 5 : CcTest::InitializeVM();
1764 5 : HeapIterator iterator(CcTest::heap());
1765 5 : intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1766 : intptr_t size_of_objects_2 = 0;
1767 39975 : for (HeapObject* obj = iterator.next(); obj != nullptr;
1768 : obj = iterator.next()) {
1769 39970 : if (!obj->IsFreeSpace()) {
1770 39970 : size_of_objects_2 += obj->Size();
1771 : }
1772 : }
1773 : // Delta must be within 5% of the larger result.
1774 : // TODO(gc): Tighten this up by distinguishing between byte
1775 : // arrays that are real and those that merely mark free space
1776 : // on the heap.
1777 5 : if (size_of_objects_1 > size_of_objects_2) {
1778 5 : intptr_t delta = size_of_objects_1 - size_of_objects_2;
1779 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1780 : ", "
1781 : "Iterator: %" V8PRIdPTR
1782 : ", "
1783 : "delta: %" V8PRIdPTR "\n",
1784 5 : size_of_objects_1, size_of_objects_2, delta);
1785 5 : CHECK_GT(size_of_objects_1 / 20, delta);
1786 : } else {
1787 0 : intptr_t delta = size_of_objects_2 - size_of_objects_1;
1788 : PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1789 : ", "
1790 : "Iterator: %" V8PRIdPTR
1791 : ", "
1792 : "delta: %" V8PRIdPTR "\n",
1793 0 : size_of_objects_1, size_of_objects_2, delta);
1794 0 : CHECK_GT(size_of_objects_2 / 20, delta);
1795 5 : }
1796 5 : }
1797 :
1798 23723 : TEST(GrowAndShrinkNewSpace) {
1799 : // Avoid shrinking new space in GC epilogue. This can happen if allocation
1800 : // throughput samples have been taken while executing the benchmark.
1801 5 : FLAG_predictable = true;
1802 :
1803 5 : CcTest::InitializeVM();
1804 5 : Heap* heap = CcTest::heap();
1805 : NewSpace* new_space = heap->new_space();
1806 :
1807 5 : if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1808 5 : return;
1809 : }
1810 :
1811 : // Make sure we're in a consistent state to start out.
1812 5 : CcTest::CollectAllGarbage();
1813 :
1814 : // Explicitly growing should double the space capacity.
1815 : size_t old_capacity, new_capacity;
1816 : old_capacity = new_space->TotalCapacity();
1817 5 : new_space->Grow();
1818 : new_capacity = new_space->TotalCapacity();
1819 5 : CHECK_EQ(2 * old_capacity, new_capacity);
1820 :
1821 : old_capacity = new_space->TotalCapacity();
1822 : {
1823 5 : v8::HandleScope temporary_scope(CcTest::isolate());
1824 5 : heap::SimulateFullSpace(new_space);
1825 : }
1826 : new_capacity = new_space->TotalCapacity();
1827 5 : CHECK_EQ(old_capacity, new_capacity);
1828 :
1829 : // Explicitly shrinking should not affect space capacity.
1830 : old_capacity = new_space->TotalCapacity();
1831 5 : new_space->Shrink();
1832 : new_capacity = new_space->TotalCapacity();
1833 5 : CHECK_EQ(old_capacity, new_capacity);
1834 :
1835 : // Let the scavenger empty the new space.
1836 5 : CcTest::CollectGarbage(NEW_SPACE);
1837 5 : CHECK_LE(new_space->Size(), old_capacity);
1838 :
1839 : // Explicitly shrinking should halve the space capacity.
1840 : old_capacity = new_space->TotalCapacity();
1841 5 : new_space->Shrink();
1842 : new_capacity = new_space->TotalCapacity();
1843 5 : CHECK_EQ(old_capacity, 2 * new_capacity);
1844 :
1845 : // Consecutive shrinking should not affect space capacity.
1846 : old_capacity = new_space->TotalCapacity();
1847 5 : new_space->Shrink();
1848 5 : new_space->Shrink();
1849 5 : new_space->Shrink();
1850 : new_capacity = new_space->TotalCapacity();
1851 5 : CHECK_EQ(old_capacity, new_capacity);
1852 : }
1853 :
1854 23723 : TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
1855 5 : CcTest::InitializeVM();
1856 10 : Heap* heap = CcTest::heap();
1857 5 : if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
1858 0 : return;
1859 : }
1860 :
1861 5 : v8::HandleScope scope(CcTest::isolate());
1862 : NewSpace* new_space = heap->new_space();
1863 : size_t old_capacity, new_capacity;
1864 : old_capacity = new_space->TotalCapacity();
1865 5 : new_space->Grow();
1866 : new_capacity = new_space->TotalCapacity();
1867 5 : CHECK_EQ(2 * old_capacity, new_capacity);
1868 : {
1869 5 : v8::HandleScope temporary_scope(CcTest::isolate());
1870 5 : heap::SimulateFullSpace(new_space);
1871 : }
1872 5 : CcTest::CollectAllAvailableGarbage();
1873 : new_capacity = new_space->TotalCapacity();
1874 5 : CHECK_EQ(old_capacity, new_capacity);
1875 : }
1876 :
1877 60 : static int NumberOfGlobalObjects() {
1878 : int count = 0;
1879 60 : HeapIterator iterator(CcTest::heap());
1880 465281 : for (HeapObject* obj = iterator.next(); obj != nullptr;
1881 : obj = iterator.next()) {
1882 465221 : if (obj->IsJSGlobalObject()) count++;
1883 : }
1884 60 : return count;
1885 : }
1886 :
1887 :
1888 : // Test that we don't embed maps from foreign contexts into
1889 : // optimized code.
1890 23723 : TEST(LeakNativeContextViaMap) {
1891 5 : FLAG_allow_natives_syntax = true;
1892 5 : v8::Isolate* isolate = CcTest::isolate();
1893 5 : v8::HandleScope outer_scope(isolate);
1894 : v8::Persistent<v8::Context> ctx1p;
1895 : v8::Persistent<v8::Context> ctx2p;
1896 : {
1897 5 : v8::HandleScope scope(isolate);
1898 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
1899 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
1900 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1901 : }
1902 :
1903 5 : CcTest::CollectAllAvailableGarbage();
1904 5 : CHECK_EQ(2, NumberOfGlobalObjects());
1905 :
1906 : {
1907 5 : v8::HandleScope inner_scope(isolate);
1908 : CompileRun("var v = {x: 42}");
1909 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1910 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1911 : v8::Local<v8::Value> v =
1912 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
1913 5 : ctx2->Enter();
1914 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
1915 : v8::Local<v8::Value> res = CompileRun(
1916 : "function f() { return o.x; }"
1917 : "for (var i = 0; i < 10; ++i) f();"
1918 : "%OptimizeFunctionOnNextCall(f);"
1919 : "f();");
1920 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
1921 25 : CHECK(ctx2->Global()
1922 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
1923 : .FromJust());
1924 5 : ctx2->Exit();
1925 5 : v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
1926 : ctx1p.Reset();
1927 5 : isolate->ContextDisposedNotification();
1928 : }
1929 5 : CcTest::CollectAllAvailableGarbage();
1930 5 : CHECK_EQ(1, NumberOfGlobalObjects());
1931 : ctx2p.Reset();
1932 5 : CcTest::CollectAllAvailableGarbage();
1933 5 : CHECK_EQ(0, NumberOfGlobalObjects());
1934 5 : }
1935 :
1936 :
1937 : // Test that we don't embed functions from foreign contexts into
1938 : // optimized code.
1939 23723 : TEST(LeakNativeContextViaFunction) {
1940 5 : FLAG_allow_natives_syntax = true;
1941 5 : v8::Isolate* isolate = CcTest::isolate();
1942 5 : v8::HandleScope outer_scope(isolate);
1943 : v8::Persistent<v8::Context> ctx1p;
1944 : v8::Persistent<v8::Context> ctx2p;
1945 : {
1946 5 : v8::HandleScope scope(isolate);
1947 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
1948 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
1949 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1950 : }
1951 :
1952 5 : CcTest::CollectAllAvailableGarbage();
1953 5 : CHECK_EQ(2, NumberOfGlobalObjects());
1954 :
1955 : {
1956 5 : v8::HandleScope inner_scope(isolate);
1957 : CompileRun("var v = function() { return 42; }");
1958 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
1959 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
1960 : v8::Local<v8::Value> v =
1961 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
1962 5 : ctx2->Enter();
1963 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
1964 : v8::Local<v8::Value> res = CompileRun(
1965 : "function f(x) { return x(); }"
1966 : "for (var i = 0; i < 10; ++i) f(o);"
1967 : "%OptimizeFunctionOnNextCall(f);"
1968 : "f(o);");
1969 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
1970 25 : CHECK(ctx2->Global()
1971 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
1972 : .FromJust());
1973 5 : ctx2->Exit();
1974 5 : ctx1->Exit();
1975 : ctx1p.Reset();
1976 5 : isolate->ContextDisposedNotification();
1977 : }
1978 5 : CcTest::CollectAllAvailableGarbage();
1979 5 : CHECK_EQ(1, NumberOfGlobalObjects());
1980 : ctx2p.Reset();
1981 5 : CcTest::CollectAllAvailableGarbage();
1982 5 : CHECK_EQ(0, NumberOfGlobalObjects());
1983 5 : }
1984 :
1985 :
1986 23723 : TEST(LeakNativeContextViaMapKeyed) {
1987 5 : FLAG_allow_natives_syntax = true;
1988 5 : v8::Isolate* isolate = CcTest::isolate();
1989 5 : v8::HandleScope outer_scope(isolate);
1990 : v8::Persistent<v8::Context> ctx1p;
1991 : v8::Persistent<v8::Context> ctx2p;
1992 : {
1993 5 : v8::HandleScope scope(isolate);
1994 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
1995 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
1996 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
1997 : }
1998 :
1999 5 : CcTest::CollectAllAvailableGarbage();
2000 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2001 :
2002 : {
2003 5 : v8::HandleScope inner_scope(isolate);
2004 : CompileRun("var v = [42, 43]");
2005 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2006 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2007 : v8::Local<v8::Value> v =
2008 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2009 5 : ctx2->Enter();
2010 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2011 : v8::Local<v8::Value> res = CompileRun(
2012 : "function f() { return o[0]; }"
2013 : "for (var i = 0; i < 10; ++i) f();"
2014 : "%OptimizeFunctionOnNextCall(f);"
2015 : "f();");
2016 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2017 25 : CHECK(ctx2->Global()
2018 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2019 : .FromJust());
2020 5 : ctx2->Exit();
2021 5 : ctx1->Exit();
2022 : ctx1p.Reset();
2023 5 : isolate->ContextDisposedNotification();
2024 : }
2025 5 : CcTest::CollectAllAvailableGarbage();
2026 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2027 : ctx2p.Reset();
2028 5 : CcTest::CollectAllAvailableGarbage();
2029 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2030 5 : }
2031 :
2032 :
2033 23723 : TEST(LeakNativeContextViaMapProto) {
2034 5 : FLAG_allow_natives_syntax = true;
2035 5 : v8::Isolate* isolate = CcTest::isolate();
2036 5 : v8::HandleScope outer_scope(isolate);
2037 : v8::Persistent<v8::Context> ctx1p;
2038 : v8::Persistent<v8::Context> ctx2p;
2039 : {
2040 5 : v8::HandleScope scope(isolate);
2041 10 : ctx1p.Reset(isolate, v8::Context::New(isolate));
2042 10 : ctx2p.Reset(isolate, v8::Context::New(isolate));
2043 5 : v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2044 : }
2045 :
2046 5 : CcTest::CollectAllAvailableGarbage();
2047 5 : CHECK_EQ(2, NumberOfGlobalObjects());
2048 :
2049 : {
2050 5 : v8::HandleScope inner_scope(isolate);
2051 : CompileRun("var v = { y: 42}");
2052 : v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2053 : v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2054 : v8::Local<v8::Value> v =
2055 20 : ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2056 5 : ctx2->Enter();
2057 20 : CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2058 : v8::Local<v8::Value> res = CompileRun(
2059 : "function f() {"
2060 : " var p = {x: 42};"
2061 : " p.__proto__ = o;"
2062 : " return p.x;"
2063 : "}"
2064 : "for (var i = 0; i < 10; ++i) f();"
2065 : "%OptimizeFunctionOnNextCall(f);"
2066 : "f();");
2067 10 : CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2068 25 : CHECK(ctx2->Global()
2069 : ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2070 : .FromJust());
2071 5 : ctx2->Exit();
2072 5 : ctx1->Exit();
2073 : ctx1p.Reset();
2074 5 : isolate->ContextDisposedNotification();
2075 : }
2076 5 : CcTest::CollectAllAvailableGarbage();
2077 5 : CHECK_EQ(1, NumberOfGlobalObjects());
2078 : ctx2p.Reset();
2079 5 : CcTest::CollectAllAvailableGarbage();
2080 5 : CHECK_EQ(0, NumberOfGlobalObjects());
2081 5 : }
2082 :
2083 :
// Checks that running optimized instanceof code while the optimized
// function's code object is already black under incremental marking does
// not miss write barriers. Order of steps is significant throughout.
TEST(InstanceOfStubWriteBarrier) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_allow_natives_syntax = true;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = true;
#endif

  CcTest::InitializeVM();
  // Without the optimizer there is no optimized code to test against.
  if (!CcTest::i_isolate()->use_optimizer()) return;
  if (FLAG_force_marking_deque_overflows) return;
  v8::HandleScope outer_scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    v8::HandleScope scope(CcTest::isolate());
    // Warm up and optimize f; g later invokes f with a fresh receiver type.
    CompileRun(
        "function foo () { }"
        "function mkbar () { return new (new Function(\"\")) (); }"
        "function f (x) { return (x instanceof foo); }"
        "function g () { f(mkbar()); }"
        "f(new foo()); f(new foo());"
        "%OptimizeFunctionOnNextCall(f);"
        "f(new foo()); g();");
  }

  // Restart incremental marking from a clean state.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);

  i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CHECK(f->IsOptimized());

  IncrementalMarking::MarkingState* marking_state = marking->marking_state();

  // Step the marker until f's code object has been marked black (or marking
  // finished early).
  while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
    // Discard any pending GC requests otherwise we will get GC when we enter
    // code below.
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
  }

  CHECK(marking->IsMarking());

  {
    // Invoke the optimized f through g while marking is in progress; this is
    // the operation whose write barrier is under test.
    v8::HandleScope scope(CcTest::isolate());
    v8::Local<v8::Object> global = CcTest::global();
    v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
        global->Get(ctx, v8_str("g")).ToLocalChecked());
    g->Call(ctx, global, 0, nullptr).ToLocalChecked();
  }

  // Finish marking and verify the heap via a full old-space GC.
  CcTest::heap()->incremental_marking()->set_should_hurry(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
2143 :
// Verifies that Heap::current_gc_flags_ is set for the duration of a GC
// cycle, survives scavenges, and is reset after full GCs. Uses HEAP_TEST
// for friend access to the private flags field.
HEAP_TEST(GCFlags) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();

  heap->set_current_gc_flags(Heap::kNoGCFlags);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Set the flags to check whether we appropriately reset them after the GC.
  heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
  CcTest::CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);

  // Wait for any in-flight sweeping so the incremental marking restart below
  // starts from a quiescent heap.
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  IncrementalMarking* marking = heap->incremental_marking();
  marking->Stop();
  // Starting incremental marking with a flag must record it for the cycle.
  heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
                                i::GarbageCollectionReason::kTesting);
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  CcTest::CollectGarbage(NEW_SPACE);
  // NewSpace scavenges should not overwrite the flags.
  CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);

  // A full GC finishing the cycle resets the flags again.
  CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
}
2175 :
2176 :
// Checks that an idle notification with a long deadline finalizes an
// almost-finished incremental marking cycle, i.e. triggers exactly one GC.
TEST(IdleNotificationFinishMarking) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  const int initial_gc_count = CcTest::heap()->gc_count();
  heap::SimulateFullSpace(CcTest::heap()->old_space());
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);

  // Starting incremental marking must not itself cause a GC.
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);

  // Drain the marking worklist so only finalization remains.
  do {
    marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
  } while (
      !CcTest::heap()->mark_compact_collector()->marking_worklist()->IsEmpty());

  marking->SetWeakClosureWasOverApproximatedForTesting(true);

  // The next idle notification has to finish incremental marking.
  const double kLongIdleTime = 1000.0;
  CcTest::isolate()->IdleNotificationDeadline(
      (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
       static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
      kLongIdleTime);
  // Finalizing the marking cycle counts as exactly one GC.
  CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
}
2207 :
2208 :
2209 : // Test that HAllocateObject will always return an object in new-space.
2210 23723 : TEST(OptimizedAllocationAlwaysInNewSpace) {
2211 5 : FLAG_allow_natives_syntax = true;
2212 5 : CcTest::InitializeVM();
2213 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2214 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2215 : FLAG_stress_incremental_marking)
2216 : return;
2217 2 : v8::HandleScope scope(CcTest::isolate());
2218 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2219 2 : heap::SimulateFullSpace(CcTest::heap()->new_space());
2220 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2221 : v8::Local<v8::Value> res = CompileRun(
2222 : "function c(x) {"
2223 : " this.x = x;"
2224 : " for (var i = 0; i < 32; i++) {"
2225 : " this['x' + i] = x;"
2226 : " }"
2227 : "}"
2228 : "function f(x) { return new c(x); };"
2229 : "f(1); f(2); f(3);"
2230 : "%OptimizeFunctionOnNextCall(f);"
2231 : "f(4);");
2232 :
2233 8 : CHECK_EQ(4, res.As<v8::Object>()
2234 : ->GetRealNamedProperty(ctx, v8_str("x"))
2235 : .ToLocalChecked()
2236 : ->Int32Value(ctx)
2237 : .FromJust());
2238 :
2239 : i::Handle<JSReceiver> o =
2240 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2241 :
2242 6 : CHECK(CcTest::heap()->InNewSpace(*o));
2243 : }
2244 :
2245 :
// Checks that folded allocations in optimized code honor pretenuring: once
// the allocation site is tenured, the outer array, both inner arrays, and
// their backing stores must all land in old space.
TEST(OptimizedPretenuringAllocationFolding) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  // Pretenuring decisions need the optimizer and are skipped under
  // always-opt and GC stress configurations.
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // kPretenureCreationCount allocations plus the gc() make the allocation
  // site tenured before f is optimized.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array();"
              "function f() {"
              " for (var i = 0; i < number_elements; i++) {"
              " elements[i] = [[{}], [1.1]];"
              " }"
              " return elements[number_elements-1]"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  v8::Local<v8::Value> int_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
  i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
  v8::Local<v8::Value> double_array =
      v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
  i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));

  // Every piece of the folded allocation must have been pretenured.
  i::Handle<JSReceiver> o =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
2296 :
2297 :
// Checks that object-array literals created in optimized code are
// pretenured: both the array and its elements backing store must be in old
// space after the allocation site is tenured.
TEST(OptimizedPretenuringObjectArrayLiterals) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  // Pretenuring decisions need the optimizer and are skipped under
  // always-opt and GC stress configurations.
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking) {
    return;
  }
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // kPretenureCreationCount allocations plus the gc() make the allocation
  // site tenured before f is optimized.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              " for (var i = 0; i < number_elements; i++) {"
              " elements[i] = [{}, {}, {}];"
              " }"
              " return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
2338 :
2339 :
// Checks pretenuring of objects with mixed in-object properties (tagged
// object and double fields): the outer object, its inner object, and their
// non-unboxed fields must be in old space; unboxed double fields are
// verified by value instead.
TEST(OptimizedPretenuringMixedInObjectProperties) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  // Pretenuring decisions need the optimizer and are skipped under
  // always-opt and GC stress configurations.
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }


  // kPretenureCreationCount allocations plus the gc() make the allocation
  // site tenured before f is optimized.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              " for (var i = 0; i < number_elements; i++) {"
              " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
              " }"
              " return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  // idx1 is property "a" (an object), idx2 is property "b" (a double).
  FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
  FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
  CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
  // With unboxed doubles the field holds the raw value, not a HeapNumber.
  if (!o->IsUnboxedDoubleField(idx2)) {
    CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
  } else {
    CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
  }

  // Inner object {c: 2.2, d: {}} — same layout checks one level down.
  JSObject* inner_object =
      reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
  CHECK(CcTest::heap()->InOldSpace(inner_object));
  if (!inner_object->IsUnboxedDoubleField(idx1)) {
    CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
  } else {
    CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
  }
  CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
}
2397 :
2398 :
// Checks pretenuring of objects whose properties are all doubles: the
// object and its property backing store must be in old space after the
// allocation site is tenured.
TEST(OptimizedPretenuringDoubleArrayProperties) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  // Pretenuring decisions need the optimizer and are skipped under
  // always-opt and GC stress configurations.
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // kPretenureCreationCount allocations plus the gc() make the allocation
  // site tenured before f is optimized. Note: "elements[i - 1]" relies on
  // var hoisting (i == number_elements after the loop) — equivalent to the
  // "number_elements - 1" used by the sibling tests.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              " for (var i = 0; i < number_elements; i++) {"
              " elements[i] = {a: 1.1, b: 2.2};"
              " }"
              " return elements[i - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(*o));
  CHECK(CcTest::heap()->InOldSpace(o->property_array()));
}
2438 :
2439 :
// Checks pretenuring of double-array literals: the array and its (double)
// elements backing store must be in old space after the allocation site is
// tenured.
TEST(OptimizedPretenuringdoubleArrayLiterals) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  // Pretenuring decisions need the optimizer and are skipped under
  // always-opt and GC stress configurations.
  if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;
  v8::HandleScope scope(CcTest::isolate());

  // Grow new space until maximum capacity reached.
  while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
    CcTest::heap()->new_space()->Grow();
  }

  // kPretenureCreationCount allocations plus the gc() make the allocation
  // site tenured before f is optimized.
  i::ScopedVector<char> source(1024);
  i::SNPrintF(source,
              "var number_elements = %d;"
              "var elements = new Array(number_elements);"
              "function f() {"
              " for (var i = 0; i < number_elements; i++) {"
              " elements[i] = [1.1, 2.2, 3.3];"
              " }"
              " return elements[number_elements - 1];"
              "};"
              "f(); gc();"
              "f(); f();"
              "%%OptimizeFunctionOnNextCall(f);"
              "f();",
              kPretenureCreationCount);

  v8::Local<v8::Value> res = CompileRun(source.start());

  i::Handle<JSObject> o = Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));

  CHECK(CcTest::heap()->InOldSpace(o->elements()));
  CHECK(CcTest::heap()->InOldSpace(*o));
}
2479 :
2480 :
2481 23723 : TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2482 5 : FLAG_allow_natives_syntax = true;
2483 5 : FLAG_expose_gc = true;
2484 5 : CcTest::InitializeVM();
2485 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2486 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2487 : FLAG_stress_incremental_marking)
2488 : return;
2489 2 : v8::HandleScope scope(CcTest::isolate());
2490 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2491 : // Grow new space unitl maximum capacity reached.
2492 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2493 8 : CcTest::heap()->new_space()->Grow();
2494 : }
2495 :
2496 : i::ScopedVector<char> source(1024);
2497 : i::SNPrintF(source,
2498 : "var number_elements = %d;"
2499 : "var elements = new Array(number_elements);"
2500 : "function f() {"
2501 : " for (var i = 0; i < number_elements; i++) {"
2502 : " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2503 : " }"
2504 : " return elements[number_elements - 1];"
2505 : "};"
2506 : "f(); gc();"
2507 : "f(); f();"
2508 : "%%OptimizeFunctionOnNextCall(f);"
2509 : "f();",
2510 2 : kPretenureCreationCount);
2511 :
2512 : v8::Local<v8::Value> res = CompileRun(source.start());
2513 :
2514 : v8::Local<v8::Value> int_array =
2515 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2516 : i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2517 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2518 : v8::Local<v8::Value> double_array =
2519 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2520 : i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2521 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2522 :
2523 : Handle<JSObject> o = Handle<JSObject>::cast(
2524 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2525 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2526 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2527 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2528 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2529 6 : CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2530 : }
2531 :
2532 :
2533 23723 : TEST(OptimizedPretenuringNestedObjectLiterals) {
2534 5 : FLAG_allow_natives_syntax = true;
2535 5 : FLAG_expose_gc = true;
2536 5 : CcTest::InitializeVM();
2537 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2538 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2539 : FLAG_stress_incremental_marking)
2540 : return;
2541 2 : v8::HandleScope scope(CcTest::isolate());
2542 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2543 : // Grow new space unitl maximum capacity reached.
2544 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2545 8 : CcTest::heap()->new_space()->Grow();
2546 : }
2547 :
2548 : i::ScopedVector<char> source(1024);
2549 : i::SNPrintF(source,
2550 : "var number_elements = %d;"
2551 : "var elements = new Array(number_elements);"
2552 : "function f() {"
2553 : " for (var i = 0; i < number_elements; i++) {"
2554 : " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2555 : " }"
2556 : " return elements[number_elements - 1];"
2557 : "};"
2558 : "f(); gc();"
2559 : "f(); f();"
2560 : "%%OptimizeFunctionOnNextCall(f);"
2561 : "f();",
2562 2 : kPretenureCreationCount);
2563 :
2564 : v8::Local<v8::Value> res = CompileRun(source.start());
2565 :
2566 : v8::Local<v8::Value> int_array_1 =
2567 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2568 : Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
2569 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
2570 : v8::Local<v8::Value> int_array_2 =
2571 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2572 : Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
2573 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));
2574 :
2575 : Handle<JSObject> o = Handle<JSObject>::cast(
2576 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2577 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2578 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
2579 4 : CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
2580 4 : CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
2581 6 : CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
2582 : }
2583 :
2584 :
2585 23723 : TEST(OptimizedPretenuringNestedDoubleLiterals) {
2586 5 : FLAG_allow_natives_syntax = true;
2587 5 : FLAG_expose_gc = true;
2588 5 : CcTest::InitializeVM();
2589 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2590 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2591 : FLAG_stress_incremental_marking)
2592 : return;
2593 2 : v8::HandleScope scope(CcTest::isolate());
2594 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2595 : // Grow new space unitl maximum capacity reached.
2596 22 : while (!CcTest::heap()->new_space()->IsAtMaximumCapacity()) {
2597 8 : CcTest::heap()->new_space()->Grow();
2598 : }
2599 :
2600 : i::ScopedVector<char> source(1024);
2601 : i::SNPrintF(source,
2602 : "var number_elements = %d;"
2603 : "var elements = new Array(number_elements);"
2604 : "function f() {"
2605 : " for (var i = 0; i < number_elements; i++) {"
2606 : " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2607 : " }"
2608 : " return elements[number_elements - 1];"
2609 : "};"
2610 : "f(); gc();"
2611 : "f(); f();"
2612 : "%%OptimizeFunctionOnNextCall(f);"
2613 : "f();",
2614 2 : kPretenureCreationCount);
2615 :
2616 : v8::Local<v8::Value> res = CompileRun(source.start());
2617 :
2618 : v8::Local<v8::Value> double_array_1 =
2619 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2620 : i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
2621 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
2622 : v8::Local<v8::Value> double_array_2 =
2623 6 : v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2624 : i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
2625 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));
2626 :
2627 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2628 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2629 4 : CHECK(CcTest::heap()->InOldSpace(*o));
2630 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
2631 4 : CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
2632 4 : CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
2633 6 : CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
2634 : }
2635 :
2636 :
2637 : // Test regular array literals allocation.
2638 23723 : TEST(OptimizedAllocationArrayLiterals) {
2639 5 : FLAG_allow_natives_syntax = true;
2640 5 : CcTest::InitializeVM();
2641 8 : if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2642 3 : if (FLAG_gc_global || FLAG_stress_compaction ||
2643 : FLAG_stress_incremental_marking)
2644 : return;
2645 2 : v8::HandleScope scope(CcTest::isolate());
2646 2 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2647 : v8::Local<v8::Value> res = CompileRun(
2648 : "function f() {"
2649 : " var numbers = new Array(1, 2, 3);"
2650 : " numbers[0] = 3.14;"
2651 : " return numbers;"
2652 : "};"
2653 : "f(); f(); f();"
2654 : "%OptimizeFunctionOnNextCall(f);"
2655 : "f();");
2656 8 : CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
2657 : ->Get(ctx, v8_str("0"))
2658 : .ToLocalChecked()
2659 : ->Int32Value(ctx)
2660 : .FromJust());
2661 :
2662 : i::Handle<JSObject> o = Handle<JSObject>::cast(
2663 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2664 :
2665 4 : CHECK(CcTest::heap()->InNewSpace(o->elements()));
2666 : }
2667 :
2668 :
2669 10 : static int CountMapTransitions(Map* map) {
2670 : DisallowHeapAllocation no_gc;
2671 10 : return TransitionsAccessor(map, &no_gc).NumberOfTransitions();
2672 : }
2673 :
2674 :
2675 : // Test that map transitions are cleared and maps are collected with
2676 : // incremental marking as well.
TEST(Regress1465) {
  if (!FLAG_incremental_marking) return;
  // Disable stress modes that would interfere with the controlled marking
  // sequence below, and drop dead maps immediately instead of retaining them.
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  FLAG_allow_natives_syntax = true;
  FLAG_trace_incremental_marking = true;
  FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  static const int transitions_count = 256;

  CompileRun("function F() {}");
  {
    // Create one map transition per distinct property name off F's initial
    // map. AlwaysAllocateScope keeps allocation from triggering a GC here.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    for (int i = 0; i < transitions_count; i++) {
      EmbeddedVector<char, 64> buffer;
      SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
      CompileRun(buffer.start());
    }
    CompileRun("var root = new F;");
  }

  i::Handle<JSReceiver> root =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(transitions_count, transitions_before);

  // Run a full incremental-marking cycle followed by a full GC so that
  // unreferenced transition targets can be collected.
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(root->map());
  CompileRun("%DebugPrint(root);");
  CHECK_EQ(1, transitions_after);
}
2718 :
2719 :
2720 : #ifdef DEBUG
2721 : static void AddTransitions(int transitions_count) {
2722 : AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2723 : for (int i = 0; i < transitions_count; i++) {
2724 : EmbeddedVector<char, 64> buffer;
2725 : SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2726 : CompileRun(buffer.start());
2727 : }
2728 : }
2729 :
2730 :
2731 : static i::Handle<JSObject> GetByName(const char* name) {
2732 : return i::Handle<JSObject>::cast(
2733 : v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2734 : CcTest::global()
2735 : ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
2736 : .ToLocalChecked())));
2737 : }
2738 :
2739 :
// Adds a smi-valued property |property_name| to |object| while arming the
// heap to trigger a (global) GC after |gc_count| allocations, so the GC
// fires in the middle of the property addition.
static void AddPropertyTo(
    int gc_count, Handle<JSObject> object, const char* property_name) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
  Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
  // Force full GCs every |gc_count| allocations and drop dead maps right
  // away instead of retaining them across GCs.
  FLAG_gc_interval = gc_count;
  FLAG_gc_global = true;
  FLAG_retain_maps_for_n_gc = 0;
  CcTest::heap()->set_allocation_timeout(gc_count);
  JSReceiver::SetProperty(object, prop_name, twenty_three,
                          LanguageMode::kSloppy)
      .Check();
}
2754 :
2755 :
2756 : TEST(TransitionArrayShrinksDuringAllocToZero) {
2757 : FLAG_stress_compaction = false;
2758 : FLAG_stress_incremental_marking = false;
2759 : FLAG_allow_natives_syntax = true;
2760 : CcTest::InitializeVM();
2761 : v8::HandleScope scope(CcTest::isolate());
2762 : static const int transitions_count = 10;
2763 : CompileRun("function F() { }");
2764 : AddTransitions(transitions_count);
2765 : CompileRun("var root = new F;");
2766 : Handle<JSObject> root = GetByName("root");
2767 :
2768 : // Count number of live transitions before marking.
2769 : int transitions_before = CountMapTransitions(root->map());
2770 : CHECK_EQ(transitions_count, transitions_before);
2771 :
2772 : // Get rid of o
2773 : CompileRun("o = new F;"
2774 : "root = new F");
2775 : root = GetByName("root");
2776 : AddPropertyTo(2, root, "funny");
2777 : CcTest::CollectGarbage(NEW_SPACE);
2778 :
2779 : // Count number of live transitions after marking. Note that one transition
2780 : // is left, because 'o' still holds an instance of one transition target.
2781 : int transitions_after = CountMapTransitions(
2782 : Map::cast(root->map()->GetBackPointer()));
2783 : CHECK_EQ(1, transitions_after);
2784 : }
2785 :
2786 :
2787 : TEST(TransitionArrayShrinksDuringAllocToOne) {
2788 : FLAG_stress_compaction = false;
2789 : FLAG_stress_incremental_marking = false;
2790 : FLAG_allow_natives_syntax = true;
2791 : CcTest::InitializeVM();
2792 : v8::HandleScope scope(CcTest::isolate());
2793 : static const int transitions_count = 10;
2794 : CompileRun("function F() {}");
2795 : AddTransitions(transitions_count);
2796 : CompileRun("var root = new F;");
2797 : Handle<JSObject> root = GetByName("root");
2798 :
2799 : // Count number of live transitions before marking.
2800 : int transitions_before = CountMapTransitions(root->map());
2801 : CHECK_EQ(transitions_count, transitions_before);
2802 :
2803 : root = GetByName("root");
2804 : AddPropertyTo(2, root, "funny");
2805 : CcTest::CollectGarbage(NEW_SPACE);
2806 :
2807 : // Count number of live transitions after marking. Note that one transition
2808 : // is left, because 'o' still holds an instance of one transition target.
2809 : int transitions_after = CountMapTransitions(
2810 : Map::cast(root->map()->GetBackPointer()));
2811 : CHECK_EQ(2, transitions_after);
2812 : }
2813 :
2814 :
// Like the tests above, but the property added ("prop9") already has a
// transition, so no new transition is created during the forced GC.
TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 10;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  root = GetByName("root");
  // gc_count of 0 arms an immediate GC during the property addition.
  AddPropertyTo(0, root, "prop9");
  CcTest::CollectGarbage(OLD_SPACE);

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'o' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
2841 :
2842 :
// Checks that a map whose single transition uses the simple weak-cell
// encoding survives a property addition with a forced GC in the middle.
TEST(TransitionArraySimpleToFull) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  static const int transitions_count = 1;
  CompileRun("function F() {}");
  AddTransitions(transitions_count);
  CompileRun("var root = new F;");
  Handle<JSObject> root = GetByName("root");

  // Count number of live transitions before marking.
  int transitions_before = CountMapTransitions(root->map());
  CHECK_EQ(transitions_count, transitions_before);

  CompileRun("o = new F;"
             "root = new F");
  root = GetByName("root");
  {
    // With exactly one transition, the map should use the compact
    // weak-cell encoding rather than a full transition array.
    DisallowHeapAllocation no_gc;
    CHECK(TestTransitionsAccessor(root->map(), &no_gc).IsWeakCellEncoding());
  }
  AddPropertyTo(2, root, "happy");

  // Count number of live transitions after marking. Note that one transition
  // is left, because 'root' still holds an instance of one transition target.
  int transitions_after = CountMapTransitions(
      Map::cast(root->map()->GetBackPointer()));
  CHECK_EQ(1, transitions_after);
}
2874 : #endif // DEBUG
2875 :
2876 :
// Checks that old-space pages that become mostly free are released back to
// the OS over successive GCs, instead of staying reserved forever.
TEST(ReleaseOverReservedPages) {
  if (FLAG_never_compact) return;
  FLAG_trace_gc = true;
  // The optimizer can allocate stuff, messing up the test.
  FLAG_opt = false;
  FLAG_always_opt = false;
  // Parallel compaction increases fragmentation, depending on how existing
  // memory is distributed. Since this is non-deterministic because of
  // concurrent sweeping, we disable it for this test.
  FLAG_parallel_compaction = false;
  FLAG_concurrent_marking = false;
  // Concurrent sweeping adds non determinism, depending on when memory is
  // available for further reuse.
  FLAG_concurrent_sweeping = false;
  // Fast evacuation of pages may result in a different page count in old space.
  FLAG_page_promotion = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  // If there's snapshot available, we don't know whether 20 small arrays will
  // fit on the initial pages.
  if (!isolate->snapshot_available()) return;
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(CcTest::isolate());
  static const int number_of_test_pages = 20;

  // Prepare many pages with low live-bytes count. Each iteration fills the
  // current page (all garbage after the loop) and then allocates one tiny
  // live array, forcing a fresh page for the next iteration.
  PagedSpace* old_space = heap->old_space();
  const int initial_page_count = old_space->CountTotalPages();
  const int overall_page_count = number_of_test_pages + initial_page_count;
  for (int i = 0; i < number_of_test_pages; i++) {
    AlwaysAllocateScope always_allocate(isolate);
    heap::SimulateFullSpace(old_space);
    factory->NewFixedArray(1, TENURED);
  }
  CHECK_EQ(overall_page_count, old_space->CountTotalPages());

  // Triggering one GC will cause a lot of garbage to be discovered but
  // even spread across all allocated pages.
  CcTest::CollectAllGarbage();
  CHECK_GE(overall_page_count, old_space->CountTotalPages());

  // Triggering subsequent GCs should cause at least half of the pages
  // to be released to the OS after at most two cycles.
  CcTest::CollectAllGarbage();
  CHECK_GE(overall_page_count, old_space->CountTotalPages());
  CcTest::CollectAllGarbage();
  CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);

  // Triggering a last-resort GC should cause all pages to be released to the
  // OS so that other processes can seize the memory. If we get a failure here
  // where there are 2 pages left instead of 1, then we should increase the
  // size of the first page a little in SizeOfFirstPage in spaces.cc. The
  // first page should be small in order to reduce memory used when the VM
  // boots, but if the 20 small arrays don't fit on the first page then that's
  // an indication that it is too small.
  CcTest::CollectAllAvailableGarbage();
  CHECK_EQ(initial_page_count, old_space->CountTotalPages());
}
2936 :
2937 : static int forced_gc_counter = 0;
2938 :
2939 10 : void MockUseCounterCallback(v8::Isolate* isolate,
2940 : v8::Isolate::UseCounterFeature feature) {
2941 10 : isolate->GetCurrentContext();
2942 10 : if (feature == v8::Isolate::kForcedGC) {
2943 5 : forced_gc_counter++;
2944 : }
2945 10 : }
2946 :
2947 :
2948 23723 : TEST(CountForcedGC) {
2949 5 : FLAG_expose_gc = true;
2950 5 : CcTest::InitializeVM();
2951 : Isolate* isolate = CcTest::i_isolate();
2952 5 : v8::HandleScope scope(CcTest::isolate());
2953 :
2954 5 : isolate->SetUseCounterCallback(MockUseCounterCallback);
2955 :
2956 5 : forced_gc_counter = 0;
2957 : const char* source = "gc();";
2958 : CompileRun(source);
2959 5 : CHECK_GT(forced_gc_counter, 0);
2960 5 : }
2961 :
2962 :
2963 : #ifdef OBJECT_PRINT
2964 : TEST(PrintSharedFunctionInfo) {
2965 : CcTest::InitializeVM();
2966 : v8::HandleScope scope(CcTest::isolate());
2967 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2968 : const char* source = "f = function() { return 987654321; }\n"
2969 : "g = function() { return 123456789; }\n";
2970 : CompileRun(source);
2971 : i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
2972 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2973 : CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
2974 :
2975 : OFStream os(stdout);
2976 : g->shared()->Print(os);
2977 : os << std::endl;
2978 : }
2979 : #endif // OBJECT_PRINT
2980 :
2981 :
// Checks that the WeakCells recorded in a function's feedback vector for
// two called closures are not cleared by incremental marking + full GC
// while the closures are still reachable.
TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> fun1, fun2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  {
    CompileRun("function fun() {};");
    fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  {
    // Redefines 'fun', producing a second, distinct closure.
    CompileRun("function fun() {};");
    fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
  }

  // Prepare function f that contains type feedback for the two closures.
  CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
  CompileRun("function f(a, b) { a(); b(); } f(fun1, fun2);");

  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  Handle<FeedbackVector> feedback_vector(f->feedback_vector());
  FeedbackVectorHelper feedback_helper(feedback_vector);

  // One call slot per call site; both must hold WeakCells to the callees.
  int expected_slots = 2;
  CHECK_EQ(expected_slots, feedback_helper.slot_count());
  int slot1 = 0;
  int slot2 = 1;
  CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
  CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());

  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // The closures are still reachable, so the weak cells must survive.
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
             ->cleared());
  CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
             ->cleared());
}
3026 :
3027 :
3028 24 : static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3029 : InlineCacheState desired_state) {
3030 : Handle<FeedbackVector> vector = Handle<FeedbackVector>(f->feedback_vector());
3031 24 : FeedbackVectorHelper helper(vector);
3032 : FeedbackSlot slot = helper.slot(slot_index);
3033 24 : if (vector->IsLoadIC(slot)) {
3034 : LoadICNexus nexus(vector, slot);
3035 24 : CHECK(nexus.StateFromFeedback() == desired_state);
3036 : } else {
3037 0 : CHECK(vector->IsKeyedLoadIC(slot));
3038 : KeyedLoadICNexus nexus(vector, slot);
3039 0 : CHECK(nexus.StateFromFeedback() == desired_state);
3040 : }
3041 24 : }
3042 :
3043 23723 : TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3044 6 : if (!FLAG_incremental_marking) return;
3045 5 : if (FLAG_always_opt) return;
3046 4 : CcTest::InitializeVM();
3047 4 : v8::HandleScope scope(CcTest::isolate());
3048 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3049 : // Prepare function f that contains a monomorphic IC for object
3050 : // originating from the same native context.
3051 : CompileRun(
3052 : "function fun() { this.x = 1; };"
3053 : "function f(o) { return new o(); } f(fun); f(fun);");
3054 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3055 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3056 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3057 :
3058 : Handle<FeedbackVector> vector(f->feedback_vector());
3059 4 : CHECK(vector->Get(FeedbackSlot(0))->IsWeakCell());
3060 :
3061 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3062 4 : CcTest::CollectAllGarbage();
3063 :
3064 4 : CHECK(vector->Get(FeedbackSlot(0))->IsWeakCell());
3065 : }
3066 :
3067 23723 : TEST(IncrementalMarkingPreservesMonomorphicIC) {
3068 6 : if (!FLAG_incremental_marking) return;
3069 5 : if (FLAG_always_opt) return;
3070 4 : CcTest::InitializeVM();
3071 4 : v8::HandleScope scope(CcTest::isolate());
3072 4 : v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3073 : // Prepare function f that contains a monomorphic IC for object
3074 : // originating from the same native context.
3075 : CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3076 : "function f(o) { return o.x; } f(obj); f(obj);");
3077 : Handle<JSFunction> f = Handle<JSFunction>::cast(
3078 : v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3079 16 : CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3080 :
3081 4 : CheckVectorIC(f, 0, MONOMORPHIC);
3082 :
3083 4 : heap::SimulateIncrementalMarking(CcTest::heap());
3084 4 : CcTest::CollectAllGarbage();
3085 :
3086 4 : CheckVectorIC(f, 0, MONOMORPHIC);
3087 : }
3088 :
// Checks that a polymorphic load IC (fed from two native contexts) keeps
// its state across incremental marking plus a full GC.
TEST(IncrementalMarkingPreservesPolymorphicIC) {
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Simulate an incremental-marking cycle followed by a full GC; the
  // polymorphic feedback must survive.
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  CheckVectorIC(f, 0, POLYMORPHIC);
}
3126 :
// Checks that a context-dispose notification does not cause the GC to
// clear a polymorphic load IC whose feedback is still reachable.
TEST(ContextDisposeDoesntClearPolymorphicIC) {
  if (!FLAG_incremental_marking) return;
  if (FLAG_always_opt) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Value> obj1, obj2;
  v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
    obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  {
    LocalContext env;
    CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
    obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
  }

  // Prepare function f that contains a polymorphic IC for objects
  // originating from two different native contexts.
  CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
  CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
  CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
  Handle<JSFunction> f = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));

  CheckVectorIC(f, 0, POLYMORPHIC);

  // Fire context dispose notification.
  CcTest::isolate()->ContextDisposedNotification();
  heap::SimulateIncrementalMarking(CcTest::heap());
  CcTest::CollectAllGarbage();

  // The polymorphic feedback must survive the dispose + GC cycle.
  CheckVectorIC(f, 0, POLYMORPHIC);
}
3165 :
3166 :
3167 48 : class SourceResource : public v8::String::ExternalOneByteStringResource {
3168 : public:
3169 : explicit SourceResource(const char* data)
3170 24 : : data_(data), length_(strlen(data)) { }
3171 :
3172 24 : virtual void Dispose() {
3173 24 : i::DeleteArray(data_);
3174 24 : data_ = nullptr;
3175 24 : }
3176 :
3177 288 : const char* data() const { return data_; }
3178 :
3179 72 : size_t length() const { return length_; }
3180 :
3181 48 : bool IsDisposed() { return data_ == nullptr; }
3182 :
3183 : private:
3184 : const char* data_;
3185 : size_t length_;
3186 : };
3187 :
3188 :
// Runs |source| (which stores an error in 'error'), then evaluates
// |accessor| (touching error.stack) and verifies that the external source
// string retained for the stack trace is released afterwards.
void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
                               const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use external string
  // to check whether the data is being released since the external string
  // resource's callback is fired when the external string is GC'ed.
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
  v8::HandleScope scope(isolate);
  SourceResource* resource = new SourceResource(i::StrDup(source));
  {
    v8::HandleScope scope(isolate);
    v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
    v8::Local<v8::String> source_string =
        v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
    i_isolate->heap()->CollectAllAvailableGarbage(
        i::GarbageCollectionReason::kTesting);
    v8::Script::Compile(ctx, source_string)
        .ToLocalChecked()
        ->Run(ctx)
        .ToLocalChecked();
    // The pending stack-trace data still references the source here.
    CHECK(!resource->IsDisposed());
  }
  // i_isolate->heap()->CollectAllAvailableGarbage();
  CHECK(!resource->IsDisposed());

  // Touching the stack accessor is expected to drop the retained data.
  CompileRun(accessor);
  i_isolate->heap()->CollectAllAvailableGarbage(
      i::GarbageCollectionReason::kTesting);

  // External source has been released.
  CHECK(resource->IsDisposed());
  delete resource;
}
3222 :
3223 :
// Exercises ReleaseStackTraceDataTest with several error shapes (normal
// error, stack overflow, and both used as a prototype) through both the
// stack getter and setter, on a fresh isolate.
UNINITIALIZED_TEST(ReleaseStackTraceData) {
  if (FLAG_always_opt) {
    // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
    // See: https://codereview.chromium.org/181833004/
    return;
  }
  FLAG_use_ic = false;  // ICs retain objects.
  FLAG_concurrent_recompilation = false;
  // Use a dedicated isolate so retained objects from other tests cannot
  // interfere with the release checks.
  v8::Isolate::CreateParams create_params;
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  {
    v8::Isolate::Scope isolate_scope(isolate);
    v8::HandleScope handle_scope(isolate);
    v8::Context::New(isolate)->Enter();
    static const char* source1 = "var error = null; "
    /* Normal Error */ "try { "
                       " throw new Error(); "
                       "} catch (e) { "
                       " error = e; "
                       "} ";
    static const char* source2 = "var error = null; "
    /* Stack overflow */ "try { "
                         " (function f() { f(); })(); "
                         "} catch (e) { "
                         " error = e; "
                         "} ";
    static const char* source3 = "var error = null; "
    /* Normal Error */ "try { "
    /* as prototype */ " throw new Error(); "
                       "} catch (e) { "
                       " error = {}; "
                       " error.__proto__ = e; "
                       "} ";
    static const char* source4 = "var error = null; "
    /* Stack overflow */ "try { "
    /* as prototype */ " (function f() { f(); })(); "
                       "} catch (e) { "
                       " error = {}; "
                       " error.__proto__ = e; "
                       "} ";
    static const char* getter = "error.stack";
    static const char* setter = "error.stack = 0";

    ReleaseStackTraceDataTest(isolate, source1, setter);
    ReleaseStackTraceDataTest(isolate, source2, setter);
    // We do not test source3 and source4 with setter, since the setter is
    // supposed to (untypically) write to the receiver, not the holder. This is
    // to emulate the behavior of a data property.

    ReleaseStackTraceDataTest(isolate, source1, getter);
    ReleaseStackTraceDataTest(isolate, source2, getter);
    ReleaseStackTraceDataTest(isolate, source3, getter);
    ReleaseStackTraceDataTest(isolate, source4, getter);
  }
  isolate->Dispose();
}
3281 :
// Regression test for issue 169928: carefully lays out new space so that the
// array named "mote" is followed only by a filler (no valid
// AllocationMemento), then runs code that inspects the memento area. On a
// build with the bug this dereferenced invalid memory; see the comment at the
// bottom of the test.
TEST(Regress169928) {
  FLAG_allow_natives_syntax = true;
  FLAG_opt = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  LocalContext env;
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Some flags turn Scavenge collections into Mark-sweep collections
  // and hence are incompatible with this test case.
  if (FLAG_gc_global || FLAG_stress_compaction ||
      FLAG_stress_incremental_marking)
    return;

  // Prepare the environment
  CompileRun("function fastliteralcase(literal, value) {"
             " literal[0] = value;"
             " return literal;"
             "}"
             "function get_standard_literal() {"
             " var literal = [1, 2, 3];"
             " return literal;"
             "}"
             "obj = fastliteralcase(get_standard_literal(), 1);"
             "obj = fastliteralcase(get_standard_literal(), 1.5);"
             "obj = fastliteralcase(get_standard_literal(), 2);");

  // prepare the heap
  v8::Local<v8::String> mote_code_string =
      v8_str("fastliteralcase(mote, 2.5);");

  v8::Local<v8::String> array_name = v8_str("mote");
  CHECK(CcTest::global()
            ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
            .FromJust());

  // First make sure we flip spaces
  CcTest::CollectGarbage(NEW_SPACE);

  // Allocate the object.
  Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
  array_data->set(0, Smi::FromInt(1));
  array_data->set(1, Smi::FromInt(2));

  // Fill new space so the JSArray allocated next lands right before the
  // remaining gap of memento-size + one pointer.
  heap::AllocateAllButNBytes(
      CcTest::heap()->new_space(),
      JSArray::kSize + AllocationMemento::kSize + kPointerSize);

  Handle<JSArray> array =
      factory->NewJSArrayWithElements(array_data, PACKED_SMI_ELEMENTS);

  CHECK_EQ(Smi::FromInt(2), array->length());
  CHECK(array->HasSmiOrObjectElements());

  // We need a filler the size of an AllocationMemento object, plus an extra
  // fill pointer value.
  HeapObject* obj = nullptr;
  AllocationResult allocation =
      CcTest::heap()->new_space()->AllocateRawUnaligned(
          AllocationMemento::kSize + kPointerSize);
  CHECK(allocation.To(&obj));
  Address addr_obj = obj->address();
  CcTest::heap()->CreateFillerObjectAt(addr_obj,
                                       AllocationMemento::kSize + kPointerSize,
                                       ClearRecordedSlots::kNo);

  // Give the array a name, making sure not to allocate strings.
  v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
  CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());

  // This should crash with a protection violation if we are running a build
  // with the bug.
  AlwaysAllocateScope aa_scope(isolate);
  v8::Script::Compile(env.local(), mote_code_string)
      .ToLocalChecked()
      ->Run(env.local())
      .ToLocalChecked();
}
3361 :
// Verifies that the write barrier records slots in large objects while
// incremental marking is active: pointers stored from a large-object-space
// array into an object on an evacuation candidate must be updated when the
// candidate is evacuated by a full GC.
TEST(LargeObjectSlotRecording) {
  if (!FLAG_incremental_marking) return;
  if (FLAG_never_compact) return;
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create an object on an evacuation candidate.
  heap::SimulateFullSpace(heap->old_space());
  Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
  Page* evac_page = Page::FromAddress(lit->address());
  heap::ForceEvacuationCandidate(evac_page);
  FixedArray* old_location = *lit;

  // Allocate a large object.
  int size = Max(1000000, kMaxRegularHeapObjectSize + KB);
  CHECK_LT(kMaxRegularHeapObjectSize, size);
  Handle<FixedArray> lo = isolate->factory()->NewFixedArray(size, TENURED);
  CHECK(heap->lo_space()->Contains(*lo));

  // Start incremental marking to activate the write barrier.
  heap::SimulateIncrementalMarking(heap, false);
  heap->incremental_marking()->AdvanceIncrementalMarking(
      10000000, IncrementalMarking::NO_GC_VIA_STACK_GUARD, StepOrigin::kV8);

  // Create references from the large object to the object on the evacuation
  // candidate.
  const int kStep = size / 10;
  for (int i = 0; i < size; i += kStep) {
    lo->set(i, *lit);
    CHECK(lo->get(i) == old_location);
  }

  // Move the evacuation candidate object.
  CcTest::CollectAllGarbage();

  // Verify that the pointers in the large object got updated.
  for (int i = 0; i < size; i += kStep) {
    CHECK_EQ(lo->get(i), *lit);
    CHECK(lo->get(i) != old_location);
  }
}
3407 :
// Root visitor that ignores every root; used below only to drive the
// handle-scope-implementer iteration without observing anything.
class DummyVisitor : public RootVisitor {
 public:
  void VisitRootPointers(Root root, Object** start, Object** end) override {}
};
3412 :
3413 :
// Fills the current handle block exactly to its limit, then opens a
// DeferredHandleScope and checks that iterating the handle scope
// implementer works in that state (Detach/delete must not crash).
TEST(DeferredHandles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
  HandleScopeData* data = isolate->handle_scope_data();
  Handle<Object> init(heap->empty_string(), isolate);
  // Allocate handles until the current block is exactly full.
  while (data->next < data->limit) {
    Handle<Object> obj(heap->empty_string(), isolate);
  }
  // An entire block of handles has been filled.
  // Next handle would require a new block.
  CHECK(data->next == data->limit);

  DeferredHandleScope deferred(isolate);
  DummyVisitor visitor;
  isolate->handle_scope_implementer()->Iterate(&visitor);
  delete deferred.Detach();
}
3433 :
3434 :
// Checks that a single large incremental-marking step (100 MB worth) is
// enough to mark a ~10M-element array, i.e. big steps make big progress.
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function f(n) {"
             " var a = new Array(n);"
             " for (var i = 0; i < n; i += 100) a[i] = i;"
             "};"
             "f(10 * 1024 * 1024);");
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking(
        i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
  }
  // This big step should be sufficient to mark the whole array.
  marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                StepOrigin::kV8);
  CHECK(marking->IsComplete() ||
        marking->IsReadyToOverApproximateWeakClosure());
}
3456 :
3457 :
// Runs the same optimize/deoptimize allocation workload with inline
// allocation enabled, then disabled, then re-enabled; the test passes if
// none of the three configurations crashes.
TEST(DisableInlineAllocation) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun("function test() {"
             " var x = [];"
             " for (var i = 0; i < 10; i++) {"
             " x[i] = [ {}, [1,2,3], [1,x,3] ];"
             " }"
             "}"
             "function run() {"
             " %OptimizeFunctionOnNextCall(test);"
             " test();"
             " %DeoptimizeFunction(test);"
             "}");

  // Warm-up with inline allocation enabled.
  CompileRun("test(); test(); run();");

  // Run test with inline allocation disabled.
  CcTest::heap()->DisableInlineAllocation();
  CompileRun("run()");

  // Run test with inline allocation re-enabled.
  CcTest::heap()->EnableInlineAllocation();
  CompileRun("run()");
}
3485 :
3486 :
3487 171 : static int AllocationSitesCount(Heap* heap) {
3488 : int count = 0;
3489 1895 : for (Object* site = heap->allocation_sites_list();
3490 : !(site->IsUndefined(heap->isolate()));
3491 : site = AllocationSite::cast(site)->weak_next()) {
3492 1553 : count++;
3493 : }
3494 171 : return count;
3495 : }
3496 :
3497 :
// Checks that code registered in an AllocationSite's dependent_code() is
// held only weakly: after the optimized function dies, GC clears the weak
// cells even though the site itself is kept alive via a global handle.
TEST(EnsureAllocationSiteDependentCodesProcessed) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  GlobalHandles* global_handles = isolate->global_handles();

  if (!isolate->use_optimizer()) return;

  // The allocation site at the head of the list is ours.
  Handle<AllocationSite> site;
  {
    LocalContext context;
    v8::HandleScope scope(context->GetIsolate());

    int count = AllocationSitesCount(heap);
    CompileRun("var bar = function() { return (new Array()); };"
               "var a = bar();"
               "bar();"
               "bar();");

    // One allocation site should have been created.
    int new_count = AllocationSitesCount(heap);
    CHECK_EQ(new_count, (count + 1));
    // Pin the site with a global handle so it survives the GCs below.
    site = Handle<AllocationSite>::cast(
        global_handles->Create(
            AllocationSite::cast(heap->allocation_sites_list())));

    CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");

    Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
        v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
            CcTest::global()
                ->Get(context.local(), v8_str("bar"))
                .ToLocalChecked())));

    // Walk the site's dependent-code list; every entry must be a weak cell
    // pointing at bar's optimized code, in one of the two expected groups.
    int dependency_group_count = 0;
    DependentCode* dependency = site->dependent_code();
    while (dependency != heap->empty_fixed_array()) {
      CHECK(dependency->group() ==
                DependentCode::kAllocationSiteTransitionChangedGroup ||
            dependency->group() ==
                DependentCode::kAllocationSiteTenuringChangedGroup);
      CHECK_EQ(1, dependency->count());
      CHECK(dependency->object_at(0)->IsWeakCell());
      Code* function_bar =
          Code::cast(WeakCell::cast(dependency->object_at(0))->value());
      CHECK_EQ(bar_handle->code(), function_bar);
      dependency = dependency->next_link();
      dependency_group_count++;
    }
    // Expect a dependent code object for transitioning and pretenuring.
    CHECK_EQ(2, dependency_group_count);
  }

  // Now make sure that a gc should get rid of the function, even though we
  // still have the allocation site alive.
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  // The site still exists because of our global handle, but the code is no
  // longer referred to by dependent_code().
  CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
        WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
}
3565 :
3566 23723 : TEST(AllocationSiteCreation) {
3567 5 : FLAG_always_opt = false;
3568 5 : CcTest::InitializeVM();
3569 : Isolate* isolate = CcTest::i_isolate();
3570 5 : Heap* heap = isolate->heap();
3571 : HandleScope scope(isolate);
3572 :
3573 : int prev_count = 0;
3574 : int count = 0;
3575 :
3576 : // Array literals.
3577 5 : prev_count = AllocationSitesCount(heap);
3578 : CompileRun("(function f1() { return []; })()");
3579 5 : count = AllocationSitesCount(heap);
3580 5 : CHECK_EQ(1, count - prev_count);
3581 :
3582 : prev_count = count;
3583 : CompileRun("(function f2() { return [1, 2]; })()");
3584 5 : count = AllocationSitesCount(heap);
3585 5 : CHECK_EQ(1, count - prev_count);
3586 :
3587 : prev_count = count;
3588 : CompileRun("(function f3() { return [[1], [2]]; })()");
3589 5 : count = AllocationSitesCount(heap);
3590 5 : CHECK_EQ(3, count - prev_count);
3591 :
3592 : prev_count = count;
3593 : CompileRun(
3594 : "(function f4() { "
3595 : "return [0, [1, 1.1, 1.2, "
3596 : "], 1.5, [2.1, 2.2], 3];"
3597 : "})()");
3598 5 : count = AllocationSitesCount(heap);
3599 5 : CHECK_EQ(3, count - prev_count);
3600 :
3601 : // Object literals have lazy AllocationSites
3602 5 : prev_count = AllocationSitesCount(heap);
3603 : CompileRun("function f5() { return {}; }; f5(); ");
3604 5 : count = AllocationSitesCount(heap);
3605 5 : CHECK_EQ(0, count - prev_count);
3606 : // No AllocationSites are created for the empty object literal.
3607 25 : for (int i = 0; i < 5; i++) {
3608 25 : prev_count = AllocationSitesCount(heap);
3609 : CompileRun("f5(); ");
3610 25 : count = AllocationSitesCount(heap);
3611 25 : CHECK_EQ(0, count - prev_count);
3612 : }
3613 :
3614 5 : prev_count = AllocationSitesCount(heap);
3615 : CompileRun("function f6() { return {a:1}; }; f6(); ");
3616 5 : count = AllocationSitesCount(heap);
3617 5 : CHECK_EQ(0, count - prev_count);
3618 5 : prev_count = AllocationSitesCount(heap);
3619 : CompileRun("f6(); ");
3620 5 : count = AllocationSitesCount(heap);
3621 5 : CHECK_EQ(1, count - prev_count);
3622 :
3623 5 : prev_count = AllocationSitesCount(heap);
3624 : CompileRun("function f7() { return {a:1, b:2}; }; f7(); ");
3625 5 : count = AllocationSitesCount(heap);
3626 5 : CHECK_EQ(0, count - prev_count);
3627 5 : prev_count = AllocationSitesCount(heap);
3628 : CompileRun("f7(); ");
3629 5 : count = AllocationSitesCount(heap);
3630 5 : CHECK_EQ(1, count - prev_count);
3631 :
3632 5 : prev_count = AllocationSitesCount(heap);
3633 : CompileRun(
3634 : "function f8() {"
3635 : "return {a:{}, b:{ a:2, c:{ d:{f:{}}} } }; "
3636 : "}; f8(); ");
3637 5 : count = AllocationSitesCount(heap);
3638 5 : CHECK_EQ(0, count - prev_count);
3639 5 : prev_count = AllocationSitesCount(heap);
3640 : CompileRun("f8(); ");
3641 5 : count = AllocationSitesCount(heap);
3642 5 : CHECK_EQ(6, count - prev_count);
3643 :
3644 : // We currently eagerly create allocation sites if there are sub-arrays.
3645 5 : prev_count = AllocationSitesCount(heap);
3646 : CompileRun(
3647 : "function f9() {"
3648 : "return {a:[1, 2, 3], b:{ a:2, c:{ d:{f:[]} } }}; "
3649 : "}; f9(); ");
3650 5 : count = AllocationSitesCount(heap);
3651 5 : CHECK_EQ(6, count - prev_count);
3652 5 : prev_count = AllocationSitesCount(heap);
3653 : CompileRun("f9(); ");
3654 5 : count = AllocationSitesCount(heap);
3655 : // No new AllocationSites created on the second invocation.
3656 5 : CHECK_EQ(0, count - prev_count);
3657 5 : }
3658 :
// Checks that cells embedded in optimized code do not keep that code alive:
// once the closure dies, GC must mark the escaped code for deoptimization.
TEST(CellsInOptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "bar = (function() {"
        " function bar() {"
        " return foo(1);"
        " };"
        " var foo = function(x) { with (x) { return 1 + x; } };"
        " %NeverOptimizeFunction(foo);"
        " bar(foo);"
        " bar(foo);"
        " bar(foo);"
        " %OptimizeFunctionOnNextCall(bar);"
        " bar(foo);"
        " return bar;})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Escape only the code object; the function itself stays in the inner
    // scope and becomes garbage.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
3701 :
3702 :
// Checks that objects embedded in optimized code are weak: after the
// optimized function dies, GC marks the escaped code for deoptimization.
TEST(ObjectsInOptimizedCodeAreWeak) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "function bar() {"
        " return foo(1);"
        "};"
        "function foo(x) { with (x) { return 1 + x; } };"
        "%NeverOptimizeFunction(foo);"
        "bar();"
        "bar();"
        "bar();"
        "%OptimizeFunctionOnNextCall(bar);"
        "bar();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));
    // Escape only the code; the function dies with the inner scope/context.
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
3743 :
// Like ObjectsInOptimizedCodeAreWeak, but the embedded object (foo) starts
// in new space and is promoted by two scavenges while the optimized code is
// live; the code must survive promotion and still die weakly afterwards.
TEST(NewSpaceObjectsInOptimizedCode) {
  if (FLAG_always_opt || !FLAG_opt) return;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  {
    LocalContext context;
    HandleScope scope(heap->isolate());

    CompileRun(
        "var foo;"
        "var bar;"
        "(function() {"
        " function foo_func(x) { with (x) { return 1 + x; } };"
        " %NeverOptimizeFunction(foo_func);"
        " function bar_func() {"
        " return foo(1);"
        " };"
        " bar = bar_func;"
        " foo = foo_func;"
        " bar_func();"
        " bar_func();"
        " bar_func();"
        " %OptimizeFunctionOnNextCall(bar_func);"
        " bar_func();"
        "})();");

    Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("bar"))
                                           .ToLocalChecked())));

    Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
        *v8::Local<v8::Function>::Cast(CcTest::global()
                                           ->Get(context.local(), v8_str("foo"))
                                           .ToLocalChecked())));

    // Two scavenges promote foo out of new space.
    CHECK(heap->InNewSpace(*foo));
    CcTest::CollectGarbage(NEW_SPACE);
    CcTest::CollectGarbage(NEW_SPACE);
    CHECK(!heap->InNewSpace(*foo));
#ifdef VERIFY_HEAP
    heap->Verify();
#endif
    CHECK(!bar->code()->marked_for_deoptimization());
    code = scope.CloseAndEscape(Handle<Code>(bar->code()));
  }

  // Now make sure that a gc should get rid of the function
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }

  CHECK(code->marked_for_deoptimization());
}
3804 :
// Repeatedly compiles and optimizes throw-away functions under simulated
// incremental marking, then checks that the weak object-to-code table has
// not accumulated any entries (i.e. no leak through that table).
TEST(NoWeakHashTableLeakWithIncrementalMarking) {
  if (FLAG_always_opt || !FLAG_opt) return;
  if (!FLAG_incremental_marking) return;
  FLAG_allow_natives_syntax = true;
  FLAG_compilation_cache = false;
  FLAG_retain_maps_for_n_gc = 0;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  // Do not run for no-snap builds.
  if (!i::Snapshot::HasContextSnapshot(isolate, 0)) return;

  v8::internal::Heap* heap = CcTest::heap();

  // Get a clean slate regarding optimized functions on the heap.
  i::Deoptimizer::DeoptimizeAll(isolate);
  CcTest::CollectAllGarbage();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  for (int i = 0; i < 3; i++) {
    heap::SimulateIncrementalMarking(heap);
    {
      LocalContext context;
      HandleScope scope(heap->isolate());
      EmbeddedVector<char, 256> source;
      // Use distinct function names per iteration so nothing is shared.
      SNPrintF(source,
               "function bar%d() {"
               " return foo%d(1);"
               "};"
               "function foo%d(x) { with (x) { return 1 + x; } };"
               "bar%d();"
               "bar%d();"
               "bar%d();"
               "%%OptimizeFunctionOnNextCall(bar%d);"
               "bar%d();",
               i, i, i, i, i, i, i, i);
      CompileRun(source.start());
    }
    // We have to abort incremental marking here to abandon black pages.
    CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
  }
  int elements = 0;
  if (heap->weak_object_to_code_table()->IsHashTable()) {
    WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
    elements = t->NumberOfElements();
  }
  CHECK_EQ(0, elements);
}
3854 :
3855 :
// Defines a trivial global function called |name|, warms it up, forces its
// optimization via natives syntax, and returns a handle to the function.
static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
                                                const char* name) {
  EmbeddedVector<char, 256> source;
  SNPrintF(source,
           "function %s() { return 0; }"
           "%s(); %s();"
           "%%OptimizeFunctionOnNextCall(%s);"
           "%s();", name, name, name, name, name);
  CompileRun(source.start());
  i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(isolate->GetCurrentContext(), v8_str(name))
              .ToLocalChecked())));
  return fun;
}
3872 :
3873 :
3874 8 : static int GetCodeChainLength(Code* code) {
3875 : int result = 0;
3876 20 : while (code->next_code_link()->IsCode()) {
3877 4 : result++;
3878 : code = Code::cast(code->next_code_link());
3879 : }
3880 8 : return result;
3881 : }
3882 :
3883 :
// Checks that next_code_link is a weak link: when the "mortal" optimized
// function dies, its code is unlinked from the chain hanging off the
// still-live "immortal" code, shortening the chain by exactly one.
TEST(NextCodeLinkIsWeak) {
  FLAG_always_opt = false;
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  Handle<Code> code;
  CcTest::CollectAllAvailableGarbage();
  int code_chain_length_before, code_chain_length_after;
  {
    HandleScope scope(heap->isolate());
    Handle<JSFunction> mortal =
        OptimizeDummyFunction(CcTest::isolate(), "mortal");
    Handle<JSFunction> immortal =
        OptimizeDummyFunction(CcTest::isolate(), "immortal");
    CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
    code_chain_length_before = GetCodeChainLength(immortal->code());
    // Keep the immortal code and let the mortal code die.
    code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
    CompileRun("mortal = null; immortal = null;");
  }
  CcTest::CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  code_chain_length_after = GetCodeChainLength(*code);
  CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
}
3913 :
3914 :
// Assembles a minimal (push undefined; drop) code object and wraps it as
// OPTIMIZED_FUNCTION code, so tests can splice it into code lists.
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
  i::byte buffer[i::Assembler::kMinimalBufferSize];
  MacroAssembler masm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  CodeDesc desc;
  masm.Push(isolate->factory()->undefined_value());
  masm.Drop(1);
  masm.GetCode(isolate, &desc);
  Handle<Object> undefined(isolate->heap()->undefined_value(), isolate);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::OPTIMIZED_FUNCTION, undefined);
  CHECK(code->IsCode());
  return code;
}
3929 :
3930 :
// Variant of NextCodeLinkIsWeak using hand-built dummy code objects spliced
// into the context's optimized-code list: after GC the dead "mortal" code
// must be unlinked so the kept head links directly to the old list head.
TEST(NextCodeLinkIsWeak2) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();

  if (!isolate->use_optimizer()) return;
  HandleScope outer_scope(heap->isolate());
  CcTest::CollectAllAvailableGarbage();
  Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
  Handle<Code> new_head;
  Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
  {
    HandleScope scope(heap->isolate());
    Handle<Code> immortal = DummyOptimizedCode(isolate);
    Handle<Code> mortal = DummyOptimizedCode(isolate);
    // Build the chain immortal -> mortal -> old list head; only immortal
    // escapes the inner scope, so mortal becomes garbage.
    mortal->set_next_code_link(*old_head);
    immortal->set_next_code_link(*mortal);
    context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
    new_head = scope.CloseAndEscape(immortal);
  }
  CcTest::CollectAllAvailableGarbage();
  // Now mortal code should be dead.
  CHECK_EQ(*old_head, new_head->next_code_link());
}
3956 :
3957 :
// Set by ClearWeakIC when the weak-handle callback runs; the tests below
// reset it before forcing a GC and then assert that it was set.
static bool weak_ic_cleared = false;
3959 :
// Weak callback for the persistent handles in the tests below: records that
// the weak reference was cleared and resets the handle itself (the handle
// is passed back as the callback parameter).
static void ClearWeakIC(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  printf("clear weak is called\n");
  weak_ic_cleared = true;
  data.GetParameter()->Reset();
}
3966 :
3967 :
// Checks that the constructor function recorded in createObj's call-site
// feedback is held weakly (the weak cell clears once the function dies),
// and that the IC can go monomorphic again with a fresh constructor.
TEST(WeakFunctionInConstructor) {
  if (FLAG_always_opt) return;
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  CompileRun(
      "function createObj(obj) {"
      " return new obj();"
      "}");
  i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()
              ->Get(env.local(), v8_str("createObj"))
              .ToLocalChecked())));

  v8::Persistent<v8::Object> garbage;
  {
    // Inner scope so nothing but the persistent handle keeps |hat| alive.
    v8::HandleScope scope(isolate);
    const char* source =
        " (function() {"
        " function hat() { this.x = 5; }"
        " createObj(hat);"
        " createObj(hat);"
        " return hat;"
        " })();";
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  CcTest::CollectAllGarbage();
  CHECK(weak_ic_cleared);

  // We've determined the constructor in createObj has had its weak cell
  // cleared. Now, verify that one additional call with a new function
  // allows monomorphicity.
  Handle<FeedbackVector> feedback_vector =
      Handle<FeedbackVector>(createObj->feedback_vector(), CcTest::i_isolate());
  // Retry GC a bounded number of times until the feedback slot's weak cell
  // is observed as cleared.
  for (int i = 0; i < 20; i++) {
    Object* slot_value = feedback_vector->Get(FeedbackSlot(0));
    CHECK(slot_value->IsWeakCell());
    if (WeakCell::cast(slot_value)->cleared()) break;
    CcTest::CollectAllGarbage();
  }

  Object* slot_value = feedback_vector->Get(FeedbackSlot(0));
  CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
  CompileRun(
      "function coat() { this.x = 6; }"
      "createObj(coat);");
  slot_value = feedback_vector->Get(FeedbackSlot(0));
  CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
}
4026 :
4027 :
// Checks that the value returned by execution of the source is weak:
// |source| is run, its result is stored only in a persistent handle made
// weak with ClearWeakIC, and a full GC must then trigger the callback.
void CheckWeakness(const char* source) {
  FLAG_stress_compaction = false;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;
  v8::HandleScope scope(isolate);
  v8::Persistent<v8::Object> garbage;
  {
    // Inner scope so no local handle keeps the result alive afterwards.
    v8::HandleScope scope(isolate);
    garbage.Reset(isolate, CompileRun(env.local(), source)
                               .ToLocalChecked()
                               ->ToObject(env.local())
                               .ToLocalChecked());
  }
  weak_ic_cleared = false;
  garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
  CcTest::CollectAllGarbage();
  CHECK(weak_ic_cleared);
}
4049 :
4050 :
4051 : // Each of the following "weak IC" tests creates an IC that embeds a map with
4052 : // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
// Monomorphic load IC: the single receiver map it records must not keep
// the prototype alive.
TEST(WeakMapInMonomorphicLoadIC) {
  CheckWeakness("function loadIC(obj) {"
                " return obj.name;"
                "}"
                " (function() {"
                " var proto = {'name' : 'weak'};"
                " var obj = Object.create(proto);"
                " loadIC(obj);"
                " loadIC(obj);"
                " loadIC(obj);"
                " return proto;"
                " })();");
}
4066 :
4067 :
// Polymorphic load IC (two receiver maps sharing the prototype): none of
// the recorded maps may keep the prototype alive.
TEST(WeakMapInPolymorphicLoadIC) {
  CheckWeakness(
      "function loadIC(obj) {"
      " return obj.name;"
      "}"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " loadIC(obj);"
      " loadIC(obj);"
      " loadIC(obj);"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " loadIC(poly);"
      " return proto;"
      " })();");
}
4085 :
4086 :
// Same weakness check for a monomorphic keyed load IC.
TEST(WeakMapInMonomorphicKeyedLoadIC) {
  CheckWeakness("function keyedLoadIC(obj, field) {"
                " return obj[field];"
                "}"
                " (function() {"
                " var proto = {'name' : 'weak'};"
                " var obj = Object.create(proto);"
                " keyedLoadIC(obj, 'name');"
                " keyedLoadIC(obj, 'name');"
                " keyedLoadIC(obj, 'name');"
                " return proto;"
                " })();");
}
4100 :
4101 :
// Same weakness check for a polymorphic keyed load IC.
TEST(WeakMapInPolymorphicKeyedLoadIC) {
  CheckWeakness(
      "function keyedLoadIC(obj, field) {"
      " return obj[field];"
      "}"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " keyedLoadIC(obj, 'name');"
      " keyedLoadIC(obj, 'name');"
      " keyedLoadIC(obj, 'name');"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " keyedLoadIC(poly, 'name');"
      " return proto;"
      " })();");
}
4119 :
4120 :
// Same weakness check for a monomorphic store IC.
TEST(WeakMapInMonomorphicStoreIC) {
  CheckWeakness("function storeIC(obj, value) {"
                " obj.name = value;"
                "}"
                " (function() {"
                " var proto = {'name' : 'weak'};"
                " var obj = Object.create(proto);"
                " storeIC(obj, 'x');"
                " storeIC(obj, 'x');"
                " storeIC(obj, 'x');"
                " return proto;"
                " })();");
}
4134 :
4135 :
// Same weakness check for a polymorphic store IC.
TEST(WeakMapInPolymorphicStoreIC) {
  CheckWeakness(
      "function storeIC(obj, value) {"
      " obj.name = value;"
      "}"
      " (function() {"
      " var proto = {'name' : 'weak'};"
      " var obj = Object.create(proto);"
      " storeIC(obj, 'x');"
      " storeIC(obj, 'x');"
      " storeIC(obj, 'x');"
      " var poly = Object.create(proto);"
      " poly.x = true;"
      " storeIC(poly, 'x');"
      " return proto;"
      " })();");
}
4153 :
4154 :
4155 23723 : TEST(WeakMapInMonomorphicKeyedStoreIC) {
4156 : CheckWeakness("function keyedStoreIC(obj, field, value) {"
4157 : " obj[field] = value;"
4158 : "}"
4159 : " (function() {"
4160 : " var proto = {'name' : 'weak'};"
4161 : " var obj = Object.create(proto);"
4162 : " keyedStoreIC(obj, 'x');"
4163 : " keyedStoreIC(obj, 'x');"
4164 : " keyedStoreIC(obj, 'x');"
4165 : " return proto;"
4166 5 : " })();");
4167 5 : }
4168 :
4169 :
4170 23723 : TEST(WeakMapInPolymorphicKeyedStoreIC) {
4171 : CheckWeakness(
4172 : "function keyedStoreIC(obj, field, value) {"
4173 : " obj[field] = value;"
4174 : "}"
4175 : " (function() {"
4176 : " var proto = {'name' : 'weak'};"
4177 : " var obj = Object.create(proto);"
4178 : " keyedStoreIC(obj, 'x');"
4179 : " keyedStoreIC(obj, 'x');"
4180 : " keyedStoreIC(obj, 'x');"
4181 : " var poly = Object.create(proto);"
4182 : " poly.x = true;"
4183 : " keyedStoreIC(poly, 'x');"
4184 : " return proto;"
4185 5 : " })();");
4186 5 : }
4187 :
4188 :
// Weakness check for a monomorphic CompareNil IC (obj == null); the map
// recorded for |obj| must not keep |proto| alive.
TEST(WeakMapInMonomorphicCompareNilIC) {
  CheckWeakness("function compareNilIC(obj) {"
                "  return obj == null;"
                "}"
                " (function() {"
                "  var proto = {'name' : 'weak'};"
                "  var obj = Object.create(proto);"
                "  compareNilIC(obj);"
                "  compareNilIC(obj);"
                "  compareNilIC(obj);"
                "  return proto;"
                " })();");
}
4202 :
4203 :
4204 8 : Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4205 8 : Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4206 : Handle<Object> obj =
4207 24 : Object::GetProperty(isolate->global_object(), str).ToHandleChecked();
4208 8 : return Handle<JSFunction>::cast(obj);
4209 : }
4210 :
4211 16 : void CheckIC(Handle<JSFunction> function, int slot_index,
4212 : InlineCacheState state) {
4213 : FeedbackVector* vector = function->feedback_vector();
4214 : FeedbackSlot slot(slot_index);
4215 : LoadICNexus nexus(vector, slot);
4216 16 : CHECK_EQ(nexus.StateFromFeedback(), state);
4217 16 : }
4218 :
// A monomorphic load IC must keep its MONOMORPHIC state across a full GC:
// the GC may clear weak references in the feedback, but re-running testIC()
// with the same receiver shape must land back in MONOMORPHIC, and the state
// right after GC is also expected to still read MONOMORPHIC.
TEST(MonomorphicStaysMonomorphicAfterGC) {
  if (FLAG_always_opt) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope so the objects created by testIC() die before the GC.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CcTest::CollectAllGarbage();
  CheckIC(loadIC, 0, MONOMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC, 0, MONOMORPHIC);
}
4250 :
4251 :
// Counterpart of MonomorphicStaysMonomorphicAfterGC for the polymorphic
// case: a load IC primed with two receiver maps must still report
// POLYMORPHIC after a full GC and after re-priming.
TEST(PolymorphicStaysPolymorphicAfterGC) {
  if (FLAG_always_opt) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());
  CompileRun(
      "function loadIC(obj) {"
      "  return obj.name;"
      "}"
      "function testIC() {"
      "  var proto = {'name' : 'weak'};"
      "  var obj = Object.create(proto);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  loadIC(obj);"
      "  var poly = Object.create(proto);"
      "  poly.x = true;"
      "  loadIC(poly);"
      "  return proto;"
      "};");
  Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
  {
    // Inner scope so the objects created by testIC() die before the GC.
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CcTest::CollectAllGarbage();
  CheckIC(loadIC, 0, POLYMORPHIC);
  {
    v8::HandleScope scope(CcTest::isolate());
    CompileRun("(testIC())");
  }
  CheckIC(loadIC, 0, POLYMORPHIC);
}
4286 :
4287 :
// Basic WeakCell semantics:
//  - weak_cell1 points at a fixed array with no other references; it must
//    survive scavenges (new-space GCs) but be cleared by a full GC.
//  - weak_cell2 points at |survivor|, which stays strongly held by a handle,
//    so it must never be cleared.
TEST(WeakCell) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Factory* factory = isolate->factory();

  HandleScope outer_scope(isolate);
  Handle<WeakCell> weak_cell1;
  {
    // Inner scope: the fixed array handle dies here, leaving the weak cell
    // as the only (weak) reference to it.
    HandleScope inner_scope(isolate);
    Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
    weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
  }

  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cell2;
  {
    HandleScope inner_scope(isolate);
    weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
  }
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  // Scavenges must not clear either cell (two scavenges also promote the
  // new-space values to old space).
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(weak_cell1->value()->IsFixedArray());
  CHECK_EQ(*survivor, weak_cell2->value());
  // A full GC clears the cell whose value is unreachable, and only that one.
  CcTest::CollectAllAvailableGarbage();
  CHECK(weak_cell1->cleared());
  CHECK_EQ(*survivor, weak_cell2->value());
}
4320 :
4321 :
// Allocates N weak cells while incremental marking is repeatedly stepped and
// interleaved with scavenges.  Cell 0 wraps |survivor| (kept alive by a
// handle) and must retain its value; every other cell's value becomes
// unreachable and must be cleared by the final full GCs.
TEST(WeakCellsWithIncrementalMarking) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::internal::Heap* heap = CcTest::heap();
  v8::internal::Factory* factory = isolate->factory();

  const int N = 16;
  HandleScope outer_scope(isolate);
  Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
  Handle<WeakCell> weak_cells[N];

  for (int i = 0; i < N; i++) {
    HandleScope inner_scope(isolate);
    Handle<HeapObject> value =
        i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
    Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
    CHECK(weak_cell->value()->IsFixedArray());
    IncrementalMarking* marking = heap->incremental_marking();
    if (marking->IsStopped()) {
      heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                    i::GarbageCollectionReason::kTesting);
    }
    marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
    // A scavenge in the middle of marking must not clear the live cell.
    CcTest::CollectGarbage(NEW_SPACE);
    CHECK(weak_cell->value()->IsFixedArray());
    weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
  }
  // Call collect all twice to make sure that we also cleared
  // weak cells that were allocated on black pages.
  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();
  CHECK_EQ(*survivor, weak_cells[0]->value());
  for (int i = 1; i < N; i++) {
    CHECK(weak_cells[i]->cleared());
  }
}
4361 :
4362 :
4363 : #ifdef DEBUG
// DEBUG-only regression test: with --gc-interval=1 and inline allocation
// disabled, calling the optimized 'crash' function forces a GC at an
// allocation inside generated code; the add instruction must not break
// new-space promotion bookkeeping.  Exercises allocation-site pretenuring,
// so it bails out when that flag is off.
TEST(AddInstructionChangesNewSpacePromotion) {
  FLAG_allow_natives_syntax = true;
  FLAG_expose_gc = true;
  FLAG_stress_compaction = true;
  FLAG_gc_interval = 1000;
  CcTest::InitializeVM();
  if (!FLAG_allocation_site_pretenuring) return;
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  LocalContext env;
  CompileRun(
      "function add(a, b) {"
      "  return a + b;"
      "}"
      "add(1, 2);"
      "add(\"a\", \"b\");"
      "var oldSpaceObject;"
      "gc();"
      "function crash(x) {"
      "  var object = {a: null, b: null};"
      "  var result = add(1.5, x | 0);"
      "  object.a = result;"
      "  oldSpaceObject = object;"
      "  return object;"
      "}"
      "crash(1);"
      "crash(1);"
      "%OptimizeFunctionOnNextCall(crash);"
      "crash(1);");

  v8::Local<v8::Object> global = CcTest::global();
  v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
      global->Get(env.local(), v8_str("crash")).ToLocalChecked());
  v8::Local<v8::Value> args1[] = {v8_num(1)};
  heap->DisableInlineAllocation();
  // Time out the very next allocation so the call below triggers a GC.
  heap->set_allocation_timeout(1);
  g->Call(env.local(), global, 1, args1).ToLocalChecked();
  CcTest::CollectAllGarbage();
}
4404 :
4405 :
// Fatal-error handler used by CEntryStubOOM: terminates the process with
// exit code 0 iff the fatal error came from the expected OOM location
// (strcmp returns 0 on match), making an expected OOM look like success.
void OnFatalErrorExpectOOM(const char* location, const char* message) {
  // Exit with 0 if the location matches our expectation.
  exit(strcmp(location, "CALL_AND_RETRY_LAST"));
}
4410 :
4411 :
// DEBUG-only: runs array code with --gc-interval=1 so allocations inside the
// CEntry stub are GC-stressed.  If the run OOMs, OnFatalErrorExpectOOM turns
// the expected location into a clean exit; otherwise the script must have
// produced a number.
TEST(CEntryStubOOM) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);

  v8::Local<v8::Value> result = CompileRun(
      "%SetFlags('--gc-interval=1');"
      "var a = [];"
      "a.__proto__ = [];"
      "a.unshift(1)");

  CHECK(result->IsNumber());
}
4426 :
4427 : #endif // DEBUG
4428 :
4429 :
// No-op interrupt callback for Regress357137; requesting it is enough to
// make V8 check the stack guard inside the running script.
static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
4431 :
4432 :
// JS-callable binding ("interrupt" in Regress357137) that queues the no-op
// interrupt on the current isolate.
static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
  CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, nullptr);
}
4436 :
// Regression test: on an isolate with deliberately tiny heap limits, fill
// old space with fixed arrays that are all forced evacuation candidates,
// then request a full GC.  Compaction must abort cleanly under memory
// pressure instead of running out of memory.
HEAP_TEST(Regress538257) {
  FLAG_concurrent_marking = false;
  FLAG_manual_evacuation_candidates_selection = true;
  v8::Isolate::CreateParams create_params;
  // Set heap limits.
  create_params.constraints.set_max_semi_space_size_in_kb(1024);
#ifdef DEBUG
  create_params.constraints.set_max_old_space_size(20);
#else
  create_params.constraints.set_max_old_space_size(6);
#endif
  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
  v8::Isolate* isolate = v8::Isolate::New(create_params);
  isolate->Enter();
  {
    i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
    Heap* heap = i_isolate->heap();
    HandleScope handle_scope(i_isolate);
    PagedSpace* old_space = heap->old_space();
    const int kMaxObjects = 10000;
    const int kFixedArrayLen = 512;
    Handle<FixedArray> objects[kMaxObjects];
    // Allocate until old generation can no longer grow by a full page.
    for (int i = 0; (i < kMaxObjects) &&
                    heap->CanExpandOldGeneration(old_space->AreaSize());
         i++) {
      objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
      heap::ForceEvacuationCandidate(Page::FromAddress(objects[i]->address()));
    }
    heap::SimulateFullSpace(old_space);
    heap->CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask,
                            i::GarbageCollectionReason::kTesting);
    // If we get this far, we've successfully aborted compaction. Any further
    // allocations might trigger OOM.
  }
  isolate->Exit();
  isolate->Dispose();
}
4474 :
4475 :
// Regression test for crbug 357137: an interrupt requested while eval'ing a
// function with ~512 locals triggers a fake stack overflow inside f; the
// returned closure must still evaluate correctly to 42 afterwards.
TEST(Regress357137) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope hscope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(
      v8::String::NewFromUtf8(isolate, "interrupt", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, RequestInterrupt));
  v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
  CHECK(!context.IsEmpty());
  v8::Context::Scope cscope(context);

  v8::Local<v8::Value> result = CompileRun(
      "var locals = '';"
      "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
      "eval('function f() {' + locals + 'return function() { return v0; }; }');"
      "interrupt();"  // This triggers a fake stack overflow in f.
      "f()()");
  CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
}
4497 :
4498 :
// Regression test: shrinking an object while a filtering HeapIterator is
// live writes a filler over the trimmed tail; iterating the whole heap must
// still terminate without visiting garbage.
TEST(Regress507979) {
  const int kFixedArrayLen = 10;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope handle_scope(isolate);

  Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
  CHECK(heap->InNewSpace(*o1));
  CHECK(heap->InNewSpace(*o2));

  HeapIterator it(heap, i::HeapIterator::kFilterUnreachable);

  // Replace parts of an object placed before a live object with a filler. This
  // way the filler object shares the mark bits with the following live object.
  o1->Shrink(kFixedArrayLen - 1);

  for (HeapObject* obj = it.next(); obj != nullptr; obj = it.next()) {
    // Let's not optimize the loop away.
    CHECK_NOT_NULL(obj->address());
  }
}
4522 :
// Regression test for crbug 388880: place an object so that it ends exactly
// at the end of a page, then migrate it to a larger map while incremental
// marking is running.  MigrateFastToFast -> Heap::AdjustLiveBytes used to
// crash in this configuration.
TEST(Regress388880) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_incremental_marking = false;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  Handle<Map> map1 = Map::Create(isolate, 1);
  Handle<String> name = factory->NewStringFromStaticChars("foo");
  name = factory->InternalizeString(name);
  // map2 adds one in-object field, so migrating map1 -> map2 grows the object.
  Handle<Map> map2 =
      Map::CopyWithField(map1, name, FieldType::Any(isolate), NONE, kMutable,
                         Representation::Tagged(), OMIT_TRANSITION)
          .ToHandleChecked();

  size_t desired_offset = Page::kPageSize - map1->instance_size();

  // Allocate padding objects in old pointer space so, that object allocated
  // afterwards would end at the end of the page.
  heap::SimulateFullSpace(heap->old_space());
  size_t padding_size = desired_offset - Page::kObjectStartOffset;
  heap::CreatePadding(heap, static_cast<int>(padding_size), TENURED);

  Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
  o->set_raw_properties_or_hash(*factory->empty_fixed_array());

  // Ensure that the object allocated where we need it.
  Page* page = Page::FromAddress(o->address());
  CHECK_EQ(desired_offset, page->Offset(o->address()));

  // Now we have an object right at the end of the page.

  // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
  // that would cause crash.
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  marking->Stop();
  CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);
  CHECK(marking->IsMarking());

  // Now everything is set up for crashing in JSObject::MigrateFastToFast()
  // when it calls heap->AdjustLiveBytes(...).
  JSObject::MigrateToMap(o, map2);
}
4570 :
4571 :
// Regression test for v8:3631: incrementally mark a weak map's backing store
// black, then grow the weak map so the backing store is replaced.  The
// subsequent old-space GC must not lose track of the new backing store.
TEST(Regress3631) {
  if (!FLAG_incremental_marking) return;
  FLAG_expose_gc = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  v8::Local<v8::Value> result = CompileRun(
      "var weak_map = new WeakMap();"
      "var future_keys = [];"
      "for (var i = 0; i < 50; i++) {"
      "  var key = {'k' : i + 0.1};"
      "  weak_map.set(key, 1);"
      "  future_keys.push({'x' : i + 0.2});"
      "}"
      "weak_map");
  if (marking->IsStopped()) {
    CcTest::heap()->StartIncrementalMarking(
        i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
  }
  // Incrementally mark the backing store.
  Handle<JSReceiver> obj =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
  HeapObject* weak_map_table = HeapObject::cast(weak_map->table());
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  // Step marking until the backing store is black (or marking finished).
  while (!marking_state->IsBlack(weak_map_table) && !marking->IsStopped()) {
    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
  }
  // Stash the backing store in a handle.
  Handle<Object> save(weak_map->table(), isolate);
  // The following line will update the backing store.
  CompileRun(
      "for (var i = 0; i < 50; i++) {"
      "  weak_map.set(future_keys[i], i);"
      "}");
  heap->incremental_marking()->set_should_hurry(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
4613 :
4614 :
// Regression test for crbug 442710: Array.prototype.shift() on a global
// array followed by an old-space GC must not crash (shift leaves the array
// in a state the GC has to handle).
TEST(Regress442710) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  HandleScope sc(isolate);
  Handle<JSGlobalObject> global(
      CcTest::i_isolate()->context()->global_object());
  Handle<JSArray> array = factory->NewJSArray(2);

  Handle<String> name = factory->InternalizeUtf8String("testArray");
  JSReceiver::SetProperty(global, name, array, LanguageMode::kSloppy).Check();
  CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
  CcTest::CollectGarbage(OLD_SPACE);
}
4630 :
4631 :
// Guards the snapshot against accidentally baking in a grown number-string
// cache: right after deserialization its length must still be the initial
// size (the cache stores number/string pairs, hence the factor of 2).
HEAP_TEST(NumberStringCacheSize) {
  // Test that the number-string cache has not been resized in the snapshot.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  if (!isolate->snapshot_available()) return;
  Heap* heap = isolate->heap();
  CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
           heap->number_string_cache()->length());
}
4641 :
4642 :
// Regression test for v8:3877: a prototype should be collectable once no map
// references it.  While a.x's map still points at cls.prototype the weak
// cell stays set; after re-prototyping a.x the old map dies and the cell
// must be cleared.  (Four GCs are used to out-wait map retention.)
TEST(Regress3877) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  HandleScope scope(isolate);
  CompileRun("function cls() { this.x = 10; }");
  Handle<WeakCell> weak_prototype;
  {
    HandleScope inner_scope(isolate);
    v8::Local<v8::Value> result = CompileRun("cls.prototype");
    Handle<JSReceiver> proto =
        v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
    weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
  }
  CHECK(!weak_prototype->cleared());
  CompileRun(
      "var a = { };"
      "a.x = new cls();"
      "cls.prototype = null;");
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }
  // The map of a.x keeps prototype alive
  CHECK(!weak_prototype->cleared());
  // Change the map of a.x and make the previous map garbage collectable.
  CompileRun("a.x.__proto__ = {};");
  for (int i = 0; i < 4; i++) {
    CcTest::CollectAllGarbage();
  }
  CHECK(weak_prototype->cleared());
}
4674 :
4675 :
// Creates a fresh map with a script-allocated prototype, registers it in the
// heap's retained-maps list, and returns (escaped from the inner scope) the
// map's weak cell so callers can observe when the map is collected.
Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
  HandleScope inner_scope(isolate);
  Handle<Map> map = Map::Create(isolate, 1);
  v8::Local<v8::Value> result =
      CompileRun("(function () { return {x : 10}; })();");
  Handle<JSReceiver> proto =
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
  Map::SetPrototype(map, proto);
  heap->AddRetainedMap(map);
  return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
}
4687 :
4688 :
// Verifies the --retain-maps-for-n-gc policy for a given |n|: an otherwise
// unreferenced retained map must survive exactly n full GCs (each preceded
// by simulated incremental marking) and be collected on GC n+1.
void CheckMapRetainingFor(int n) {
  FLAG_retain_maps_for_n_gc = n;
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
  CHECK(!weak_cell->cleared());
  for (int i = 0; i < n; i++) {
    heap::SimulateIncrementalMarking(heap);
    CcTest::CollectGarbage(OLD_SPACE);
  }
  CHECK(!weak_cell->cleared());
  // One more GC past the retention window must collect the map.
  heap::SimulateIncrementalMarking(heap);
  CcTest::CollectGarbage(OLD_SPACE);
  CHECK(weak_cell->cleared());
}
4704 :
4705 :
// Exercises map retention for the default window and a few edge values
// (0, 1, 7 GCs).
TEST(MapRetaining) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
  CheckMapRetainingFor(0);
  CheckMapRetainingFor(1);
  CheckMapRetainingFor(7);
}
4716 :
4717 :
// Regression test: repeatedly appending to the retained-maps ArrayList while
// new space is full (so the append can trigger GC) must not crash when a GC
// happens during the list operation itself.
TEST(RegressArrayListGC) {
  FLAG_retain_maps_for_n_gc = 1;
  FLAG_incremental_marking = 0;
  FLAG_gc_global = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  AddRetainedMap(isolate, heap);
  Handle<Map> map = Map::Create(isolate, 1);
  CcTest::CollectGarbage(OLD_SPACE);
  // Force GC in old space on next addition of retained map.
  Map::WeakCellForMap(map);
  heap::SimulateFullSpace(CcTest::heap()->new_space());
  for (int i = 0; i < 10; i++) {
    heap->AddRetainedMap(map);
  }
  CcTest::CollectGarbage(OLD_SPACE);
}
4737 :
4738 :
// Sanity check over the strong root list: no root may be classified both as
// writable-after-initialization and as immortal-immovable.
TEST(WritableVsImmortalRoots) {
  for (int i = 0; i < Heap::kStrongRootListLength; ++i) {
    Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
    bool writable = Heap::RootCanBeWrittenAfterInitialization(root_index);
    bool immortal = Heap::RootIsImmortalImmovable(root_index);
    // A root value can be writable, immortal, or neither, but not both.
    CHECK(!immortal || !writable);
  }
}
4748 :
// Smoke test for the WeakFixedArray API: Add starting from an empty handle,
// Remove, Compact, and Add again must all succeed without crashing.
TEST(WeakFixedArray) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  Handle<HeapNumber> number = CcTest::i_isolate()->factory()->NewHeapNumber(1);
  Handle<WeakFixedArray> array = WeakFixedArray::Add(Handle<Object>(), number);
  array->Remove(number);
  array->Compact<WeakFixedArray::NullCallback>();
  WeakFixedArray::Add(array, number);
}
4759 :
4760 :
// Checks that GC preprocessing of a captured stack trace replaces the raw
// Code objects (present before GC at element 3) with Smi code positions, and
// that no Code object remains anywhere in the trace afterwards.
TEST(PreprocessStackTrace) {
  // Do not automatically trigger early GC.
  FLAG_gc_interval = -1;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  v8::TryCatch try_catch(CcTest::isolate());
  CompileRun("throw new Error();");
  CHECK(try_catch.HasCaught());
  Isolate* isolate = CcTest::i_isolate();
  Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
  Handle<Name> key = isolate->factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      Object::GetProperty(exception, key).ToHandleChecked();
  Handle<Object> code =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(code->IsCode());

  CcTest::CollectAllAvailableGarbage();

  Handle<Object> pos =
      Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
  CHECK(pos->IsSmi());

  Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
  int array_length = Smi::ToInt(stack_trace_array->length());
  for (int i = 0; i < array_length; i++) {
    Handle<Object> element =
        Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
    CHECK(!element->IsCode());
  }
}
4792 :
4793 :
// Flag set by the weak callback below; BootstrappingExports checks it after
// forcing GC.
static bool utils_has_been_collected = false;

// Weak callback for the "utils" persistent handle: records that the object
// was collected and resets the handle (required inside a weak callback).
static void UtilsHasBeenCollected(
    const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
  utils_has_been_collected = true;
  data.GetParameter()->Reset();
}
4801 :
4802 :
// Exposes the natives "utils" object on the global, deletes the global
// reference, makes the remaining persistent handle weak, and verifies GC
// actually collects it (i.e. bootstrapping does not strongly retain it).
// Skipped when a context snapshot is in use.
TEST(BootstrappingExports) {
  // Expose utils object and delete it to observe that it is indeed
  // being garbage-collected.
  FLAG_expose_natives_as = "utils";
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  LocalContext env;

  if (Snapshot::HasContextSnapshot(CcTest::i_isolate(), 0)) return;

  utils_has_been_collected = false;

  v8::Persistent<v8::Object> utils;

  {
    v8::HandleScope scope(isolate);
    v8::Local<v8::String> name = v8_str("utils");
    utils.Reset(isolate, CcTest::global()
                             ->Get(env.local(), name)
                             .ToLocalChecked()
                             ->ToObject(env.local())
                             .ToLocalChecked());
    CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
  }

  utils.SetWeak(&utils, UtilsHasBeenCollected,
                v8::WeakCallbackType::kParameter);

  CcTest::CollectAllAvailableGarbage();

  CHECK(utils_has_been_collected);
}
4835 :
4836 :
// Regression test for v8:1878: InternalArray instances created in user code
// must never share a map with (or leak into) regular JS arrays, even after
// thousands of allocations.
TEST(Regress1878) {
  FLAG_allow_natives_syntax = true;
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::Function> constructor = v8::Utils::CallableToLocal(
      CcTest::i_isolate()->internal_array_function());
  LocalContext env;
  CHECK(CcTest::global()
            ->Set(env.local(), v8_str("InternalArray"), constructor)
            .FromJust());

  v8::TryCatch try_catch(isolate);

  CompileRun(
      "var a = Array();"
      "for (var i = 0; i < 1000; i++) {"
      "  var ai = new InternalArray(10000);"
      "  if (%HaveSameMap(ai, a)) throw Error();"
      "  if (!%HasObjectElements(ai)) throw Error();"
      "}"
      "for (var i = 0; i < 1000; i++) {"
      "  var ai = new InternalArray(10000);"
      "  if (%HaveSameMap(ai, a)) throw Error();"
      "  if (!%HasObjectElements(ai)) throw Error();"
      "}");

  CHECK(!try_catch.HasCaught());
}
4866 :
4867 :
4868 215 : void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
4869 215 : CHECK_LE(FixedArray::kHeaderSize, bytes);
4870 430 : CHECK_EQ(0, bytes % kPointerSize);
4871 : Factory* factory = isolate->factory();
4872 : HandleScope scope(isolate);
4873 : AlwaysAllocateScope always_allocate(isolate);
4874 : int elements =
4875 215 : static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
4876 : Handle<FixedArray> array = factory->NewFixedArray(
4877 215 : elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
4878 430 : CHECK((space == NEW_SPACE) == isolate->heap()->InNewSpace(*array));
4879 215 : CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
4880 215 : }
4881 :
4882 :
// Verifies Heap::NewSpaceAllocationCounter(): it advances by exactly the
// bytes allocated, is unaffected by scavenges, and wraps correctly when the
// counter is near its maximum value.
TEST(NewSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->NewSpaceAllocationCounter();
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);  // Ensure new space is empty.
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, NEW_SPACE);
  size_t counter2 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(kSize, counter2 - counter1);
  CcTest::CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->NewSpaceAllocationCounter();
  CHECK_EQ(0U, counter3 - counter2);
  // Test counter overflow.
  size_t max_counter = static_cast<size_t>(-1);
  heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
  size_t start = heap->NewSpaceAllocationCounter();
  for (int i = 0; i < 20; i++) {
    // Unsigned wrap-around keeps the per-iteration delta exact.
    AllocateInSpace(isolate, kSize, NEW_SPACE);
    size_t counter = heap->NewSpaceAllocationCounter();
    CHECK_EQ(kSize, counter - start);
    start = counter;
  }
}
4909 :
4910 :
// Same idea as NewSpaceAllocationCounter but for the old generation; uses
// CHECK_LE because other old-space allocations can happen concurrently (see
// the TODO referencing v8:4148).
TEST(OldSpaceAllocationCounter) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  size_t counter1 = heap->OldGenerationAllocationCounter();
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  const size_t kSize = 1024;
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  size_t counter2 = heap->OldGenerationAllocationCounter();
  // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
  CHECK_LE(kSize, counter2 - counter1);
  CcTest::CollectGarbage(NEW_SPACE);
  size_t counter3 = heap->OldGenerationAllocationCounter();
  CHECK_EQ(0u, counter3 - counter2);
  AllocateInSpace(isolate, kSize, OLD_SPACE);
  CcTest::CollectGarbage(OLD_SPACE);
  size_t counter4 = heap->OldGenerationAllocationCounter();
  CHECK_LE(kSize, counter4 - counter3);
  // Test counter overflow.
  size_t max_counter = static_cast<size_t>(-1);
  heap->set_old_generation_allocation_counter_at_last_gc(max_counter -
                                                         10 * kSize);
  size_t start = heap->OldGenerationAllocationCounter();
  for (int i = 0; i < 20; i++) {
    AllocateInSpace(isolate, kSize, OLD_SPACE);
    size_t counter = heap->OldGenerationAllocationCounter();
    CHECK_LE(kSize, counter - start);
    start = counter;
  }
}
4943 :
4944 :
// JS-callable binding ("check" in MessageObjectLeak): asserts the isolate's
// pending message slot is the hole, i.e. no message object leaked past the
// try/catch or try/finally that just completed.
static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
  Isolate* isolate = CcTest::i_isolate();
  Object* message =
      *reinterpret_cast<Object**>(isolate->pending_message_obj_address());
  CHECK(message->IsTheHole(isolate));
}
4951 :
4952 :
// Runs try/catch and try/finally-with-break patterns and verifies (via the
// bound check() -> CheckLeak) that no pending message object survives.
// The script is run twice: once normally and once fully optimized
// (--turbo-filter=* plus --always-opt).
TEST(MessageObjectLeak) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  global->Set(
      v8::String::NewFromUtf8(isolate, "check", v8::NewStringType::kNormal)
          .ToLocalChecked(),
      v8::FunctionTemplate::New(isolate, CheckLeak));
  v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
  v8::Context::Scope cscope(context);

  const char* test =
      "try {"
      "  throw 'message 1';"
      "} catch (e) {"
      "}"
      "check();"
      "L: try {"
      "  throw 'message 2';"
      "} finally {"
      "  break L;"
      "}"
      "check();";
  CompileRun(test);

  const char* flag = "--turbo-filter=*";
  FlagList::SetFlagsFromString(flag, StrLength(flag));
  FLAG_always_opt = true;

  CompileRun(test);
}
4985 :
4986 :
// JS-callable binding ("check" in CanonicalSharedFunctionInfo): asserts the
// two function arguments share one SharedFunctionInfo object.
static void CheckEqualSharedFunctionInfos(
    const v8::FunctionCallbackInfo<v8::Value>& args) {
  Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
  Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
  Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
  Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
  CHECK(fun1->shared() == fun2->shared());
}
4995 :
4996 :
4997 10 : static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
4998 : Isolate* isolate = CcTest::i_isolate();
4999 : Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
5000 : Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
5001 : fun->shared()->ClearBytecodeArray(); // Bytecode is code too.
5002 20 : fun->set_code(*BUILTIN_CODE(isolate, CompileLazy));
5003 20 : fun->shared()->set_code(*BUILTIN_CODE(isolate, CompileLazy));
5004 10 : CcTest::CollectAllAvailableGarbage();
5005 10 : }
5006 :
5007 :
// Tests that a function re-created after its code was removed (via the
// native remove() callback, which clears code and GCs) still ends up with
// the same canonical SharedFunctionInfo, as verified by the native check()
// callback. Covered both for a directly nested closure and for one produced
// through an intermediate IIFE.
TEST(CanonicalSharedFunctionInfo) {
  CcTest::InitializeVM();
  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope scope(isolate);
  v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
  // check(a, b): CHECKs that a and b share one SharedFunctionInfo.
  global->Set(isolate, "check", v8::FunctionTemplate::New(
                                    isolate, CheckEqualSharedFunctionInfos));
  // remove(f): drops f's code and runs a full GC.
  global->Set(isolate, "remove",
              v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
  v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
  v8::Context::Scope cscope(context);
  CompileRun(
      "function f() { return function g() {}; }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");

  CompileRun(
      "function f() { return (function() { return function g() {}; })(); }"
      "var g1 = f();"
      "remove(f);"
      "var g2 = f();"
      "check(g1, g2);");
}
5033 :
5034 :
// Tests that Script::Iterator visits exactly the Script objects present on
// the heap: every script found by a raw HeapIterator is matched by exactly
// one Script::Iterator step, leaving the counter at zero.
TEST(ScriptIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  CcTest::CollectAllGarbage();

  int script_count = 0;
  {
    // Count Script objects the slow way, by walking the whole heap.
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != nullptr; obj = it.next()) {
      if (obj->IsScript()) script_count++;
    }
  }

  {
    // Each script returned by the dedicated iterator cancels one count.
    Script::Iterator iterator(isolate);
    while (iterator.Next()) script_count--;
  }

  CHECK_EQ(0, script_count);
}
5059 :
5060 :
// Tests that SharedFunctionInfo::GlobalIterator visits exactly the
// SharedFunctionInfo objects present on the heap, by the same
// count-then-cancel scheme used in TEST(ScriptIterator).
TEST(SharedFunctionInfoIterator) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  LocalContext context;

  CcTest::CollectAllGarbage();
  CcTest::CollectAllGarbage();

  int sfi_count = 0;
  {
    // Count SharedFunctionInfos the slow way, by walking the whole heap.
    HeapIterator it(heap);
    for (HeapObject* obj = it.next(); obj != nullptr; obj = it.next()) {
      if (!obj->IsSharedFunctionInfo()) continue;
      sfi_count++;
    }
  }

  {
    // Each SFI returned by the global iterator cancels one count.
    SharedFunctionInfo::GlobalIterator iterator(isolate);
    while (iterator.Next()) sfi_count--;
  }

  CHECK_EQ(0, sfi_count);
}
5087 :
// Regression test for crbug.com/587004: right-trims a maximally sized
// old-space fixed array whose elements all reference a new-space object,
// reuses the freed area for byte arrays, and then runs a scavenge.
// Presumably this exercises stale recorded slots in the trimmed area —
// the test only checks that the scavenge completes without crashing.
HEAP_TEST(Regress587004) {
  FLAG_concurrent_marking = false;
  FLAG_concurrent_sweeping = false;
#ifdef VERIFY_HEAP
  FLAG_verify_heap = false;
#endif
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Largest fixed array that still fits a regular (non-large-object) page.
  const int N =
      (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kPointerSize;
  Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
  CHECK(heap->old_space()->Contains(*array));
  Handle<Object> number = factory->NewHeapNumber(1.0);
  CHECK(heap->InNewSpace(*number));
  // Every element is an old-to-new pointer, so slots get recorded.
  for (int i = 0; i < N; i++) {
    array->set(i, *number);
  }
  CcTest::CollectGarbage(OLD_SPACE);
  heap::SimulateFullSpace(heap->old_space());
  // Trim the array down to a single element, freeing almost the whole page.
  heap->RightTrimFixedArray(*array, N - 1);
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  ByteArray* byte_array;
  const int M = 256;
  // Don't allow old space expansion. The test works without this flag too,
  // but becomes very slow.
  heap->set_force_oom(true);
  while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) {
    // Fill with values that look like (tagged) pointers.
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
  }
  // Re-enable old space expansion to avoid OOM crash.
  heap->set_force_oom(false);
  CcTest::CollectGarbage(NEW_SPACE);
}
5126 :
// Regression test for crbug.com/589413: right-trims fixed arrays whose
// elements all point at an evacuation candidate while incremental marking is
// in progress, then forces a full old-space GC from the free list. Only
// checks that the collection completes without crashing.
HEAP_TEST(Regress589413) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  FLAG_concurrent_marking = false;
  FLAG_concurrent_sweeping = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // Get the heap in clean state.
  CcTest::CollectGarbage(OLD_SPACE);
  CcTest::CollectGarbage(OLD_SPACE);
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Fill the new space with byte arrays with elements looking like pointers.
  const int M = 256;
  ByteArray* byte_array;
  while (heap->AllocateByteArray(M).To(&byte_array)) {
    for (int j = 0; j < M; j++) {
      byte_array->set(j, 0x31);
    }
    // Add the array in root set.
    handle(byte_array);
  }
  // Make sure the byte arrays will be promoted on the next GC.
  CcTest::CollectGarbage(NEW_SPACE);
  // This number is close to large free list category threshold.
  const int N = 0x3eee;
  {
    std::vector<FixedArray*> arrays;
    std::set<Page*> pages;
    FixedArray* array;
    // Fill all pages with fixed arrays.
    heap->set_force_oom(true);
    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromAddress(array->address()));
      // Add the array in root set.
      handle(array);
    }
    // Expand and fill one complete page with fixed arrays.
    heap->set_force_oom(false);
    while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
      arrays.push_back(array);
      pages.insert(Page::FromAddress(array->address()));
      // Add the array in root set.
      handle(array);
      // Do not expand anymore.
      heap->set_force_oom(true);
    }
    // Expand and mark the new page as evacuation candidate.
    heap->set_force_oom(false);
    {
      AlwaysAllocateScope always_allocate(isolate);
      Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
      Page* ec_page = Page::FromAddress(ec_obj->address());
      heap::ForceEvacuationCandidate(ec_page);
      // Make all arrays point to evacuation candidate so that
      // slots are recorded for them.
      for (size_t j = 0; j < arrays.size(); j++) {
        array = arrays[j];
        for (int i = 0; i < N; i++) {
          array->set(i, *ec_obj);
        }
      }
    }
    heap::SimulateIncrementalMarking(heap);
    // Trim each array down to one element while marking is in progress.
    for (size_t j = 0; j < arrays.size(); j++) {
      heap->RightTrimFixedArray(arrays[j], N - 1);
    }
  }
  // Force allocation from the free list.
  heap->set_force_oom(true);
  CcTest::CollectGarbage(OLD_SPACE);
}
5203 :
// Regression test for crbug.com/598319.
TEST(Regress598319) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  // This test ensures that no white objects can cross the progress bar of large
  // objects during incremental marking. It checks this by using Shift() during
  // incremental marking.
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  const int kNumberOfObjects = kMaxRegularHeapObjectSize / kPointerSize;

  // Large fixed array reachable only through a one-element root array, so
  // that the large object itself stays out of the root set.
  struct Arr {
    Arr(Isolate* isolate, int number_of_objects) {
      root = isolate->factory()->NewFixedArray(1, TENURED);
      {
        // Temporary scope to avoid getting any other objects into the root set.
        v8::HandleScope scope(CcTest::isolate());
        Handle<FixedArray> tmp =
            isolate->factory()->NewFixedArray(number_of_objects);
        root->set(0, *tmp);
        for (int i = 0; i < get()->length(); i++) {
          tmp = isolate->factory()->NewFixedArray(100, TENURED);
          get()->set(i, *tmp);
        }
      }
    }

    FixedArray* get() { return FixedArray::cast(root->get(0)); }

    Handle<FixedArray> root;
  } arr(isolate, kNumberOfObjects);

  CHECK_EQ(arr.get()->length(), kNumberOfObjects);
  CHECK(heap->lo_space()->Contains(arr.get()));
  LargePage* page = heap->lo_space()->FindPage(arr.get()->address());
  CHECK_NOT_NULL(page);

  // GC to cleanup state
  CcTest::CollectGarbage(OLD_SPACE);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }

  CHECK(heap->lo_space()->Contains(arr.get()));
  IncrementalMarking* marking = heap->incremental_marking();
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  CHECK(marking_state->IsWhite(arr.get()));
  for (int i = 0; i < arr.get()->length(); i++) {
    HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
    CHECK(marking_state->IsWhite(arr_value));
  }

  // Start incremental marking.
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());

  // Check that we have not marked the interesting array during root scanning.
  for (int i = 0; i < arr.get()->length(); i++) {
    HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
    CHECK(marking_state->IsWhite(arr_value));
  }

  // Now we search for a state where we are in incremental marking and have
  // only partially marked the large object.
  while (!marking->IsComplete()) {
    marking->Step(i::KB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
    if (page->IsFlagSet(Page::HAS_PROGRESS_BAR) && page->progress_bar() > 0) {
      CHECK_NE(page->progress_bar(), arr.get()->Size());
      {
        // Shift by 1, effectively moving one white object across the progress
        // bar, meaning that we will miss marking it.
        v8::HandleScope scope(CcTest::isolate());
        Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(
            Handle<FixedArray>(arr.get()));
        js_array->GetElementsAccessor()->Shift(js_array);
      }
      break;
    }
  }

  // Finish marking with bigger steps to speed up test.
  while (!marking->IsComplete()) {
    marking->Step(10 * i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());

  // All objects need to be black after marking. If a white object crossed the
  // progress bar, we would fail here.
  for (int i = 0; i < arr.get()->length(); i++) {
    HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
    CHECK(marking_state->IsBlack(arr_value));
  }
}
5309 :
// Allocates a tenured fixed array of |length|, shrinks it to one element,
// and checks that heap->SizeOfObjects() reflects the shrink only after a GC
// plus completed sweeping (not immediately). Returns the shrunk array so
// callers can assert which space it ended up in.
Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
  // Make sure there is no garbage and the compilation cache is empty.
  for (int i = 0; i < 5; i++) {
    CcTest::CollectAllGarbage();
  }
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  size_t size_before_allocation = heap->SizeOfObjects();
  Handle<FixedArray> array =
      heap->isolate()->factory()->NewFixedArray(length, TENURED);
  size_t size_after_allocation = heap->SizeOfObjects();
  CHECK_EQ(size_after_allocation, size_before_allocation + array->Size());
  array->Shrink(1);
  size_t size_after_shrinking = heap->SizeOfObjects();
  // Shrinking does not change the space size immediately.
  CHECK_EQ(size_after_allocation, size_after_shrinking);
  // GC and sweeping updates the size to account for shrinking.
  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();
  intptr_t size_after_gc = heap->SizeOfObjects();
  CHECK_EQ(size_after_gc, size_before_allocation + array->Size());
  return array;
}
5332 :
// Regression test for crbug.com/609761: size accounting when shrinking a
// fixed array that is just large enough to live in large object space.
TEST(Regress609761) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // One element past the regular-object limit forces LO_SPACE allocation.
  int length = kMaxRegularHeapObjectSize / kPointerSize + 1;
  Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, length);
  CHECK(heap->lo_space()->Contains(*array));
}
5341 :
// Checks live-byte/size accounting when shrinking a regular-sized fixed
// array that lives in old space (via ShrinkArrayAndCheckSize).
TEST(LiveBytes) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, 2000);
  CHECK(heap->old_space()->Contains(*array));
}
5349 :
// Regression test for crbug.com/615489: an object allocated black during
// incremental marking (and then dropped) must not make the reported live
// size grow after the next full GC.
TEST(Regress615489) {
  if (!FLAG_incremental_marking) return;
  FLAG_black_allocation = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();

  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();
  {
    // Allocate a tenured array while black allocation is active; the handle
    // dies with the inner scope, so the array is garbage afterwards.
    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
    v8::HandleScope inner(CcTest::isolate());
    isolate->factory()->NewFixedArray(500, TENURED)->Size();
  }
  while (!marking->IsComplete()) {
    marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }
  CHECK(marking->IsComplete());
  intptr_t size_before = heap->SizeOfObjects();
  CcTest::CollectAllGarbage();
  intptr_t size_after = heap->SizeOfObjects();
  // Live size does not increase after garbage collection.
  CHECK_LE(size_after, size_before);
}
5390 :
5391 : class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
5392 : public:
5393 5 : explicit StaticOneByteResource(const char* data) : data_(data) {}
5394 :
5395 5 : ~StaticOneByteResource() {}
5396 :
5397 5 : const char* data() const { return data_; }
5398 :
5399 0 : size_t length() const { return strlen(data_); }
5400 :
5401 : private:
5402 : const char* data_;
5403 : };
5404 :
// Regression test for crbug.com/631969: externalizing a cons string whose
// parts live on an evacuation candidate, after incremental marking has
// finished, then running an old-space GC. Only checks for absence of
// crashes.
TEST(Regress631969) {
  if (!FLAG_incremental_marking) return;
  FLAG_manual_evacuation_candidates_selection = true;
  FLAG_parallel_compaction = false;
  FLAG_concurrent_marking = false;
  FLAG_concurrent_sweeping = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  // Get the heap in clean state.
  CcTest::CollectGarbage(OLD_SPACE);
  CcTest::CollectGarbage(OLD_SPACE);
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  // Allocate two strings in a fresh page and mark the page as evacuation
  // candidate.
  heap::SimulateFullSpace(heap->old_space());
  Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED);
  Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED);
  heap::ForceEvacuationCandidate(Page::FromAddress(s1->address()));

  heap::SimulateIncrementalMarking(heap, false);

  // Allocate a cons string and promote it to a fresh page in the old space.
  heap::SimulateFullSpace(heap->old_space());
  Handle<String> s3;
  factory->NewConsString(s1, s2).ToHandle(&s3);
  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);

  // Finish incremental marking.
  IncrementalMarking* marking = heap->incremental_marking();
  while (!marking->IsComplete()) {
    marking->Step(MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
                  StepOrigin::kV8);
    if (marking->IsReadyToOverApproximateWeakClosure()) {
      marking->FinalizeIncrementally();
    }
  }

  {
    // Replace the cons string's contents with an external resource, then
    // collect old space (which evacuates the candidate page).
    StaticOneByteResource external_string("12345678901234");
    s3->MakeExternal(&external_string);
    CcTest::CollectGarbage(OLD_SPACE);
  }
}
5451 :
// Tests that left-trimming a fixed array that was allocated black (during
// incremental marking with black allocation) leaves the trimmed result
// black, with a filler installed over the trimmed prefix.
TEST(LeftTrimFixedArrayInBlackArea) {
  if (!FLAG_incremental_marking) return;
  FLAG_black_allocation = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();

  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();

  // Ensure that we allocate a new page, set up a bump pointer area, and
  // perform the allocation in a black area.
  heap::SimulateFullSpace(heap->old_space());
  isolate->factory()->NewFixedArray(4, TENURED);
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
  CHECK(heap->old_space()->Contains(*array));
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  CHECK(marking_state->IsBlack(*array));

  // Now left trim the allocated black area. A filler has to be installed
  // for the trimmed area and all mark bits of the trimmed area have to be
  // cleared.
  FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10);
  CHECK(marking_state->IsBlack(trimmed));

  heap::GcAndSweep(heap, OLD_SPACE);
}
5491 :
// Tests repeated left-trimming (by one, two, and three words) of a fixed
// array allocated black: after each trim a filler must cover the trimmed
// prefix, and both the filler and the remaining array must stay black.
TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
  if (!FLAG_incremental_marking) return;
  FLAG_black_allocation = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();

  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();

  // Ensure that we allocate a new page, set up a bump pointer area, and
  // perform the allocation in a black area.
  heap::SimulateFullSpace(heap->old_space());
  isolate->factory()->NewFixedArray(10, TENURED);

  // Allocate the fixed array that will be trimmed later.
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(100, TENURED);
  Address start_address = array->address();
  Address end_address = start_address + array->Size();
  Page* page = Page::FromAddress(start_address);
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  CHECK(marking_state->IsBlack(*array));
  // Black allocation must have set every mark bit in the array's range.
  CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
      page->AddressToMarkbitIndex(start_address),
      page->AddressToMarkbitIndex(end_address)));
  CHECK(heap->old_space()->Contains(*array));

  FixedArrayBase* previous = *array;
  FixedArrayBase* trimmed;

  // First trim in one word steps.
  for (int i = 0; i < 10; i++) {
    trimmed = heap->LeftTrimFixedArray(previous, 1);
    HeapObject* filler = HeapObject::FromAddress(previous->address());
    CHECK(filler->IsFiller());
    CHECK(marking_state->IsBlack(trimmed));
    CHECK(marking_state->IsBlack(previous));
    previous = trimmed;
  }

  // Then trim in two and three word steps.
  for (int i = 2; i <= 3; i++) {
    for (int j = 0; j < 10; j++) {
      trimmed = heap->LeftTrimFixedArray(previous, i);
      HeapObject* filler = HeapObject::FromAddress(previous->address());
      CHECK(filler->IsFiller());
      CHECK(marking_state->IsBlack(trimmed));
      CHECK(marking_state->IsBlack(previous));
      previous = trimmed;
    }
  }

  heap::GcAndSweep(heap, OLD_SPACE);
}
5558 :
// Tests repeated right-trimming (by one, two, and three words) of a fixed
// array allocated black: each trim must install a filler over the trimmed
// suffix, and the filler's mark bits must be cleared (white).
TEST(ContinuousRightTrimFixedArrayInBlackArea) {
  if (!FLAG_incremental_marking) return;
  FLAG_black_allocation = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();

  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();

  // Ensure that we allocate a new page, set up a bump pointer area, and
  // perform the allocation in a black area.
  heap::SimulateFullSpace(heap->old_space());
  isolate->factory()->NewFixedArray(10, TENURED);

  // Allocate the fixed array that will be trimmed later.
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(100, TENURED);
  Address start_address = array->address();
  Address end_address = start_address + array->Size();
  Page* page = Page::FromAddress(start_address);
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  CHECK(marking_state->IsBlack(*array));

  // Black allocation must have set every mark bit in the array's range.
  CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
      page->AddressToMarkbitIndex(start_address),
      page->AddressToMarkbitIndex(end_address)));
  CHECK(heap->old_space()->Contains(*array));

  // Trim it once by one word to make checking for white marking color uniform.
  Address previous = end_address - kPointerSize;
  heap->RightTrimFixedArray(*array, 1);
  HeapObject* filler = HeapObject::FromAddress(previous);
  CHECK(filler->IsFiller());
  CHECK(marking_state->IsImpossible(filler));

  // Trim 10 times by one, two, and three word.
  for (int i = 1; i <= 3; i++) {
    for (int j = 0; j < 10; j++) {
      previous -= kPointerSize * i;
      heap->RightTrimFixedArray(*array, i);
      HeapObject* filler = HeapObject::FromAddress(previous);
      CHECK(filler->IsFiller());
      CHECK(marking_state->IsWhite(filler));
    }
  }

  heap::GcAndSweep(heap, OLD_SPACE);
}
5619 :
// Regression test for crbug.com/618958: a critical memory pressure
// notification with high external memory usage must trigger garbage
// collection.
TEST(Regress618958) {
  if (!FLAG_incremental_marking) return;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  bool isolate_is_locked = true;
  // Pretend the embedder holds 100 MB of external memory.
  heap->update_external_memory(100 * MB);
  int mark_sweep_count_before = heap->ms_count();
  heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
                                   isolate_is_locked);
  int mark_sweep_count_after = heap->ms_count();
  int mark_sweeps_performed = mark_sweep_count_after - mark_sweep_count_before;
  // The memory pressure handler either performed two GCs or performed one and
  // started incremental marking.
  CHECK(mark_sweeps_performed == 2 ||
        (mark_sweeps_performed == 1 &&
         !heap->incremental_marking()->IsStopped()));
}
5638 :
// Tests that shrinking a large-object-space fixed array and then running a
// full GC uncommits the unused tail of the chunk: committed physical memory
// drops to the page-rounded size of the shrunk object.
TEST(UncommitUnusedLargeObjectMemory) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
  MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
  CHECK(chunk->owner()->identity() == LO_SPACE);

  intptr_t size_before = array->Size();
  size_t committed_memory_before = chunk->CommittedPhysicalMemory();

  array->Shrink(1);
  CHECK(array->Size() < size_before);

  CcTest::CollectAllGarbage();
  CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
  // Expected remaining commitment: header + shrunk object, rounded up to the
  // OS commit granularity.
  size_t shrinked_size =
      RoundUp((array->address() - chunk->address()) + array->Size(),
              base::OS::CommitPageSize());
  CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory());
}
5662 :
// Tests RememberedSet<OLD_TO_NEW>::RemoveRange on a large-object chunk:
// slots are inserted at chunk boundaries (start/end of the chunk and of its
// first page), then ranges are removed one by one, and after each removal an
// iteration checks that exactly the expected slots remain.
TEST(RememberedSetRemoveRange) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();

  // One-page-sized array, so its chunk spans more than a single page worth
  // of slot-set buckets.
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(Page::kPageSize /
                                                               kPointerSize);
  MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
  CHECK(chunk->owner()->identity() == LO_SPACE);
  Address start = array->address();
  // Maps slot to boolean indicator of whether the slot should be in the set.
  std::map<Address, bool> slots;
  slots[start + 0] = true;
  slots[start + kPointerSize] = true;
  slots[start + Page::kPageSize - kPointerSize] = true;
  slots[start + Page::kPageSize] = true;
  slots[start + Page::kPageSize + kPointerSize] = true;
  slots[chunk->area_end() - kPointerSize] = true;

  for (auto x : slots) {
    RememberedSet<OLD_TO_NEW>::Insert(chunk, x.first);
  }

  // Baseline: every inserted slot must be reported.
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](Address addr) {
                                       CHECK(slots[addr]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);

  // Remove [start, start + kPointerSize): kills only the first slot.
  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kPointerSize,
                                         SlotSet::FREE_EMPTY_BUCKETS);
  slots[start] = false;
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](Address addr) {
                                       CHECK(slots[addr]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);

  // Remove up to (but excluding) the first page boundary.
  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kPointerSize,
                                         start + Page::kPageSize,
                                         SlotSet::FREE_EMPTY_BUCKETS);
  slots[start + kPointerSize] = false;
  slots[start + Page::kPageSize - kPointerSize] = false;
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](Address addr) {
                                       CHECK(slots[addr]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);

  // Remove a range crossing the page boundary.
  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start,
                                         start + Page::kPageSize + kPointerSize,
                                         SlotSet::FREE_EMPTY_BUCKETS);
  slots[start + Page::kPageSize] = false;
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](Address addr) {
                                       CHECK(slots[addr]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);

  // Remove the final slot at the very end of the usable area.
  RememberedSet<OLD_TO_NEW>::RemoveRange(
      chunk, chunk->area_end() - kPointerSize, chunk->area_end(),
      SlotSet::FREE_EMPTY_BUCKETS);
  slots[chunk->area_end() - kPointerSize] = false;
  RememberedSet<OLD_TO_NEW>::Iterate(chunk,
                                     [&slots](Address addr) {
                                       CHECK(slots[addr]);
                                       return KEEP_SLOT;
                                     },
                                     SlotSet::PREFREE_EMPTY_BUCKETS);
}
5738 :
// Regression test for crbug.com/670675: incremental marking driven by
// deadline-based AdvanceIncrementalMarking must eventually stop (i.e. finish
// and perform the GC) while the mutator keeps allocating large arrays.
HEAP_TEST(Regress670675) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  CcTest::CollectAllGarbage();

  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
  if (marking->IsStopped()) {
    marking->Start(i::GarbageCollectionReason::kTesting);
  }
  size_t array_length = Page::kPageSize / kPointerSize + 100;
  // Enough iterations (n + slack) to exhaust available old-gen space.
  size_t n = heap->OldGenerationSpaceAvailable() / array_length;
  for (size_t i = 0; i < n + 40; i++) {
    {
      // Allocate garbage: the handle scope dies immediately.
      HandleScope inner_scope(isolate);
      isolate->factory()->NewFixedArray(static_cast<int>(array_length));
    }
    if (marking->IsStopped()) break;
    // Advance marking with a 1ms deadline per iteration.
    double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
    marking->AdvanceIncrementalMarking(
        deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
  }
  DCHECK(marking->IsStopped());
}
5770 :
5771 : namespace {
// Assembles a dummy Code object consisting of 1024 nops and creates it with
// the immovable flag set; used by HEAP_TEST(Regress5831) to fill up the
// immovable portion of the code space.
Handle<Code> GenerateDummyImmovableCode(Isolate* isolate) {
  Assembler assm(isolate, nullptr, 256);

  const int kNumberOfNops = 1 << 10;
  for (int i = 0; i < kNumberOfNops; i++) {
    assm.nop();  // supported on all architectures
  }

  CodeDesc desc;
  assm.GetCode(isolate, &desc);
  const bool kImmovable = true;
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::STUB, Handle<Code>(), HandlerTable::Empty(isolate),
      MaybeHandle<ByteArray>(), DeoptimizationData::Empty(isolate), kImmovable);
  CHECK(code->IsCode());

  return code;
}
5790 : } // namespace
5791 :
// Regression test for v8:5831: once regular code-space pages are exhausted,
// immovable code overflows into LO_SPACE; but when the serializer is enabled,
// a freshly allocated immovable code object must instead land on a regular,
// never-evacuate code-space page (not the first page, not LO_SPACE).
HEAP_TEST(Regress5831) {
  CcTest::InitializeVM();
  Heap* heap = CcTest::heap();
  Isolate* isolate = CcTest::i_isolate();
  HandleScope handle_scope(isolate);

  // Used to ensure that the generated code is not collected.
  const int kInitialSize = 32;
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(kInitialSize);

  // Ensure that all immovable code space pages are full and we overflow into
  // LO_SPACE.
  const int kMaxIterations = 1 << 16;
  bool overflowed_into_lospace = false;
  for (int i = 0; i < kMaxIterations; i++) {
    Handle<Code> code = GenerateDummyImmovableCode(isolate);
    array = FixedArray::SetAndGrow(array, i, code);
    // Every dummy code object must live either in code space or, once code
    // space is full, in large-object space.
    CHECK(heap->code_space()->Contains(code->address()) ||
          heap->lo_space()->Contains(*code));
    if (heap->lo_space()->Contains(*code)) {
      overflowed_into_lospace = true;
      break;
    }
  }

  CHECK(overflowed_into_lospace);

  // Fake a serializer run.
  isolate->serializer_enabled_ = true;

  // Generate the code.
  Handle<Code> code = GenerateDummyImmovableCode(isolate);
  // The object fits on a regular page, so with the serializer enabled it
  // should not be moved into LO_SPACE ...
  CHECK_GE(i::kMaxRegularHeapObjectSize, code->Size());
  // ... and it must not be on the first (always-immovable) code-space page.
  CHECK(!heap->code_space()->FirstPage()->Contains(code->address()));

  // Ensure it's not in large object space.
  MemoryChunk* chunk = MemoryChunk::FromAddress(code->address());
  CHECK(chunk->owner()->identity() != LO_SPACE);
  CHECK(chunk->NeverEvacuate());
}
5832 :
5833 23723 : TEST(Regress6800) {
5834 5 : CcTest::InitializeVM();
5835 : Isolate* isolate = CcTest::i_isolate();
5836 : HandleScope handle_scope(isolate);
5837 :
5838 : const int kRootLength = 1000;
5839 : Handle<FixedArray> root =
5840 5 : isolate->factory()->NewFixedArray(kRootLength, TENURED);
5841 : {
5842 : HandleScope inner_scope(isolate);
5843 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
5844 5005 : for (int i = 0; i < kRootLength; i++) {
5845 5000 : root->set(i, *new_space_array);
5846 : }
5847 5000 : for (int i = 0; i < kRootLength; i++) {
5848 10000 : root->set(i, CcTest::heap()->undefined_value());
5849 : }
5850 : }
5851 5 : CcTest::CollectGarbage(NEW_SPACE);
5852 10 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
5853 : MemoryChunk::FromAddress(root->address())));
5854 5 : }
5855 :
5856 23723 : TEST(Regress6800LargeObject) {
5857 5 : CcTest::InitializeVM();
5858 : Isolate* isolate = CcTest::i_isolate();
5859 : HandleScope handle_scope(isolate);
5860 :
5861 : const int kRootLength = i::kMaxRegularHeapObjectSize / kPointerSize;
5862 : Handle<FixedArray> root =
5863 5 : isolate->factory()->NewFixedArray(kRootLength, TENURED);
5864 5 : CcTest::heap()->lo_space()->Contains(*root);
5865 : {
5866 : HandleScope inner_scope(isolate);
5867 5 : Handle<FixedArray> new_space_array = isolate->factory()->NewFixedArray(1);
5868 316965 : for (int i = 0; i < kRootLength; i++) {
5869 316960 : root->set(i, *new_space_array);
5870 : }
5871 316960 : for (int i = 0; i < kRootLength; i++) {
5872 633920 : root->set(i, CcTest::heap()->undefined_value());
5873 : }
5874 : }
5875 5 : CcTest::CollectGarbage(OLD_SPACE);
5876 10 : CHECK_EQ(0, RememberedSet<OLD_TO_NEW>::NumberOfPreFreedEmptyBuckets(
5877 : MemoryChunk::FromAddress(root->address())));
5878 5 : }
5879 :
// Regression test: Heap::Allocate must install the map with a write barrier.
// Under black allocation the new object is allocated black; if the map is
// written without a barrier it stays white and gets freed prematurely.
HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
  if (!FLAG_incremental_marking) return;
  ManualGCScope manual_gc_scope;
  FLAG_black_allocation = true;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Heap* heap = CcTest::heap();
  Isolate* isolate = heap->isolate();
  CcTest::CollectAllGarbage();
  // Start incremental marking but do not finish it yet.
  heap::SimulateIncrementalMarking(heap, false);
  Map* map;
  {
    AlwaysAllocateScope always_allocate(isolate);
    // Allocate the map BEFORE black allocation starts, so it is not
    // automatically marked black itself.
    map = Map::cast(heap->AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize)
                        .ToObjectChecked());
  }
  heap->incremental_marking()->StartBlackAllocationForTesting();
  Handle<HeapObject> object;
  {
    AlwaysAllocateScope always_allocate(isolate);
    object = Handle<HeapObject>(
        heap->Allocate(map, OLD_SPACE).ToObjectChecked(), isolate);
  }
  // The object is black. If Heap::Allocate sets the map without write-barrier,
  // then the map is white and will be freed prematurely.
  heap::SimulateIncrementalMarking(heap, true);
  CcTest::CollectAllGarbage();
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  // If the barrier was missing, the map would have been collected and this
  // check would fail.
  CHECK(object->map()->IsMap());
}
5913 :
5914 23723 : UNINITIALIZED_TEST(ReinitializeStringHashSeed) {
5915 : // Enable rehashing and create an isolate and context.
5916 5 : i::FLAG_rehash_snapshot = true;
5917 15 : for (int i = 1; i < 3; i++) {
5918 10 : i::FLAG_hash_seed = 1337 * i;
5919 : v8::Isolate::CreateParams create_params;
5920 10 : create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
5921 10 : v8::Isolate* isolate = v8::Isolate::New(create_params);
5922 : {
5923 : v8::Isolate::Scope isolate_scope(isolate);
5924 10 : CHECK_EQ(1337 * i,
5925 : reinterpret_cast<i::Isolate*>(isolate)->heap()->HashSeed());
5926 20 : v8::HandleScope handle_scope(isolate);
5927 10 : v8::Local<v8::Context> context = v8::Context::New(isolate);
5928 10 : CHECK(!context.IsEmpty());
5929 : v8::Context::Scope context_scope(context);
5930 : }
5931 10 : isolate->Dispose();
5932 : }
5933 5 : }
5934 :
5935 : } // namespace heap
5936 : } // namespace internal
5937 71154 : } // namespace v8
|