Coverage Report

Created: 2022-05-20 06:19

/src/serenity/Userland/Libraries/LibJS/Heap/Heap.cpp
Line
Count
Source (jump to first uncovered line)
1
/*
2
 * Copyright (c) 2020-2022, Andreas Kling <kling@serenityos.org>
3
 *
4
 * SPDX-License-Identifier: BSD-2-Clause
5
 */
6
7
#include <AK/Badge.h>
8
#include <AK/Debug.h>
9
#include <AK/HashTable.h>
10
#include <AK/StackInfo.h>
11
#include <AK/TemporaryChange.h>
12
#include <LibCore/ElapsedTimer.h>
13
#include <LibJS/Heap/CellAllocator.h>
14
#include <LibJS/Heap/Handle.h>
15
#include <LibJS/Heap/Heap.h>
16
#include <LibJS/Heap/HeapBlock.h>
17
#include <LibJS/Interpreter.h>
18
#include <LibJS/Runtime/Object.h>
19
#include <LibJS/Runtime/WeakContainer.h>
20
#include <setjmp.h>
21
22
#ifdef __serenity__
23
#    include <serenity.h>
24
#endif
25
26
namespace JS {
27
28
#ifdef __serenity__
29
static int gc_perf_string_id;
30
#endif
31
32
Heap::Heap(VM& vm)
33
    : m_vm(vm)
34
76
{
35
#ifdef __serenity__
36
    auto gc_signpost_string = "Garbage collection"sv;
37
    gc_perf_string_id = perf_register_string(gc_signpost_string.characters_without_null_termination(), gc_signpost_string.length());
38
#endif
39
40
76
    if constexpr (HeapBlock::min_possible_cell_size <= 16) {
41
0
        m_allocators.append(make<CellAllocator>(16));
42
0
    }
43
76
    static_assert(HeapBlock::min_possible_cell_size <= 24, "Heap Cell tracking uses too much data!");
44
76
    m_allocators.append(make<CellAllocator>(32));
45
76
    m_allocators.append(make<CellAllocator>(64));
46
76
    m_allocators.append(make<CellAllocator>(96));
47
76
    m_allocators.append(make<CellAllocator>(128));
48
76
    m_allocators.append(make<CellAllocator>(256));
49
76
    m_allocators.append(make<CellAllocator>(512));
50
76
    m_allocators.append(make<CellAllocator>(1024));
51
76
    m_allocators.append(make<CellAllocator>(3072));
52
76
}
53
54
Heap::~Heap()
55
76
{
56
76
    vm().string_cache().clear();
57
76
    collect_garbage(CollectionType::CollectEverything);
58
76
}
59
60
ALWAYS_INLINE CellAllocator& Heap::allocator_for_size(size_t cell_size)
61
631k
{
62
2.98M
    for (auto& allocator : m_allocators) {
63
2.98M
        if (allocator->cell_size() >= cell_size)
64
631k
            return *allocator;
65
2.98M
    }
66
0
    dbgln("Cannot get CellAllocator for cell size {}, largest available is {}!", cell_size, m_allocators.last()->cell_size());
67
0
    VERIFY_NOT_REACHED();
68
0
}
69
70
Cell* Heap::allocate_cell(size_t size)
71
617k
{
72
617k
    if (should_collect_on_every_allocation()) {
73
0
        collect_garbage();
74
617k
    } else if (m_allocations_since_last_gc > m_max_allocations_between_gc) {
75
0
        m_allocations_since_last_gc = 0;
76
0
        collect_garbage();
77
617k
    } else {
78
617k
        ++m_allocations_since_last_gc;
79
617k
    }
80
81
617k
    auto& allocator = allocator_for_size(size);
82
617k
    return allocator.allocate_cell(*this);
83
617k
}
84
85
void Heap::collect_garbage(CollectionType collection_type, bool print_report)
86
138
{
87
138
    VERIFY(!m_collecting_garbage);
88
0
    TemporaryChange change(m_collecting_garbage, true);
89
90
#ifdef __serenity__
91
    static size_t global_gc_counter = 0;
92
    perf_event(PERF_EVENT_SIGNPOST, gc_perf_string_id, global_gc_counter++);
93
#endif
94
95
138
    auto collection_measurement_timer = Core::ElapsedTimer::start_new();
96
138
    if (collection_type == CollectionType::CollectGarbage) {
97
62
        if (m_gc_deferrals) {
98
0
            m_should_gc_when_deferral_ends = true;
99
0
            return;
100
0
        }
101
62
        HashTable<Cell*> roots;
102
62
        gather_roots(roots);
103
62
        mark_live_cells(roots);
104
62
    }
105
138
    sweep_dead_cells(print_report, collection_measurement_timer);
106
138
}
107
108
void Heap::gather_roots(HashTable<Cell*>& roots)
109
62
{
110
62
    vm().gather_roots(roots);
111
62
    gather_conservative_roots(roots);
112
113
62
    for (auto& handle : m_handles)
114
186
        roots.set(handle.cell());
115
116
62
    for (auto& vector : m_marked_vectors)
117
434
        vector.gather_roots(roots);
118
119
62
    if constexpr (HEAP_DEBUG) {
120
0
        dbgln("gather_roots:");
121
0
        for (auto* root : roots)
122
0
            dbgln("  + {}", root);
123
0
    }
124
62
}
125
126
__attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(HashTable<Cell*>& roots)
127
62
{
128
62
    FlatPtr dummy;
129
130
62
    dbgln_if(HEAP_DEBUG, "gather_conservative_roots:");
131
132
62
    jmp_buf buf;
133
62
    setjmp(buf);
134
135
62
    HashTable<FlatPtr> possible_pointers;
136
137
62
    auto* raw_jmp_buf = reinterpret_cast<FlatPtr const*>(buf);
138
139
310
    for (size_t i = 0; i < ((size_t)sizeof(buf)) / sizeof(FlatPtr); i += sizeof(FlatPtr))
140
248
        possible_pointers.set(raw_jmp_buf[i]);
141
142
62
    auto stack_reference = bit_cast<FlatPtr>(&dummy);
143
62
    auto& stack_info = m_vm.stack_info();
144
145
118k
    for (FlatPtr stack_address = stack_reference; stack_address < stack_info.top(); stack_address += sizeof(FlatPtr)) {
146
118k
        auto data = *reinterpret_cast<FlatPtr*>(stack_address);
147
118k
        possible_pointers.set(data);
148
118k
    }
149
150
62
    HashTable<HeapBlock*> all_live_heap_blocks;
151
7.26k
    for_each_block([&](auto& block) {
152
7.26k
        all_live_heap_blocks.set(&block);
153
7.26k
        return IterationDecision::Continue;
154
7.26k
    });
155
156
41.8k
    for (auto possible_pointer : possible_pointers) {
157
41.8k
        if (!possible_pointer)
158
62
            continue;
159
41.8k
        dbgln_if(HEAP_DEBUG, "  ? {}", (void const*)possible_pointer);
160
41.8k
        auto* possible_heap_block = HeapBlock::from_cell(reinterpret_cast<Cell const*>(possible_pointer));
161
41.8k
        if (all_live_heap_blocks.contains(possible_heap_block)) {
162
2.68k
            if (auto* cell = possible_heap_block->cell_from_possible_pointer(possible_pointer)) {
163
2.49k
                if (cell->state() == Cell::State::Live) {
164
2.47k
                    dbgln_if(HEAP_DEBUG, "  ?-> {}", (void const*)cell);
165
2.47k
                    roots.set(cell);
166
2.47k
                } else {
167
16
                    dbgln_if(HEAP_DEBUG, "  #-> {}", (void const*)cell);
168
16
                }
169
2.49k
            }
170
2.68k
        }
171
41.8k
    }
172
62
}
173
174
class MarkingVisitor final : public Cell::Visitor {
175
public:
176
62
    MarkingVisitor() = default;
177
178
    virtual void visit_impl(Cell& cell) override
179
403k
    {
180
403k
        if (cell.is_marked())
181
236k
            return;
182
167k
        dbgln_if(HEAP_DEBUG, "  ! {}", &cell);
183
184
167k
        cell.set_marked(true);
185
167k
        cell.visit_edges(*this);
186
167k
    }
187
};
188
189
void Heap::mark_live_cells(HashTable<Cell*> const& roots)
190
62
{
191
62
    dbgln_if(HEAP_DEBUG, "mark_live_cells:");
192
193
62
    MarkingVisitor visitor;
194
62
    for (auto* root : roots)
195
10.5k
        visitor.visit(root);
196
197
62
    for (auto& inverse_root : m_uprooted_cells)
198
0
        inverse_root->set_marked(false);
199
200
62
    m_uprooted_cells.clear();
201
62
}
202
203
void Heap::sweep_dead_cells(bool print_report, Core::ElapsedTimer const& measurement_timer)
204
138
{
205
138
    dbgln_if(HEAP_DEBUG, "sweep_dead_cells:");
206
138
    Vector<HeapBlock*, 32> empty_blocks;
207
138
    Vector<HeapBlock*, 32> full_blocks_that_became_usable;
208
209
138
    size_t collected_cells = 0;
210
138
    size_t live_cells = 0;
211
138
    size_t collected_cell_bytes = 0;
212
138
    size_t live_cell_bytes = 0;
213
214
15.2k
    for_each_block([&](auto& block) {
215
15.2k
        bool block_has_live_cells = false;
216
15.2k
        bool block_was_full = block.is_full();
217
785k
        block.template for_each_cell_in_state<Cell::State::Live>([&](Cell* cell) {
218
785k
            if (!cell->is_marked()) {
219
617k
                dbgln_if(HEAP_DEBUG, "  ~ {}", cell);
220
617k
                block.deallocate(cell);
221
617k
                ++collected_cells;
222
617k
                collected_cell_bytes += block.cell_size();
223
617k
            } else {
224
167k
                cell->set_marked(false);
225
167k
                block_has_live_cells = true;
226
167k
                ++live_cells;
227
167k
                live_cell_bytes += block.cell_size();
228
167k
            }
229
785k
        });
230
15.2k
        if (!block_has_live_cells)
231
13.4k
            empty_blocks.append(&block);
232
1.81k
        else if (block_was_full != block.is_full())
233
24
            full_blocks_that_became_usable.append(&block);
234
15.2k
        return IterationDecision::Continue;
235
15.2k
    });
236
237
138
    for (auto& weak_container : m_weak_containers)
238
0
        weak_container.remove_dead_cells({});
239
240
13.4k
    for (auto* block : empty_blocks) {
241
13.4k
        dbgln_if(HEAP_DEBUG, " - HeapBlock empty @ {}: cell_size={}", block, block->cell_size());
242
13.4k
        allocator_for_size(block->cell_size()).block_did_become_empty({}, *block);
243
13.4k
    }
244
245
138
    for (auto* block : full_blocks_that_became_usable) {
246
24
        dbgln_if(HEAP_DEBUG, " - HeapBlock usable again @ {}: cell_size={}", block, block->cell_size());
247
24
        allocator_for_size(block->cell_size()).block_did_become_usable({}, *block);
248
24
    }
249
250
138
    if constexpr (HEAP_DEBUG) {
251
0
        for_each_block([&](auto& block) {
252
0
            dbgln(" > Live HeapBlock @ {}: cell_size={}", &block, block.cell_size());
253
0
            return IterationDecision::Continue;
254
0
        });
255
0
    }
256
257
138
    int time_spent = measurement_timer.elapsed();
258
259
138
    if (print_report) {
260
0
        size_t live_block_count = 0;
261
0
        for_each_block([&](auto&) {
262
0
            ++live_block_count;
263
0
            return IterationDecision::Continue;
264
0
        });
265
266
0
        dbgln("Garbage collection report");
267
0
        dbgln("=============================================");
268
0
        dbgln("     Time spent: {} ms", time_spent);
269
0
        dbgln("     Live cells: {} ({} bytes)", live_cells, live_cell_bytes);
270
0
        dbgln("Collected cells: {} ({} bytes)", collected_cells, collected_cell_bytes);
271
0
        dbgln("    Live blocks: {} ({} bytes)", live_block_count, live_block_count * HeapBlock::block_size);
272
0
        dbgln("   Freed blocks: {} ({} bytes)", empty_blocks.size(), empty_blocks.size() * HeapBlock::block_size);
273
0
        dbgln("=============================================");
274
0
    }
275
138
}
276
277
void Heap::did_create_handle(Badge<HandleImpl>, HandleImpl& impl)
278
172
{
279
172
    VERIFY(!m_handles.contains(impl));
280
0
    m_handles.append(impl);
281
172
}
282
283
void Heap::did_destroy_handle(Badge<HandleImpl>, HandleImpl& impl)
284
172
{
285
172
    VERIFY(m_handles.contains(impl));
286
0
    m_handles.remove(impl);
287
172
}
288
289
void Heap::did_create_marked_vector(Badge<MarkedVectorBase>, MarkedVectorBase& vector)
290
548k
{
291
548k
    VERIFY(!m_marked_vectors.contains(vector));
292
0
    m_marked_vectors.append(vector);
293
548k
}
294
295
void Heap::did_destroy_marked_vector(Badge<MarkedVectorBase>, MarkedVectorBase& vector)
296
548k
{
297
548k
    VERIFY(m_marked_vectors.contains(vector));
298
0
    m_marked_vectors.remove(vector);
299
548k
}
300
301
void Heap::did_create_weak_container(Badge<WeakContainer>, WeakContainer& set)
302
0
{
303
0
    VERIFY(!m_weak_containers.contains(set));
304
0
    m_weak_containers.append(set);
305
0
}
306
307
void Heap::did_destroy_weak_container(Badge<WeakContainer>, WeakContainer& set)
308
0
{
309
0
    VERIFY(m_weak_containers.contains(set));
310
0
    m_weak_containers.remove(set);
311
0
}
312
313
void Heap::defer_gc(Badge<DeferGC>)
314
76
{
315
76
    ++m_gc_deferrals;
316
76
}
317
318
void Heap::undefer_gc(Badge<DeferGC>)
319
76
{
320
76
    VERIFY(m_gc_deferrals > 0);
321
0
    --m_gc_deferrals;
322
323
76
    if (!m_gc_deferrals) {
324
76
        if (m_should_gc_when_deferral_ends)
325
0
            collect_garbage();
326
76
        m_should_gc_when_deferral_ends = false;
327
76
    }
328
76
}
329
330
void Heap::uproot_cell(Cell* cell)
331
0
{
332
0
    m_uprooted_cells.append(cell);
333
0
}
334
335
}