Coverage Report

Created: 2025-06-10 07:27

/src/ghostpdl/psi/igc.c
Line
Count
Source (jump to first uncovered line)
1
/* Copyright (C) 2001-2024 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  39 Mesa Street, Suite 108A, San Francisco,
13
   CA 94129, USA, for further information.
14
*/
15
16
17
/* Garbage collector for Ghostscript */
18
#include "memory_.h"
19
#include "ghost.h"
20
#include "ierrors.h"
21
#include "gsexit.h"
22
#include "gsmdebug.h"
23
#include "gsstruct.h"
24
#include "iastate.h"
25
#include "isave.h"
26
#include "isstate.h"
27
#include "idict.h"
28
#include "ipacked.h"
29
#include "istruct.h"
30
#include "igc.h"
31
#include "igcstr.h"
32
#include "inamedef.h"
33
#include "opdef.h"    /* for marking oparray names */
34
35
/* Define whether to force all garbage collections to be global. */
36
static const bool I_FORCE_GLOBAL_GC = false;
37
38
/* Define whether to bypass the collector entirely. */
39
static const bool I_BYPASS_GC = false;
40
41
/* Define an entry on the mark stack. */
42
typedef struct {
43
    void *ptr;
44
    uint index;
45
    bool is_refs;
46
} ms_entry;
47
48
/* Define (a segment of) the mark stack. */
49
/* entries[0] has ptr = 0 to indicate the bottom of the stack. */
50
/* count additional entries follow this structure. */
51
typedef struct gc_mark_stack_s gc_mark_stack;
52
struct gc_mark_stack_s {
53
    gc_mark_stack *prev;
54
    gc_mark_stack *next;
55
    uint count;
56
    bool on_heap;   /* if true, allocated during GC */
57
    ms_entry entries[1];
58
};
59
60
/* Define the mark stack sizing parameters. */
61
23.0k
#define ms_size_default 100  /* default, allocated on C stack */
62
/* This should probably be defined as a parameter somewhere.... */
63
#define ms_size_desired   /* for additional allocation */\
64
20.1k
 ((max_ushort - sizeof(gc_mark_stack)) / sizeof(ms_entry) - 10)
65
14.1M
#define ms_size_min 50    /* min size for segment in free block */
66
67
/* Forward references */
68
69
static void gc_init_mark_stack(gc_mark_stack *, uint);
70
static void gc_objects_clear_marks(const gs_memory_t *mem, clump_t *);
71
static void gc_unmark_names(name_table *, op_array_table *, op_array_table *);
72
static int gc_trace(gs_gc_root_t *, gc_state_t *, gc_mark_stack *);
73
static int gc_rescan_clump(clump_t *, gc_state_t *, gc_mark_stack *);
74
static int gc_trace_clump(const gs_memory_t *mem, clump_t *, gc_state_t *, gc_mark_stack *);
75
static bool gc_trace_finish(gc_state_t *);
76
static void gc_clear_reloc(clump_t *);
77
static void gc_objects_set_reloc(gc_state_t * gcst, clump_t *);
78
static void gc_do_reloc(clump_t *, gs_ref_memory_t *, gc_state_t *);
79
static void gc_objects_compact(clump_t *, gc_state_t *);
80
static void gc_free_empty_clumps(gs_ref_memory_t *);
81
82
/* Forward references for pointer types */
83
static ptr_proc_unmark(ptr_struct_unmark);
84
static ptr_proc_mark(ptr_struct_mark);
85
static ptr_proc_unmark(ptr_string_unmark);
86
static ptr_proc_mark(ptr_string_mark);
87
static ptr_proc_unmark(ptr_name_index_unmark);
88
static ptr_proc_mark(ptr_name_index_mark);
89
/*ptr_proc_unmark(ptr_ref_unmark); *//* in igc.h */
90
/*ptr_proc_mark(ptr_ref_mark); *//* in igc.h */
91
static ptr_proc_reloc(igc_reloc_struct_ptr, void);
92
93
ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed);  /* in igcref.c */
94
refs_proc_reloc(igc_reloc_refs);  /* in igcref.c */
95
96
/* Define this GC's procedure vector. */
97
static const gc_procs_with_refs_t igc_procs = {
98
    igc_reloc_struct_ptr, igc_reloc_string, igc_reloc_const_string,
99
    igc_reloc_param_string, igc_reloc_ref_ptr, igc_reloc_refs
100
};
101
102
/* Pointer type descriptors. */
103
/* Note that the trace/mark routine has special knowledge of ptr_ref_type */
104
/* and ptr_struct_type -- it assumes that no other types have embedded */
105
/* pointers.  Note also that the reloc procedures for string, ref and name */
106
/* pointers are never called. */
107
typedef ptr_proc_reloc((*ptr_proc_reloc_t), void);
108
const gs_ptr_procs_t ptr_struct_procs =
109
{ptr_struct_unmark, ptr_struct_mark, (ptr_proc_reloc_t) igc_reloc_struct_ptr};
110
const gs_ptr_procs_t ptr_string_procs =
111
{ptr_string_unmark, ptr_string_mark, NULL};
112
const gs_ptr_procs_t ptr_const_string_procs =
113
{ptr_string_unmark, ptr_string_mark, NULL};
114
const gs_ptr_procs_t ptr_ref_procs =
115
{ptr_ref_unmark, ptr_ref_mark, NULL};
116
const gs_ptr_procs_t ptr_name_index_procs =
117
{ptr_name_index_unmark, ptr_name_index_mark, NULL};
118
119
/* ------ Main program ------ */
120
121
/* Top level of garbage collector. */
122
#ifdef DEBUG
123
static void
124
end_phase(const gs_memory_t *mem, const char *str)
125
{
126
    if (gs_debug_c('6')) {
127
        dmlprintf1(mem, "[6]---------------- end %s ----------------\n",
128
                   (const char *)str);
129
        dmflush(mem);
130
    }
131
}
132
static const char *const depth_dots_string = "..........";
133
static const char *
134
depth_dots(const ms_entry * sp, const gc_mark_stack * pms)
135
{
136
    int depth = sp - pms->entries - 1;
137
    const gc_mark_stack *pss = pms;
138
139
    while ((pss = pss->prev) != 0)
140
        depth += pss->count - 1;
141
    return depth_dots_string + (depth >= 10 ? 0 : 10 - depth);
142
}
143
static void
144
gc_validate_spaces(gs_ref_memory_t **spaces, int max_space, gc_state_t *gcst)
145
{
146
    int i;
147
    gs_ref_memory_t *mem;
148
149
    for (i = 1; i <= max_space; ++i)
150
        if ((mem = spaces[i]) != 0)
151
            ialloc_validate_memory(mem, gcst);
152
}
153
#else  /* !DEBUG */
154
367k
#  define end_phase(mem,str) DO_NOTHING
155
#endif /* DEBUG */
156
157
void
158
gs_gc_reclaim(vm_spaces * pspaces, bool global)
159
23.0k
{
160
23.0k
#define nspaces ((i_vm_max + 1) * 2) /* * 2 for stable allocators */
161
162
23.0k
    vm_spaces spaces;
163
23.0k
    gs_ref_memory_t *space_memories[nspaces];
164
23.0k
    gs_gc_root_t space_roots[nspaces];
165
23.0k
    int max_trace;    /* max space_ to trace */
166
23.0k
    int min_collect;    /* min space_ to collect */
167
23.0k
    int min_collect_vm_space; /* min VM space to collect */
168
23.0k
    int ispace;
169
23.0k
    gs_ref_memory_t *mem;
170
23.0k
    clump_t *cp;
171
23.0k
    gs_gc_root_t *rp;
172
23.0k
    gc_state_t state;
173
23.0k
    struct _msd {
174
23.0k
        gc_mark_stack stack;
175
23.0k
        ms_entry body[ms_size_default];
176
23.0k
    } ms_default;
177
23.0k
    gc_mark_stack *mark_stack = &ms_default.stack;
178
23.0k
    const gs_memory_t *cmem;
179
23.0k
    clump_splay_walker sw;
180
181
    /* Optionally force global GC for debugging. */
182
183
23.0k
    if (I_FORCE_GLOBAL_GC)
184
0
        global = true;
185
186
    /* Determine which spaces we are tracing and collecting. */
187
188
23.0k
    spaces = *pspaces;
189
23.0k
    cmem = space_system->stable_memory;
190
23.0k
    space_memories[1] = space_system;
191
23.0k
    space_memories[2] = space_global;
192
23.0k
    min_collect = max_trace = 2;
193
23.0k
    min_collect_vm_space = i_vm_global;
194
23.0k
    if (space_global->stable_memory != (gs_memory_t *)space_global)
195
23.0k
        space_memories[++max_trace] =
196
23.0k
            (gs_ref_memory_t *)space_global->stable_memory;
197
23.0k
    if (space_global != space_local) {
198
23.0k
        space_memories[++max_trace] = space_local;
199
23.0k
        min_collect = max_trace;
200
23.0k
        min_collect_vm_space = i_vm_local;
201
23.0k
        if (space_local->stable_memory != (gs_memory_t *)space_local)
202
23.0k
            space_memories[++max_trace] =
203
23.0k
                (gs_ref_memory_t *)space_local->stable_memory;
204
23.0k
    }
205
23.0k
    if (global)
206
21.7k
        min_collect = min_collect_vm_space = 1;
207
208
23.0k
#define for_spaces(i, n)\
209
915k
  for (i = 1; i <= n; ++i)
210
23.0k
#define for_collected_spaces(i)\
211
1.20M
  for (i = min_collect; i <= max_trace; ++i)
212
23.0k
#define for_space_mems(i, mem)\
213
9.92M
  for (mem = space_memories[i]; mem != 0; mem = &mem->saved->state)
214
23.0k
#define for_mem_clumps(mem, cp, sw)\
215
44.0M
  for (cp = clump_splay_walk_init(sw, mem); cp != 0; cp = clump_splay_walk_fwd(sw))
216
23.0k
#define for_space_clumps(i, mem, cp, sw)\
217
6.36M
  for_space_mems(i, mem) for_mem_clumps(mem, cp, sw)
218
23.0k
#define for_clumps(i, n, mem, cp, sw)\
219
115k
  for_spaces(i, n) for_space_clumps(i, mem, cp, sw)
220
23.0k
#define for_collected_clumps(i, mem, cp, sw)\
221
46.0k
  for_collected_spaces(i) for_space_clumps(i, mem, cp, sw)
222
23.0k
#define for_roots(i, n, mem, rp)\
223
69.0k
  for_spaces(i, n)\
224
1.07M
    for (mem = space_memories[i], rp = mem->roots; rp != 0; rp = rp->next)
225
226
    /* Initialize the state. */
227
228
23.0k
    state.procs = &igc_procs;
229
23.0k
    state.loc.memory = space_global; /* any one will do */
230
231
23.0k
    state.loc.cp = 0;
232
23.0k
    state.spaces = spaces;
233
23.0k
    state.min_collect = min_collect_vm_space << r_space_shift;
234
23.0k
    state.relocating_untraced = false;
235
23.0k
    state.heap = state.loc.memory->non_gc_memory;
236
23.0k
    state.ntable = state.heap->gs_lib_ctx->gs_name_table;
237
238
    /* Register the allocators themselves as roots, */
239
    /* so we mark and relocate the change and save lists properly. */
240
241
115k
    for_spaces(ispace, max_trace) {
242
115k
        rp = &space_roots[ispace];
243
115k
        gs_register_struct_root((gs_memory_t *)space_memories[ispace],
244
115k
                                &rp,
245
115k
                                (void **)&space_memories[ispace],
246
115k
                                "gc_top_level");
247
115k
    }
248
249
23.0k
    end_phase(state.heap,"register space roots");
250
251
#ifdef DEBUG
252
253
    /* Pre-validate the state.  This shouldn't be necessary.... */
254
255
    gc_validate_spaces(space_memories, max_trace, &state);
256
257
    end_phase(state.heap,"pre-validate pointers");
258
259
#endif
260
261
23.0k
    if (I_BYPASS_GC) {   /* Don't collect at all. */
262
0
        goto no_collect;
263
0
    }
264
265
    /* Clear marks in spaces to be collected. */
266
267
23.0k
    for_collected_spaces(ispace)
268
6.78M
        for_space_clumps(ispace, mem, cp, &sw) {
269
6.78M
            gc_objects_clear_marks((const gs_memory_t *)mem, cp);
270
6.78M
            gc_strings_set_marks(cp, false);
271
6.78M
        }
272
273
23.0k
    end_phase(state.heap,"clear clump marks");
274
275
    /* Clear the marks of roots.  We must do this explicitly, */
276
    /* since some roots are not in any clump. */
277
278
242k
    for_roots(ispace, max_trace, mem, rp) {
279
242k
        enum_ptr_t eptr;
280
281
242k
        eptr.ptr = *rp->p;
282
242k
        if_debug_root('6', (const gs_memory_t *)mem, "[6]unmarking root", rp);
283
242k
        (*rp->ptype->unmark)(&eptr, &state);
284
242k
    }
285
286
23.0k
    end_phase(state.heap,"clear root marks");
287
288
23.0k
    if (global) {
289
21.7k
        op_array_table *global_ops = get_global_op_array(cmem);
290
21.7k
        op_array_table *local_ops  = get_local_op_array(cmem);
291
21.7k
        gc_unmark_names(state.ntable, global_ops, local_ops);
292
21.7k
    }
293
294
    /* Initialize the (default) mark stack. */
295
296
23.0k
    gc_init_mark_stack(&ms_default.stack, ms_size_default);
297
23.0k
    ms_default.stack.prev = 0;
298
23.0k
    ms_default.stack.on_heap = false;
299
300
    /* Add all large-enough free blocks to the mark stack. */
301
    /* Also initialize the rescan pointers. */
302
303
23.0k
    {
304
23.0k
        gc_mark_stack *end = mark_stack;
305
306
6.98M
        for_clumps(ispace, max_trace, mem, cp, &sw) {
307
6.98M
            uint avail = cp->ctop - cp->cbot;
308
309
6.98M
            if (avail >= sizeof(gc_mark_stack) + sizeof(ms_entry) *
310
6.98M
                ms_size_min &&
311
6.98M
                !cp->inner_count
312
6.98M
                ) {
313
741k
                gc_mark_stack *pms = (gc_mark_stack *) cp->cbot;
314
315
741k
                gc_init_mark_stack(pms, (avail - sizeof(gc_mark_stack)) /
316
741k
                                   sizeof(ms_entry));
317
741k
                end->next = pms;
318
741k
                pms->prev = end;
319
741k
                pms->on_heap = false;
320
741k
                if_debug2m('6', (const gs_memory_t *)mem,
321
741k
                           "[6]adding free "PRI_INTPTR"(%u) to mark stack\n",
322
741k
                           (intptr_t)pms, pms->count);
323
741k
            }
324
6.98M
            cp->rescan_bot = cp->cend;
325
6.98M
            cp->rescan_top = cp->cbase;
326
6.98M
        }
327
23.0k
    }
328
329
    /* Mark reachable objects. */
330
331
23.0k
    {
332
23.0k
        int more = 0;
333
334
        /* Mark from roots. */
335
336
242k
        for_roots(ispace, max_trace, mem, rp) {
337
242k
            if_debug_root('6', (const gs_memory_t *)mem, "[6]marking root", rp);
338
242k
            more |= gc_trace(rp, &state, mark_stack);
339
242k
        }
340
341
23.0k
        end_phase(state.heap,"mark");
342
343
        /* If this is a local GC, mark from non-local clumps. */
344
345
23.0k
        if (!global)
346
23.0k
            for_clumps(ispace, min_collect - 1, mem, cp, &sw)
347
195k
                more |= gc_trace_clump((const gs_memory_t *)mem, cp, &state, mark_stack);
348
349
        /* Handle mark stack overflow. */
350
351
23.0k
        while (more < 0) { /* stack overflowed */
352
10
            more = 0;
353
594k
            for_clumps(ispace, max_trace, mem, cp, &sw)
354
1.31M
                more |= gc_rescan_clump(cp, &state, mark_stack);
355
10
        }
356
357
23.0k
        end_phase(state.heap,"mark overflow");
358
23.0k
    }
359
360
    /* Free the mark stack. */
361
362
23.0k
    {
363
23.0k
        gc_mark_stack *pms = mark_stack;
364
365
46.7k
        while (pms->next)
366
23.7k
            pms = pms->next;
367
69.8k
        while (pms) {
368
46.7k
            gc_mark_stack *prev = pms->prev;
369
370
46.7k
            if (pms->on_heap)
371
732
                gs_free_object(state.heap, pms, "gc mark stack");
372
46.0k
            else
373
46.7k
                gs_alloc_fill(pms, gs_alloc_fill_free,
374
46.7k
                              sizeof(*pms) + sizeof(ms_entry) * pms->count);
375
46.7k
            pms = prev;
376
46.7k
        }
377
23.0k
    }
378
379
23.0k
    end_phase(state.heap,"free mark stack");
380
381
23.0k
    if (global) {
382
21.7k
        gc_trace_finish(&state);
383
21.7k
        names_trace_finish(state.ntable, &state);
384
385
21.7k
        end_phase(state.heap,"finish trace");
386
21.7k
    }
387
388
    /* Filter save change lists with removing elements,
389
       which point to unmarked blocks of refs. */
390
23.0k
    {
391
23.0k
        int i;
392
393
111k
        for_collected_spaces(i) {
394
111k
            gs_ref_memory_t *mem = space_memories[i];
395
396
111k
            alloc_save__filter_changes(mem);
397
111k
        }
398
23.0k
    }
399
    /* Clear marks and relocation in spaces that are only being traced. */
400
    /* We have to clear the marks first, because we want the */
401
    /* relocation to wind up as o_untraced, not o_unmarked. */
402
403
23.0k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
404
195k
        gc_objects_clear_marks((const gs_memory_t *)mem, cp);
405
406
23.0k
    end_phase(state.heap,"post-clear marks");
407
408
23.0k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
409
195k
        gc_clear_reloc(cp);
410
411
23.0k
    end_phase(state.heap,"clear reloc");
412
413
    /* Set the relocation of roots outside any clump to o_untraced, */
414
    /* so we won't try to relocate pointers to them. */
415
    /* (Currently, there aren't any.) */
416
417
    /* Disable freeing in the allocators of the spaces we are */
418
    /* collecting, so finalization procedures won't cause problems. */
419
23.0k
    {
420
23.0k
        int i;
421
422
23.0k
        for_collected_spaces(i)
423
111k
            gs_enable_free((gs_memory_t *)space_memories[i], false);
424
23.0k
    }
425
426
    /* Compute relocation based on marks, in the spaces */
427
    /* we are going to compact.  Also finalize freed objects. */
428
23.0k
    state.cur_mem = (gs_memory_t *)mem;
429
430
6.78M
    for_collected_clumps(ispace, mem, cp, &sw) {
431
6.78M
        gc_objects_set_reloc(&state, cp);
432
6.78M
        gc_strings_set_reloc(cp);
433
6.78M
    }
434
435
    /* Re-enable freeing. */
436
23.0k
    {
437
23.0k
        int i;
438
439
23.0k
        for_collected_spaces(i)
440
111k
            gs_enable_free((gs_memory_t *)space_memories[i], true);
441
23.0k
    }
442
443
23.0k
    end_phase(state.heap,"set reloc");
444
445
    /* Relocate pointers. */
446
447
23.0k
    state.relocating_untraced = true;
448
23.0k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
449
195k
        gc_do_reloc(cp, mem, &state);
450
23.0k
    state.relocating_untraced = false;
451
1.54M
    for_collected_clumps(ispace, mem, cp, &sw)
452
6.78M
        gc_do_reloc(cp, mem, &state);
453
454
23.0k
    end_phase(state.heap,"relocate clumps");
455
456
242k
    for_roots(ispace, max_trace, mem, rp) {
457
242k
        if_debug3m('6', (const gs_memory_t *)mem,
458
242k
                   "[6]relocating root "PRI_INTPTR": "PRI_INTPTR" -> "PRI_INTPTR"\n",
459
242k
                   (intptr_t)rp, (intptr_t)rp->p, (intptr_t)*rp->p);
460
242k
        if (rp->ptype == ptr_ref_type) {
461
12.2k
            ref *pref = (ref *) * rp->p;
462
463
12.2k
            igc_reloc_refs((ref_packed *) pref,
464
12.2k
                           (ref_packed *) (pref + 1),
465
12.2k
                           &state);
466
12.2k
        } else
467
230k
            *rp->p = (*rp->ptype->reloc) (*rp->p, &state);
468
242k
        if_debug3m('6', (const gs_memory_t *)mem,
469
242k
                   "[6]relocated root "PRI_INTPTR": "PRI_INTPTR" -> "PRI_INTPTR"\n",
470
242k
                   (intptr_t)rp, (intptr_t)rp->p, (intptr_t)*rp->p);
471
242k
    }
472
473
23.0k
    end_phase(state.heap,"relocate roots");
474
475
    /* Compact data.  We only do this for spaces we are collecting. */
476
477
111k
    for_collected_spaces(ispace) {
478
1.43M
        for_space_mems(ispace, mem) {
479
6.78M
            for_mem_clumps(mem, cp, &sw) {
480
6.78M
                if_debug_clump('6', (const gs_memory_t *)mem, "[6]compacting clump", cp);
481
6.78M
                gc_objects_compact(cp, &state);
482
6.78M
                gc_strings_compact(cp, cmem);
483
6.78M
                if_debug_clump('6', (const gs_memory_t *)mem, "[6]after compaction:", cp);
484
6.78M
            }
485
1.43M
            mem->saved = mem->reloc_saved;
486
1.43M
            ialloc_reset_free(mem);
487
1.43M
        }
488
111k
    }
489
490
23.0k
    end_phase(state.heap,"compact");
491
492
    /* Free empty clumps. */
493
494
111k
    for_collected_spaces(ispace) {
495
1.43M
        for_space_mems(ispace, mem) {
496
1.43M
            gc_free_empty_clumps(mem);
497
1.43M
        }
498
111k
    }
499
500
23.0k
    end_phase(state.heap,"free empty clumps");
501
502
    /*
503
     * Update previous_status to reflect any freed clumps,
504
     * and set inherited to the negative of allocated,
505
     * so it has no effect.  We must update previous_status by
506
     * working back-to-front along the save chain, using pointer reversal.
507
     * (We could update inherited in any order, since it only uses
508
     * information local to the individual save level.)
509
     */
510
511
111k
    for_collected_spaces(ispace) { /* Reverse the pointers. */
512
111k
        alloc_save_t *curr;
513
111k
        alloc_save_t *prev = 0;
514
111k
        alloc_save_t *next;
515
111k
        gs_memory_status_t total;
516
517
1.43M
        for (curr = space_memories[ispace]->saved; curr != 0;
518
1.32M
             prev = curr, curr = next
519
1.32M
            ) {
520
1.32M
            next = curr->state.saved;
521
1.32M
            curr->state.saved = prev;
522
1.32M
        }
523
        /* Now work the other way, accumulating the values. */
524
111k
        total.allocated = 0, total.used = 0;
525
1.43M
        for (curr = prev, prev = 0; curr != 0;
526
1.32M
             prev = curr, curr = next
527
1.32M
            ) {
528
1.32M
            mem = &curr->state;
529
1.32M
            next = mem->saved;
530
1.32M
            mem->saved = prev;
531
1.32M
            mem->previous_status = total;
532
1.32M
            if_debug3m('6', (const gs_memory_t *)mem,
533
1.32M
                       "[6]"PRI_INTPTR" previous allocated=%lu, used=%lu\n",
534
1.32M
                       (intptr_t)mem, total.allocated, total.used);
535
1.32M
            gs_memory_status((gs_memory_t *) mem, &total);
536
1.32M
            mem->gc_allocated = mem->allocated + total.allocated;
537
1.32M
        }
538
111k
        mem = space_memories[ispace];
539
111k
        mem->previous_status = total;
540
111k
        mem->gc_allocated = mem->allocated + total.allocated;
541
111k
        if_debug3m('6', (const gs_memory_t *)mem,
542
111k
                   "[6]"PRI_INTPTR" previous allocated=%lu, used=%lu\n",
543
111k
                   (intptr_t)mem, total.allocated, total.used);
544
111k
    }
545
546
23.0k
    end_phase(state.heap,"update stats");
547
548
23.0k
  no_collect:
549
550
    /* Unregister the allocator roots. */
551
552
23.0k
    for_spaces(ispace, max_trace)
553
115k
        gs_unregister_root((gs_memory_t *)space_memories[ispace],
554
23.0k
                           &space_roots[ispace], "gc_top_level");
555
556
23.0k
    end_phase(state.heap,"unregister space roots");
557
558
#ifdef DEBUG
559
560
    /* Validate the state.  This shouldn't be necessary.... */
561
562
    gc_validate_spaces(space_memories, max_trace, &state);
563
564
    end_phase(state.heap,"validate pointers");
565
566
#endif
567
23.0k
}
568
569
/* ------ Debugging utilities ------ */
570
571
/* Validate a pointer to an object header. */
572
#ifdef DEBUG
573
#  define debug_check_object(pre, cp, gcst)\
574
     ialloc_validate_object((pre) + 1, cp, gcst)
575
#else
576
1.15G
#  define debug_check_object(pre, cp, gcst) DO_NOTHING
577
#endif
578
579
/* ------ Unmarking phase ------ */
580
581
/* Unmark a single struct. */
582
static void
583
ptr_struct_unmark(enum_ptr_t *pep, gc_state_t * ignored)
584
230k
{
585
230k
    void *const vptr = (void *)pep->ptr; /* break const */
586
587
230k
    if (vptr != 0)
588
230k
        o_set_unmarked(((obj_header_t *) vptr - 1));
589
230k
}
590
591
/* Unmark a single string. */
592
static void
593
ptr_string_unmark(enum_ptr_t *pep, gc_state_t * gcst)
594
0
{
595
0
    discard(gc_string_mark(pep->ptr, pep->size, false, gcst));
596
0
}
597
598
/* Unmark a single name. */
599
static void
600
ptr_name_index_unmark(enum_ptr_t *pep, gc_state_t * gcst)
601
0
{
602
    /* Do nothing */
603
0
}
604
605
/* Unmark the objects in a clump. */
606
static void
607
gc_objects_clear_marks(const gs_memory_t *mem, clump_t * cp)
608
6.98M
{
609
6.98M
    if_debug_clump('6', mem, "[6]unmarking clump", cp);
610
91.0M
    SCAN_CLUMP_OBJECTS(cp)
611
91.0M
        DO_ALL
612
91.0M
        struct_proc_clear_marks((*proc)) =
613
91.0M
        pre->o_type->clear_marks;
614
#ifdef DEBUG
615
    if (pre->o_type != &st_free)
616
        debug_check_object(pre, cp, NULL);
617
#endif
618
91.0M
    if_debug3m('7', (const gs_memory_t *)mem, " [7](un)marking %s(%lu) "PRI_INTPTR"\n",
619
91.0M
               struct_type_name_string(pre->o_type),
620
91.0M
               (ulong) size, (intptr_t)pre);
621
91.0M
    o_set_unmarked(pre);
622
91.0M
    if (proc != 0)
623
31.1M
        (*proc) (mem, pre + 1, size, pre->o_type);
624
91.0M
    END_OBJECTS_SCAN
625
6.98M
}
626
627
/* Mark 0- and 1-character names, and those referenced from the */
628
/* op_array_nx_table, and unmark all the rest. */
629
static void
630
gc_unmark_names(name_table * nt, op_array_table *op_array_table_global,
631
                op_array_table *op_array_table_local)
632
21.7k
{
633
21.7k
    uint i;
634
635
21.7k
    names_unmark_all(nt);
636
4.01M
    for (i = 0; i < op_array_table_global->count; i++) {
637
3.99M
        name_index_t nidx = op_array_table_global->nx_table[i];
638
639
3.99M
        names_mark_index(nt, nidx);
640
3.99M
    }
641
21.7k
    for (i = 0; i < op_array_table_local->count; i++) {
642
0
        name_index_t nidx = op_array_table_local->nx_table[i];
643
644
0
        names_mark_index(nt, nidx);
645
0
    }
646
21.7k
}
647
648
/* ------ Marking phase ------ */
649
650
/* Initialize (a segment of) the mark stack. */
651
static void
652
gc_init_mark_stack(gc_mark_stack * pms, uint count)
653
764k
{
654
764k
    pms->next = 0;
655
764k
    pms->count = count;
656
764k
    pms->entries[0].ptr = 0;
657
764k
    pms->entries[0].index = 0;
658
764k
    pms->entries[0].is_refs = false;
659
764k
}
660
661
/* Mark starting from all marked objects in the interval of a clump */
662
/* needing rescanning. */
663
static int
664
gc_rescan_clump(clump_t * cp, gc_state_t * pstate, gc_mark_stack * pmstack)
665
1.31M
{
666
1.31M
    byte *sbot = cp->rescan_bot;
667
1.31M
    byte *stop = cp->rescan_top;
668
1.31M
    gs_gc_root_t root;
669
1.31M
    void *comp;
670
1.31M
    int more = 0;
671
1.31M
    const gs_memory_t *mem = gcst_get_memory_ptr( pstate );
672
673
1.31M
    if (sbot > stop)
674
1.31M
        return 0;
675
611
    root.p = &comp;
676
611
    if_debug_clump('6', mem, "[6]rescanning clump", cp);
677
611
    cp->rescan_bot = cp->cend;
678
611
    cp->rescan_top = cp->cbase;
679
9.47k
    SCAN_CLUMP_OBJECTS(cp)
680
9.47k
        DO_ALL
681
9.47k
        if ((byte *) (pre + 1) + size < sbot);
682
3.55k
    else if ((byte *) (pre + 1) > stop)
683
472
        return more;    /* 'break' won't work here */
684
3.08k
    else {
685
3.08k
        if_debug2m('7', mem, " [7]scanning/marking "PRI_INTPTR"(%lu)\n",
686
3.08k
                   (intptr_t)pre, (ulong)size);
687
3.08k
        if (pre->o_type == &st_refs) {
688
0
            ref_packed *rp = (ref_packed *) (pre + 1);
689
0
            char *end = (char *)rp + size;
690
691
0
            root.ptype = ptr_ref_type;
692
0
            while ((char *)rp < end) {
693
0
                comp = rp;
694
0
                if (r_is_packed(rp)) {
695
0
                    if (r_has_pmark(rp)) {
696
0
                        r_clear_pmark(rp);
697
0
                        more |= gc_trace(&root, pstate,
698
0
                                         pmstack);
699
0
                    }
700
0
                    rp++;
701
0
                } else {
702
0
                    ref *const pref = (ref *)rp;
703
704
0
                    if (r_has_attr(pref, l_mark)) {
705
0
                        r_clear_attrs(pref, l_mark);
706
0
                        more |= gc_trace(&root, pstate, pmstack);
707
0
                    }
708
0
                    rp += packed_per_ref;
709
0
                }
710
0
            }
711
3.08k
        } else if (!o_is_unmarked(pre)) {
712
2.30k
            struct_proc_clear_marks((*proc)) =
713
2.30k
                pre->o_type->clear_marks;
714
2.30k
            root.ptype = ptr_struct_type;
715
2.30k
            comp = pre + 1;
716
2.30k
            if (!o_is_untraced(pre))
717
2.30k
                o_set_unmarked(pre);
718
2.30k
            if (proc != 0)
719
490
                (*proc) (mem, comp, size, pre->o_type);
720
2.30k
            more |= gc_trace(&root, pstate, pmstack);
721
2.30k
        }
722
3.08k
    }
723
9.47k
    END_OBJECTS_SCAN
724
139
        return more;
725
611
}
726
727
/* Mark starting from all the objects in a clump. */
728
/* We assume that pstate->min_collect > avm_system, */
729
/* so we don't have to trace names. */
730
static int
731
gc_trace_clump(const gs_memory_t *mem, clump_t * cp, gc_state_t * pstate, gc_mark_stack * pmstack)
732
195k
{
733
195k
    gs_gc_root_t root;
734
195k
    void *comp;
735
195k
    int more = 0;
736
195k
    int min_trace = pstate->min_collect;
737
738
195k
    root.p = &comp;
739
195k
    if_debug_clump('6', mem, "[6]marking from clump", cp);
740
2.24M
    SCAN_CLUMP_OBJECTS(cp)
741
2.24M
        DO_ALL
742
2.24M
    {
743
2.24M
        if_debug2m('7', mem, " [7]scanning/marking "PRI_INTPTR"(%lu)\n",
744
2.24M
                   (intptr_t)pre, (ulong)size);
745
2.24M
        if (pre->o_type == &st_refs) {
746
1.20M
            ref_packed *rp = (ref_packed *) (pre + 1);
747
1.20M
            char *end = (char *)rp + size;
748
749
1.20M
            root.ptype = ptr_ref_type;
750
128M
            while ((char *)rp < end) {
751
127M
                comp = rp;
752
127M
                if (r_is_packed(rp)) { /* No packed refs need tracing. */
753
46.0M
                    rp++;
754
81.2M
                } else {
755
81.2M
                    ref *const pref = (ref *)rp;
756
757
81.2M
                    if (r_space(pref) >= min_trace) {
758
14.3k
                        r_clear_attrs(pref, l_mark);
759
14.3k
                        more |= gc_trace(&root, pstate, pmstack);
760
14.3k
                    }
761
81.2M
                    rp += packed_per_ref;
762
81.2M
                }
763
127M
            }
764
1.20M
        } else if (!o_is_unmarked(pre)) {
765
1.03M
            if (!o_is_untraced(pre))
766
1.03M
                o_set_unmarked(pre);
767
1.03M
            if (pre->o_type != &st_free) {
768
1.02M
                struct_proc_clear_marks((*proc)) =
769
1.02M
                    pre->o_type->clear_marks;
770
771
1.02M
                root.ptype = ptr_struct_type;
772
1.02M
                comp = pre + 1;
773
1.02M
                if (proc != 0)
774
19.7k
                    (*proc) (mem, comp, size, pre->o_type);
775
1.02M
                more |= gc_trace(&root, pstate, pmstack);
776
1.02M
            }
777
1.03M
        }
778
2.24M
    }
779
2.24M
    END_OBJECTS_SCAN
780
195k
        return more;
781
195k
}
782
783
/* Recursively mark from a (root) pointer. */
/* Return -1 if we overflowed the mark stack, */
/* 0 if we completed successfully without marking any new objects, */
/* 1 if we completed and marked some new objects. */
static int gc_extend_stack(gc_mark_stack *, gc_state_t *);
static int
gc_trace(gs_gc_root_t * rp, /* lgtm [cpp/use-of-goto] */
         gc_state_t * pstate, gc_mark_stack * pmstack)
{
    int min_trace = pstate->min_collect;
    gc_mark_stack *pms = pmstack;
    /* entries[0] is the stack-bottom sentinel (ptr == 0); start at [1]. */
    ms_entry *sp = pms->entries + 1;

    /* We stop the mark stack 1 entry early, because we store into */
    /* the entry beyond the top. */
    ms_entry *stop = sp + pms->count - 2;
    int new = 0;                /* accumulates 1 when anything newly marked */
    enum_ptr_t nep;
    void *nptr;
    name_table *nt = pstate->ntable;

/* Mark one name-table entry; sets 'new' if the name was not already marked. */
#define mark_name(nidx)\
  BEGIN\
    if (names_mark_index(nt, nidx)) {\
        new |= 1;\
        if_debug2m('8', gcst_get_memory_ptr(pstate), "  [8]marked name "PRI_INTPTR"(%u)\n",\
                   (intptr_t)names_index_ptr(nt, nidx), nidx);\
    }\
  END

    nptr = *rp->p;
    if (nptr == 0)
        return 0;

    /* Initialize the stack */
    sp->ptr = nptr;
    if (rp->ptype == ptr_ref_type)
        sp->index = 1, sp->is_refs = true;
    else {
        sp->index = 0, sp->is_refs = false;
        nep.ptr = nptr;
        if ((*rp->ptype->mark) (&nep, pstate))
            new |= 1;
    }
    for (;;) {
        gs_ptr_type_t ptp;

        /*
         * The following should really be an if..else, but that
         * would force unnecessary is_refs tests.
         */
        if (sp->is_refs)
            goto do_refs;

        /* ---------------- Structure ---------------- */
        /* Enumerate the pointers inside a struct via its type's
         * enum_ptrs procedure, descending into each in turn. */

      do_struct:
        {
            obj_header_t *ptr = sp->ptr;

            struct_proc_enum_ptrs((*mproc));

            if (ptr == 0) { /* We've reached the bottom of a stack segment. */
                pms = pms->prev;
                if (pms == 0)
                    break;  /* all done */
                stop = pms->entries + pms->count - 1;
                sp = stop;
                continue;
            }
            debug_check_object(ptr - 1, NULL, NULL);
          ts:if_debug4m('7', pstate->heap, " [7]%smarking %s "PRI_INTPTR"[%u]",
                        depth_dots(sp, pms),
                        struct_type_name_string(ptr[-1].o_type),
                        (intptr_t)ptr, sp->index);
            mproc = ptr[-1].o_type->enum_ptrs;
            if (mproc == gs_no_struct_enum_ptrs ||
                (ptp = (*mproc)
                 (gcst_get_memory_ptr(pstate), ptr, pre_obj_contents_size(ptr - 1),
                  sp->index, &nep, ptr[-1].o_type, pstate)) == 0
                ) {
                /* No (more) pointers in this object: pop it. */
                if_debug0m('7', pstate->heap, " - done\n");
                sp--;
                continue;
            }
            /* The cast in the following statement is the one */
            /* place we need to break 'const' to make the */
            /* template for pointer enumeration work. */
            nptr = (void *)nep.ptr;
            sp->index++;
            if_debug1m('7', pstate->heap, " = "PRI_INTPTR"\n", (intptr_t)nptr);
            /* Descend into nep.ptr, whose pointer type is ptp. */
            if (ptp == ptr_struct_type) {
                sp[1].index = 0;
                sp[1].is_refs = false;
                if (sp == stop)
                    goto push;
                if (!ptr_struct_mark(&nep, pstate))
                    goto ts;    /* already marked: stay on current object */
                new |= 1;
                (++sp)->ptr = nptr;
                goto do_struct;
            } else if (ptp == ptr_ref_type) {
                sp[1].index = 1;
                sp[1].is_refs = true;
                if (sp == stop)
                    goto push;
                new |= 1;
                (++sp)->ptr = nptr;
                goto do_refs;
            } else {   /* We assume this is some non-pointer- */
                /* containing type. */
                /* This was the only formulation of this call/condition/assignment that valgrind did not complain about */
                new |= ((*ptp->mark) (&nep, pstate)) ? 1 : 0;
               goto ts;
            }
        }

        /* ---------------- Refs ---------------- */
        /* Walk an array of (possibly packed) refs; sp->index counts
         * the refs remaining, sp->ptr advances through them. */

      do_refs:
        {
            ref_packed *pptr = sp->ptr;
            ref *rptr;

          tr:if (!sp->index) {
                --sp;
                continue;
            }
            --(sp->index);
            if_debug3m('8', pstate->heap, "  [8]%smarking refs "PRI_INTPTR"[%u]\n",
                      depth_dots(sp, pms), (intptr_t)pptr, sp->index);
            if (r_is_packed(pptr)) {
                if (!r_has_pmark(pptr)) {
                    r_set_pmark(pptr);
                    new |= 1;
                    if (r_packed_is_name(pptr)) {
                        name_index_t nidx = packed_name_index(pptr);

                        mark_name(nidx);
                    }
                }
                ++pptr;
                goto tr;
            }
            rptr = (ref *) pptr;  /* * const beyond here */
            if (r_has_attr(rptr, l_mark)) {
                pptr = (ref_packed *)(rptr + 1);
                goto tr;
            }
            r_set_attrs(rptr, l_mark);
            new |= 1;
            if (r_space(rptr) < min_trace) { /* Note that this always picks up all scalars. */
                pptr = (ref_packed *) (rptr + 1);
                goto tr;
            }
            /* Remember where to resume in this ref array before descending. */
            sp->ptr = rptr + 1;
            switch (r_type(rptr)) {
                    /* Struct cases */
                case t_file:
                    nptr = rptr->value.pfile;
                  rs:sp[1].is_refs = false;
                    sp[1].index = 0;
                    if (sp == stop) {
                        ptp = ptr_struct_type;
                        break;  /* fall through to push: segment full */
                    }
                    nep.ptr = nptr;
                    if (!ptr_struct_mark(&nep, pstate))
                        goto nr;
                    new |= 1;
                    (++sp)->ptr = nptr;
                    goto do_struct;
                case t_device:
                    nptr = rptr->value.pdevice;
                    goto rs;
                case t_fontID:
                case t_struct:
                case t_astruct:
                case t_pdfctx:
                    nptr = rptr->value.pstruct;
                    goto rs;
                    /* Non-trivial non-struct cases */
                case t_dictionary:
                    /* Trace the dict header refs as well as its contents. */
                    nptr = rptr->value.pdict;
                    sp[1].index = sizeof(dict) / sizeof(ref);
                    goto rrp;
                case t_array:
                    nptr = rptr->value.refs;
                  rr:if ((sp[1].index = r_size(rptr)) == 0) { /* Set the base pointer to 0, */
                        /* so we never try to relocate it. */
                        rptr->value.refs = 0;
                        goto nr;
                    }
                  rrp:
                  rrc:sp[1].is_refs = true;
                    if (sp == stop) {
                        /*
                         * The following initialization is unnecessary:
                         * ptp will not be used if sp[1].is_refs = true.
                         * We put this here solely to get rid of bogus
                         * "possibly uninitialized variable" warnings
                         * from certain compilers.
                         */
                        ptp = ptr_ref_type;
                        break;  /* fall through to push: segment full */
                    }
                    new |= 1;
                    (++sp)->ptr = nptr;
                    goto do_refs;
                case t_mixedarray:
                case t_shortarray:
                    nptr = rptr->value.writable_packed;
                    goto rr;
                case t_name:
                    mark_name(names_index(nt, rptr));
                  nr:pptr = (ref_packed *) (rptr + 1);
                    goto tr;
                case t_string:
                    if (gc_string_mark(rptr->value.bytes, r_size(rptr), true, pstate))
                        new |= 1;
                    goto nr;
                case t_oparray:
                    nptr = rptr->value.refs;  /* discard const */
                    sp[1].index = 1;
                    goto rrc;
                default:
                    goto nr;
            }
        }

        /* ---------------- Recursion ---------------- */
        /* The current segment is full: move to (or allocate) the next
         * mark-stack segment, or fall back to rescan marking. */

      push:
        if (sp == stop) { /* The current segment is full. */
            int new_added = gc_extend_stack(pms, pstate);

            if (new_added) {
                /* Could not extend: overflow handled via clump rescan range
                 * (new_added may be -1 to force another full pass). */
                new |= new_added;
                continue;
            }
            pms = pms->next;
            stop = pms->entries + pms->count - 1;
            pms->entries[1] = sp[1];
            sp = pms->entries;
        }
        /* index and is_refs are already set */
        if (!sp[1].is_refs) {
            nep.ptr = nptr;
            if (!(*ptp->mark) (&nep, pstate))
                continue;
            new |= 1;
        }
        (++sp)->ptr = nptr;
    }
    return new;
}
1040
/* Link to, attempting to allocate if necessary, */
/* another clump of mark stack. */
/* Returns 0 if a next segment is (now) available; otherwise the stack */
/* overflowed: returns 1, or -1 when the containing clump's rescan range */
/* had to be widened (forcing another tracing pass). */
static int
gc_extend_stack(gc_mark_stack * pms, gc_state_t * pstate)
{
    if (pms->next == 0) { /* Try to allocate another segment. */
        uint count;

        /* Halve the requested size until the allocation succeeds or
         * we drop below the minimum worth allocating. */
        for (count = ms_size_desired; count >= ms_size_min; count >>= 1) {
            pms->next = (gc_mark_stack *)
                gs_alloc_bytes_immovable(pstate->heap,
                                         sizeof(gc_mark_stack) +
                                         sizeof(ms_entry) * count,
                                         "gc mark stack");
            if (pms->next != 0)
                break;
        }
        if (pms->next == 0) { /* The mark stack overflowed. */
            /* Record the overflowed object's clump so it can be
             * rescanned later instead of traced now. */
            ms_entry *sp = pms->entries + pms->count - 1;
            byte *cptr = sp->ptr; /* container */
            clump_t *cp = gc_locate(cptr, pstate);
            int new = 1;

            if (cp == 0) { /* We were tracing outside collectible */
                /* storage.  This can't happen. */
                if_debug1('6', "mark stack overflowed while outside collectible space at "PRI_INTPTR"!\n",
                         (intptr_t)cptr);
                gs_abort(pstate->heap);
            }
            /* Widening the rescan window means marking is incomplete:
             * signal -1 so the caller knows another pass is needed. */
            if (cptr < cp->rescan_bot)
                cp->rescan_bot = cptr, new = -1;
            if (cptr > cp->rescan_top)
                cp->rescan_top = cptr, new = -1;
            return new;
        }
        gc_init_mark_stack(pms->next, count);
        pms->next->prev = pms;
        pms->next->on_heap = true;      /* so it is freed when GC ends */
    }
    return 0;
}
1081
1082
/* Mark a struct.  Return true if new mark. */
1083
static bool
1084
ptr_struct_mark(enum_ptr_t *pep, gc_state_t * ignored)
1085
550M
{
1086
550M
    obj_header_t *ptr = (obj_header_t *)pep->ptr;
1087
1088
550M
    if (ptr == 0)
1089
123M
        return false;
1090
427M
    ptr--;      /* point to header */
1091
427M
    if (!o_is_unmarked(ptr))
1092
387M
        return false;
1093
40.2M
    o_mark(ptr);
1094
40.2M
    return true;
1095
427M
}
1096
1097
/* Mark a string.  Return true if new mark. */
1098
static bool
1099
ptr_string_mark(enum_ptr_t *pep, gc_state_t * gcst)
1100
1.10M
{
1101
1.10M
    return gc_string_mark(pep->ptr, pep->size, true, gcst);
1102
1.10M
}
1103
1104
/* Mark a name.  Return true if new mark. */
1105
static bool
1106
ptr_name_index_mark(enum_ptr_t *pep, gc_state_t * gcst)
1107
0
{
1108
0
    return names_mark_index(gcst->heap->gs_lib_ctx->gs_name_table, pep->size);
1109
0
}
1110
1111
/* Finish tracing by marking names. */
1112
static bool
1113
gc_trace_finish(gc_state_t * pstate)
1114
21.7k
{
1115
21.7k
    name_table *nt = pstate->ntable;
1116
21.7k
    name_index_t nidx = 0;
1117
21.7k
    bool marked = false;
1118
1119
157M
    while ((nidx = names_next_valid_index(nt, nidx)) != 0) {
1120
157M
        name_string_t *pnstr = names_index_string_inline(nt, nidx);
1121
1122
157M
        if (pnstr->mark) {
1123
151M
            enum_ptr_t enst, ensst;
1124
1125
151M
            if (!pnstr->foreign_string &&
1126
151M
                gc_string_mark(pnstr->string_bytes, pnstr->string_size,
1127
135M
                               true, pstate)
1128
151M
                )
1129
135M
                marked = true;
1130
151M
            enst.ptr = names_index_sub_table(nt, nidx);
1131
151M
            ensst.ptr = names_index_string_sub_table(nt, nidx);
1132
151M
            marked |=
1133
151M
                ptr_struct_mark(&enst, pstate) |
1134
151M
                ptr_struct_mark(&ensst, pstate);
1135
151M
        }
1136
157M
    }
1137
21.7k
    return marked;
1138
21.7k
}
1139
1140
/* ------ Relocation planning phase ------ */
1141
1142
/* Initialize the relocation information in the clump header. */
1143
static void
1144
gc_init_reloc(clump_t * cp)
1145
6.98M
{
1146
6.98M
    clump_head_t *chead = cp->chead;
1147
1148
6.98M
    chead->dest = cp->cbase;
1149
6.98M
    chead->free.o_back =
1150
6.98M
        offset_of(clump_head_t, free) >> obj_back_shift;
1151
6.98M
    chead->free.o_size = sizeof(obj_header_t);
1152
6.98M
    chead->free.o_nreloc = 0;
1153
6.98M
}
1154
1155
/* Set marks and clear relocation for clumps that won't be compacted. */
/* Every object is flagged o_untraced and given a back pointer relative */
/* to the clump header's free entry, so relocation leaves it in place. */
static void
gc_clear_reloc(clump_t * cp)
{
    byte *pfree = (byte *) & cp->chead->free;

    gc_init_reloc(cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        const struct_shared_procs_t *procs =
    pre->o_type->shared;

    /* Types with shared procs keep their own reloc state; clear it. */
    if (procs != 0)
        (*procs->clear_reloc) (pre, size);
    o_set_untraced(pre);
    pre->o_back = ((byte *) pre - pfree) >> obj_back_shift;
    END_OBJECTS_SCAN
        gc_strings_set_marks(cp, true);
    gc_strings_clear_reloc(cp);
}
1175
1176
/* Set the relocation for the objects in a clump. */
/* This will never be called for a clump with any o_untraced objects. */
/* Free (unmarked) objects are finalized here and accumulate 'reloc', */
/* the number of bytes by which following objects will slide down. */
static void
gc_objects_set_reloc(gc_state_t * gcst, clump_t * cp)
{
    size_t reloc = 0;
    clump_head_t *chead = cp->chead;
    byte *pfree = (byte *) & chead->free; /* most recent free object */

    if_debug_clump('6', gcst->heap, "[6]setting reloc for clump", cp);
    gc_init_reloc(cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        struct_proc_finalize((*finalize));
    const struct_shared_procs_t *procs =
    pre->o_type->shared;

    /* An object is free if unmarked (plain types), or if its shared
     * set_reloc proc says so. */
    if ((procs == 0 ? o_is_unmarked(pre) :
         !(*procs->set_reloc) (pre, reloc, size))
        ) {     /* Free object */
        reloc += sizeof(obj_header_t) + obj_align_round(size);
        if ((finalize = pre->o_type->finalize) != 0) {
            if_debug2m('u', gcst->heap, "[u]GC finalizing %s "PRI_INTPTR"\n",
                       struct_type_name_string(pre->o_type),
                       (intptr_t)(pre + 1));
            (*finalize) (gcst->cur_mem, pre + 1);
        }
        /* Free objects become the new back-pointer anchor and record
         * the running relocation amount. */
        pfree = (byte *) pre;
        pre->o_back = (pfree - (byte *) chead) >> obj_back_shift;
        pre->o_nreloc = reloc;
        if_debug3m('7', gcst->heap, " [7]at "PRI_INTPTR", unmarked %lu, new reloc = %u\n",
                   (intptr_t)pre, (ulong) size, (unsigned int)reloc);
    } else {     /* Useful object */
        debug_check_object(pre, cp, gcst);
        /* Back pointer to the most recent free object before it. */
        pre->o_back = ((byte *) pre - pfree) >> obj_back_shift;
    }
    END_OBJECTS_SCAN
#ifdef DEBUG
    if (reloc != 0) {
        if_debug1m('6', gcst->heap, "[6]freed %u", (unsigned int)reloc);
        if_debug_clump('6', gcst->heap, " in", cp);
    }
#endif
}
1220
1221
/* ------ Relocation phase ------ */

/* Relocate the pointers in all the objects in a clump. */
/* Calls each live (or untraced) object's reloc_ptrs procedure so its */
/* interior pointers are rewritten to post-compaction addresses. */
static void
gc_do_reloc(clump_t * cp, gs_ref_memory_t * mem, gc_state_t * pstate)
{
    clump_head_t *chead = cp->chead;

    if_debug_clump('6', (const gs_memory_t *)mem, "[6]relocating in clump", cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
#ifdef DEBUG
        /* Remember the clump for back-pointer sanity checks. */
        pstate->container = cp;
#endif
    /* We need to relocate the pointers in an object iff */
    /* it is o_untraced, or it is a useful object. */
    /* An object is free iff its back pointer points to */
    /* the clump_head structure. */
        if (o_is_untraced(pre) ||
            pre->o_back << obj_back_shift != (byte *) pre - (byte *) chead
            ) {
            struct_proc_reloc_ptrs((*proc)) =
                pre->o_type->reloc_ptrs;

            if_debug3m('7', (const gs_memory_t *)mem,
                       " [7]relocating ptrs in %s(%lu) "PRI_INTPTR"\n",
                       struct_type_name_string(pre->o_type),
                       (ulong)size, (intptr_t)pre);
            if (proc != 0)
                (*proc) (pre + 1, size, pre->o_type, pstate);
        }
#ifdef DEBUG
        pstate->container = 0;
#endif
    END_OBJECTS_SCAN
}
1257
1258
/* Print pointer relocation if debugging. */
/* We have to provide this procedure even if DEBUG is not defined, */
/* in case one of the other GC modules was compiled with DEBUG. */
/* Returns robj unchanged so it can wrap a relocation expression. */
const void *
print_reloc_proc(const void *obj, const char *cname, const void *robj)
{
    if_debug3('9', "  [9]relocate %s * "PRI_INTPTR" to "PRI_INTPTR"\n",
              cname, (intptr_t)obj, (intptr_t)robj);
    return robj;
}
1268
1269
/* Relocate a pointer to an (aligned) object. */
/* See gsmemory.h for why the argument is const and the result is not. */
/* The new address is computed from the object's back pointer: it leads */
/* (possibly via an intermediate free object) to the clump head, whose */
/* 'dest' plus the object's offset minus accumulated relocation gives */
/* the post-compaction address. */
static void /*obj_header_t */ *
igc_reloc_struct_ptr(const void /*obj_header_t */ *obj, gc_state_t * gcst)
{
    const obj_header_t *const optr = (const obj_header_t *)obj;
    const void *robj;

    if (obj == 0) {
        discard(print_reloc(obj, "NULL", 0));
        return 0;
    }
    debug_check_object(optr - 1, NULL, gcst);
    {
        uint back = optr[-1].o_back;

        if (back == o_untraced)
            robj = obj;         /* untraced objects do not move */
        else {
#ifdef DEBUG
            /* Do some sanity checking. */
            clump_t *cp = gcst->container;

            if (cp != 0 && cp->cbase <= (byte *)obj && (byte *)obj <cp->ctop) {
                if (back > (cp->ctop - cp->cbase) >> obj_back_shift) {
                    if_debug2('6', "Invalid back pointer %u at "PRI_INTPTR"!\n",
                             back, (intptr_t)obj);
                    gs_abort(NULL);
                }
            } else {
                /* Pointed to unknown clump. Can't check it, sorry. */
            }
#endif
            {
                /* Follow the back pointer to the preceding free object,
                 * and from there to the clump head. */
                const obj_header_t *pfree = (const obj_header_t *)
                ((const char *)(optr - 1) -
                 (back << obj_back_shift));
                const clump_head_t *chead = (const clump_head_t *)
                ((const char *)pfree -
                 (pfree->o_back << obj_back_shift));

                /* New address = destination base + original offset
                 * - bytes freed before this object. */
                robj = chead->dest +
                    ((const char *)obj - (const char *)(chead + 1) -
                     pfree->o_nreloc);
            }
        }
    }
    /* Use a severely deprecated pun to remove the const property. */
    {
        union { const void *r; void *w; } u;

        u.r = print_reloc(obj, struct_type_name_string(optr[-1].o_type), robj);
        return u.w;
    }
}
1324
1325
/* ------ Compaction phase ------ */

/* Compact the objects in a clump. */
/* This will never be called for a clump with any o_untraced objects. */
/* Slides every live object down to 'dpre' (starting at chead->dest), */
/* fills the freed tail, and resets the clump's allocation pointers. */
static void
gc_objects_compact(clump_t * cp, gc_state_t * gcst)
{
    clump_head_t *chead = cp->chead;
    obj_header_t *dpre = (obj_header_t *) chead->dest;
    const gs_memory_t *cmem = gcst->spaces.memories.named.system->stable_memory;

    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
    /* An object is free iff its back pointer points to */
    /* the clump_head structure. */
        if (pre->o_back << obj_back_shift != (byte *) pre - (byte *) chead) {
        const struct_shared_procs_t *procs = pre->o_type->shared;

        debug_check_object(pre, cp, gcst);
        if_debug4m('7', cmem,
                   " [7]compacting %s "PRI_INTPTR"(%lu) to "PRI_INTPTR"\n",
                   struct_type_name_string(pre->o_type),
                   (intptr_t)pre, (ulong)size, (intptr_t)dpre);
        if (procs == 0) {
            /* Plain move; regions may overlap, hence memmove. */
            if (dpre != pre)
                memmove(dpre, pre,
                        sizeof(obj_header_t) + size);
        } else
            (*procs->compact) (cmem, pre, dpre, size);
        dpre = (obj_header_t *)
            ((byte *) dpre + obj_size_round(size));
    }
    END_OBJECTS_SCAN
        if (cp->outer == 0 && chead->dest != cp->cbase)
        dpre = (obj_header_t *) cp->cbase; /* compacted this clump into another */
    /* Fill the now-unused tail with the debug pattern. */
    gs_alloc_fill(dpre, gs_alloc_fill_collected, cp->cbot - (byte *) dpre);
    cp->cbot = (byte *) dpre;
    cp->rcur = 0;
    cp->rtop = 0;   /* just to be sure */
}
1365
1366
/* ------ Cleanup ------ */
1367
1368
static splay_app_result_t
1369
free_if_empty(clump_t *cp, void *arg)
1370
6.78M
{
1371
6.78M
    gs_ref_memory_t * mem = (gs_ref_memory_t *)arg;
1372
1373
6.78M
    if (cp->cbot == cp->cbase && cp->ctop == cp->climit &&
1374
6.78M
        cp->outer == 0 && cp->inner_count == 0)
1375
282k
    {
1376
282k
        alloc_free_clump(cp, mem);
1377
282k
        if (mem->cc == cp)
1378
0
            mem->cc = NULL;
1379
282k
    }
1380
6.78M
    return SPLAY_APP_CONTINUE;
1381
6.78M
}
1382
1383
/* Free empty clumps. */
/* Walks the allocator's clump splay tree, releasing every clump that
 * free_if_empty judges completely empty. */
static void
gc_free_empty_clumps(gs_ref_memory_t * mem)
{
    /* NOTE: Not in reverse order any more, so potentially
     * not quite as good for crap allocators. */
    clump_splay_app(mem->root, mem, free_if_empty, mem);
}
1391
1392
const gs_memory_t * gcst_get_memory_ptr(gc_state_t *gcst)
1393
421M
{
1394
421M
    vm_spaces spaces = gcst->spaces;
1395
421M
    const gs_memory_t *cmem = space_system->stable_memory;
1396
421M
    return cmem;
1397
421M
}