Coverage Report

Created: 2025-06-10 06:58

/src/ghostpdl/psi/igc.c
Line
Count
Source (jump to first uncovered line)
1
/* Copyright (C) 2001-2024 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  39 Mesa Street, Suite 108A, San Francisco,
13
   CA 94129, USA, for further information.
14
*/
15
16
17
/* Garbage collector for Ghostscript */
18
#include "memory_.h"
19
#include "ghost.h"
20
#include "ierrors.h"
21
#include "gsexit.h"
22
#include "gsmdebug.h"
23
#include "gsstruct.h"
24
#include "iastate.h"
25
#include "isave.h"
26
#include "isstate.h"
27
#include "idict.h"
28
#include "ipacked.h"
29
#include "istruct.h"
30
#include "igc.h"
31
#include "igcstr.h"
32
#include "inamedef.h"
33
#include "opdef.h"    /* for marking oparray names */
34
35
/* Define whether to force all garbage collections to be global. */
36
static const bool I_FORCE_GLOBAL_GC = false;
37
38
/* Define whether to bypass the collector entirely. */
39
static const bool I_BYPASS_GC = false;
40
41
/* Define an entry on the mark stack. */
42
typedef struct {
43
    void *ptr;
44
    uint index;
45
    bool is_refs;
46
} ms_entry;
47
48
/* Define (a segment of) the mark stack. */
49
/* entries[0] has ptr = 0 to indicate the bottom of the stack. */
50
/* count additional entries follow this structure. */
51
typedef struct gc_mark_stack_s gc_mark_stack;
52
struct gc_mark_stack_s {
53
    gc_mark_stack *prev;
54
    gc_mark_stack *next;
55
    uint count;
56
    bool on_heap;   /* if true, allocated during GC */
57
    ms_entry entries[1];
58
};
59
60
/* Define the mark stack sizing parameters. */
61
21.1k
#define ms_size_default 100  /* default, allocated on C stack */
62
/* This should probably be defined as a parameter somewhere.... */
63
#define ms_size_desired   /* for additional allocation */\
64
313
 ((max_ushort - sizeof(gc_mark_stack)) / sizeof(ms_entry) - 10)
65
8.49M
#define ms_size_min 50    /* min size for segment in free block */
66
67
/* Forward references */
68
69
static void gc_init_mark_stack(gc_mark_stack *, uint);
70
static void gc_objects_clear_marks(const gs_memory_t *mem, clump_t *);
71
static void gc_unmark_names(name_table *, op_array_table *, op_array_table *);
72
static int gc_trace(gs_gc_root_t *, gc_state_t *, gc_mark_stack *);
73
static int gc_rescan_clump(clump_t *, gc_state_t *, gc_mark_stack *);
74
static int gc_trace_clump(const gs_memory_t *mem, clump_t *, gc_state_t *, gc_mark_stack *);
75
static bool gc_trace_finish(gc_state_t *);
76
static void gc_clear_reloc(clump_t *);
77
static void gc_objects_set_reloc(gc_state_t * gcst, clump_t *);
78
static void gc_do_reloc(clump_t *, gs_ref_memory_t *, gc_state_t *);
79
static void gc_objects_compact(clump_t *, gc_state_t *);
80
static void gc_free_empty_clumps(gs_ref_memory_t *);
81
82
/* Forward references for pointer types */
83
static ptr_proc_unmark(ptr_struct_unmark);
84
static ptr_proc_mark(ptr_struct_mark);
85
static ptr_proc_unmark(ptr_string_unmark);
86
static ptr_proc_mark(ptr_string_mark);
87
static ptr_proc_unmark(ptr_name_index_unmark);
88
static ptr_proc_mark(ptr_name_index_mark);
89
/*ptr_proc_unmark(ptr_ref_unmark); *//* in igc.h */
90
/*ptr_proc_mark(ptr_ref_mark); *//* in igc.h */
91
static ptr_proc_reloc(igc_reloc_struct_ptr, void);
92
93
ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed);  /* in igcref.c */
94
refs_proc_reloc(igc_reloc_refs);  /* in igcref.c */
95
96
/* Define this GC's procedure vector. */
97
static const gc_procs_with_refs_t igc_procs = {
98
    igc_reloc_struct_ptr, igc_reloc_string, igc_reloc_const_string,
99
    igc_reloc_param_string, igc_reloc_ref_ptr, igc_reloc_refs
100
};
101
102
/* Pointer type descriptors. */
103
/* Note that the trace/mark routine has special knowledge of ptr_ref_type */
104
/* and ptr_struct_type -- it assumes that no other types have embedded */
105
/* pointers.  Note also that the reloc procedures for string, ref and name */
106
/* pointers are never called. */
107
typedef ptr_proc_reloc((*ptr_proc_reloc_t), void);
108
const gs_ptr_procs_t ptr_struct_procs =
109
{ptr_struct_unmark, ptr_struct_mark, (ptr_proc_reloc_t) igc_reloc_struct_ptr};
110
const gs_ptr_procs_t ptr_string_procs =
111
{ptr_string_unmark, ptr_string_mark, NULL};
112
const gs_ptr_procs_t ptr_const_string_procs =
113
{ptr_string_unmark, ptr_string_mark, NULL};
114
const gs_ptr_procs_t ptr_ref_procs =
115
{ptr_ref_unmark, ptr_ref_mark, NULL};
116
const gs_ptr_procs_t ptr_name_index_procs =
117
{ptr_name_index_unmark, ptr_name_index_mark, NULL};
118
119
/* ------ Main program ------ */
120
121
/* Top level of garbage collector. */
122
#ifdef DEBUG
123
static void
124
end_phase(const gs_memory_t *mem, const char *str)
125
{
126
    if (gs_debug_c('6')) {
127
        dmlprintf1(mem, "[6]---------------- end %s ----------------\n",
128
                   (const char *)str);
129
        dmflush(mem);
130
    }
131
}
132
static const char *const depth_dots_string = "..........";
133
static const char *
134
depth_dots(const ms_entry * sp, const gc_mark_stack * pms)
135
{
136
    int depth = sp - pms->entries - 1;
137
    const gc_mark_stack *pss = pms;
138
139
    while ((pss = pss->prev) != 0)
140
        depth += pss->count - 1;
141
    return depth_dots_string + (depth >= 10 ? 0 : 10 - depth);
142
}
143
static void
144
gc_validate_spaces(gs_ref_memory_t **spaces, int max_space, gc_state_t *gcst)
145
{
146
    int i;
147
    gs_ref_memory_t *mem;
148
149
    for (i = 1; i <= max_space; ++i)
150
        if ((mem = spaces[i]) != 0)
151
            ialloc_validate_memory(mem, gcst);
152
}
153
#else  /* !DEBUG */
154
337k
#  define end_phase(mem,str) DO_NOTHING
155
#endif /* DEBUG */
156
157
void
158
gs_gc_reclaim(vm_spaces * pspaces, bool global)
159
21.1k
{
160
21.1k
#define nspaces ((i_vm_max + 1) * 2) /* * 2 for stable allocators */
161
162
21.1k
    vm_spaces spaces;
163
21.1k
    gs_ref_memory_t *space_memories[nspaces];
164
21.1k
    gs_gc_root_t space_roots[nspaces];
165
21.1k
    int max_trace;    /* max space_ to trace */
166
21.1k
    int min_collect;    /* min space_ to collect */
167
21.1k
    int min_collect_vm_space; /* min VM space to collect */
168
21.1k
    int ispace;
169
21.1k
    gs_ref_memory_t *mem;
170
21.1k
    clump_t *cp;
171
21.1k
    gs_gc_root_t *rp;
172
21.1k
    gc_state_t state;
173
21.1k
    struct _msd {
174
21.1k
        gc_mark_stack stack;
175
21.1k
        ms_entry body[ms_size_default];
176
21.1k
    } ms_default;
177
21.1k
    gc_mark_stack *mark_stack = &ms_default.stack;
178
21.1k
    const gs_memory_t *cmem;
179
21.1k
    clump_splay_walker sw;
180
181
    /* Optionally force global GC for debugging. */
182
183
21.1k
    if (I_FORCE_GLOBAL_GC)
184
0
        global = true;
185
186
    /* Determine which spaces we are tracing and collecting. */
187
188
21.1k
    spaces = *pspaces;
189
21.1k
    cmem = space_system->stable_memory;
190
21.1k
    space_memories[1] = space_system;
191
21.1k
    space_memories[2] = space_global;
192
21.1k
    min_collect = max_trace = 2;
193
21.1k
    min_collect_vm_space = i_vm_global;
194
21.1k
    if (space_global->stable_memory != (gs_memory_t *)space_global)
195
21.1k
        space_memories[++max_trace] =
196
21.1k
            (gs_ref_memory_t *)space_global->stable_memory;
197
21.1k
    if (space_global != space_local) {
198
21.1k
        space_memories[++max_trace] = space_local;
199
21.1k
        min_collect = max_trace;
200
21.1k
        min_collect_vm_space = i_vm_local;
201
21.1k
        if (space_local->stable_memory != (gs_memory_t *)space_local)
202
21.1k
            space_memories[++max_trace] =
203
21.1k
                (gs_ref_memory_t *)space_local->stable_memory;
204
21.1k
    }
205
21.1k
    if (global)
206
20.6k
        min_collect = min_collect_vm_space = 1;
207
208
21.1k
#define for_spaces(i, n)\
209
829k
  for (i = 1; i <= n; ++i)
210
21.1k
#define for_collected_spaces(i)\
211
1.12M
  for (i = min_collect; i <= max_trace; ++i)
212
21.1k
#define for_space_mems(i, mem)\
213
1.49M
  for (mem = space_memories[i]; mem != 0; mem = &mem->saved->state)
214
21.1k
#define for_mem_clumps(mem, cp, sw)\
215
21.9M
  for (cp = clump_splay_walk_init(sw, mem); cp != 0; cp = clump_splay_walk_fwd(sw))
216
21.1k
#define for_space_clumps(i, mem, cp, sw)\
217
581k
  for_space_mems(i, mem) for_mem_clumps(mem, cp, sw)
218
21.1k
#define for_clumps(i, n, mem, cp, sw)\
219
105k
  for_spaces(i, n) for_space_clumps(i, mem, cp, sw)
220
21.1k
#define for_collected_clumps(i, mem, cp, sw)\
221
42.2k
  for_collected_spaces(i) for_space_clumps(i, mem, cp, sw)
222
21.1k
#define for_roots(i, n, mem, rp)\
223
63.3k
  for_spaces(i, n)\
224
982k
    for (mem = space_memories[i], rp = mem->roots; rp != 0; rp = rp->next)
225
226
    /* Initialize the state. */
227
228
21.1k
    state.procs = &igc_procs;
229
21.1k
    state.loc.memory = space_global; /* any one will do */
230
231
21.1k
    state.loc.cp = 0;
232
21.1k
    state.spaces = spaces;
233
21.1k
    state.min_collect = min_collect_vm_space << r_space_shift;
234
21.1k
    state.relocating_untraced = false;
235
21.1k
    state.heap = state.loc.memory->non_gc_memory;
236
21.1k
    state.ntable = state.heap->gs_lib_ctx->gs_name_table;
237
238
    /* Register the allocators themselves as roots, */
239
    /* so we mark and relocate the change and save lists properly. */
240
241
105k
    for_spaces(ispace, max_trace) {
242
105k
        rp = &space_roots[ispace];
243
105k
        gs_register_struct_root((gs_memory_t *)space_memories[ispace],
244
105k
                                &rp,
245
105k
                                (void **)&space_memories[ispace],
246
105k
                                "gc_top_level");
247
105k
    }
248
249
21.1k
    end_phase(state.heap,"register space roots");
250
251
#ifdef DEBUG
252
253
    /* Pre-validate the state.  This shouldn't be necessary.... */
254
255
    gc_validate_spaces(space_memories, max_trace, &state);
256
257
    end_phase(state.heap,"pre-validate pointers");
258
259
#endif
260
261
21.1k
    if (I_BYPASS_GC) {   /* Don't collect at all. */
262
0
        goto no_collect;
263
0
    }
264
265
    /* Clear marks in spaces to be collected. */
266
267
21.1k
    for_collected_spaces(ispace)
268
4.17M
        for_space_clumps(ispace, mem, cp, &sw) {
269
4.17M
            gc_objects_clear_marks((const gs_memory_t *)mem, cp);
270
4.17M
            gc_strings_set_marks(cp, false);
271
4.17M
        }
272
273
21.1k
    end_phase(state.heap,"clear clump marks");
274
275
    /* Clear the marks of roots.  We must do this explicitly, */
276
    /* since some roots are not in any clump. */
277
278
221k
    for_roots(ispace, max_trace, mem, rp) {
279
221k
        enum_ptr_t eptr;
280
281
221k
        eptr.ptr = *rp->p;
282
221k
        if_debug_root('6', (const gs_memory_t *)mem, "[6]unmarking root", rp);
283
221k
        (*rp->ptype->unmark)(&eptr, &state);
284
221k
    }
285
286
21.1k
    end_phase(state.heap,"clear root marks");
287
288
21.1k
    if (global) {
289
20.6k
        op_array_table *global_ops = get_global_op_array(cmem);
290
20.6k
        op_array_table *local_ops  = get_local_op_array(cmem);
291
20.6k
        gc_unmark_names(state.ntable, global_ops, local_ops);
292
20.6k
    }
293
294
    /* Initialize the (default) mark stack. */
295
296
21.1k
    gc_init_mark_stack(&ms_default.stack, ms_size_default);
297
21.1k
    ms_default.stack.prev = 0;
298
21.1k
    ms_default.stack.on_heap = false;
299
300
    /* Add all large-enough free blocks to the mark stack. */
301
    /* Also initialize the rescan pointers. */
302
303
21.1k
    {
304
21.1k
        gc_mark_stack *end = mark_stack;
305
306
4.24M
        for_clumps(ispace, max_trace, mem, cp, &sw) {
307
4.24M
            uint avail = cp->ctop - cp->cbot;
308
309
4.24M
            if (avail >= sizeof(gc_mark_stack) + sizeof(ms_entry) *
310
4.24M
                ms_size_min &&
311
4.24M
                !cp->inner_count
312
4.24M
                ) {
313
444k
                gc_mark_stack *pms = (gc_mark_stack *) cp->cbot;
314
315
444k
                gc_init_mark_stack(pms, (avail - sizeof(gc_mark_stack)) /
316
444k
                                   sizeof(ms_entry));
317
444k
                end->next = pms;
318
444k
                pms->prev = end;
319
444k
                pms->on_heap = false;
320
444k
                if_debug2m('6', (const gs_memory_t *)mem,
321
444k
                           "[6]adding free "PRI_INTPTR"(%u) to mark stack\n",
322
444k
                           (intptr_t)pms, pms->count);
323
444k
            }
324
4.24M
            cp->rescan_bot = cp->cend;
325
4.24M
            cp->rescan_top = cp->cbase;
326
4.24M
        }
327
21.1k
    }
328
329
    /* Mark reachable objects. */
330
331
21.1k
    {
332
21.1k
        int more = 0;
333
334
        /* Mark from roots. */
335
336
221k
        for_roots(ispace, max_trace, mem, rp) {
337
221k
            if_debug_root('6', (const gs_memory_t *)mem, "[6]marking root", rp);
338
221k
            more |= gc_trace(rp, &state, mark_stack);
339
221k
        }
340
341
21.1k
        end_phase(state.heap,"mark");
342
343
        /* If this is a local GC, mark from non-local clumps. */
344
345
21.1k
        if (!global)
346
21.1k
            for_clumps(ispace, min_collect - 1, mem, cp, &sw)
347
76.5k
                more |= gc_trace_clump((const gs_memory_t *)mem, cp, &state, mark_stack);
348
349
        /* Handle mark stack overflow. */
350
351
21.1k
        while (more < 0) { /* stack overflowed */
352
0
            more = 0;
353
0
            for_clumps(ispace, max_trace, mem, cp, &sw)
354
0
                more |= gc_rescan_clump(cp, &state, mark_stack);
355
0
        }
356
357
21.1k
        end_phase(state.heap,"mark overflow");
358
21.1k
    }
359
360
    /* Free the mark stack. */
361
362
21.1k
    {
363
21.1k
        gc_mark_stack *pms = mark_stack;
364
365
42.5k
        while (pms->next)
366
21.4k
            pms = pms->next;
367
63.6k
        while (pms) {
368
42.5k
            gc_mark_stack *prev = pms->prev;
369
370
42.5k
            if (pms->on_heap)
371
313
                gs_free_object(state.heap, pms, "gc mark stack");
372
42.2k
            else
373
42.5k
                gs_alloc_fill(pms, gs_alloc_fill_free,
374
42.5k
                              sizeof(*pms) + sizeof(ms_entry) * pms->count);
375
42.5k
            pms = prev;
376
42.5k
        }
377
21.1k
    }
378
379
21.1k
    end_phase(state.heap,"free mark stack");
380
381
21.1k
    if (global) {
382
20.6k
        gc_trace_finish(&state);
383
20.6k
        names_trace_finish(state.ntable, &state);
384
385
20.6k
        end_phase(state.heap,"finish trace");
386
20.6k
    }
387
388
    /* Filter save change lists with removing elements,
389
       which point to unmarked blocks of refs. */
390
21.1k
    {
391
21.1k
        int i;
392
393
104k
        for_collected_spaces(i) {
394
104k
            gs_ref_memory_t *mem = space_memories[i];
395
396
104k
            alloc_save__filter_changes(mem);
397
104k
        }
398
21.1k
    }
399
    /* Clear marks and relocation in spaces that are only being traced. */
400
    /* We have to clear the marks first, because we want the */
401
    /* relocation to wind up as o_untraced, not o_unmarked. */
402
403
21.1k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
404
76.5k
        gc_objects_clear_marks((const gs_memory_t *)mem, cp);
405
406
21.1k
    end_phase(state.heap,"post-clear marks");
407
408
21.1k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
409
76.5k
        gc_clear_reloc(cp);
410
411
21.1k
    end_phase(state.heap,"clear reloc");
412
413
    /* Set the relocation of roots outside any clump to o_untraced, */
414
    /* so we won't try to relocate pointers to them. */
415
    /* (Currently, there aren't any.) */
416
417
    /* Disable freeing in the allocators of the spaces we are */
418
    /* collecting, so finalization procedures won't cause problems. */
419
21.1k
    {
420
21.1k
        int i;
421
422
21.1k
        for_collected_spaces(i)
423
104k
            gs_enable_free((gs_memory_t *)space_memories[i], false);
424
21.1k
    }
425
426
    /* Compute relocation based on marks, in the spaces */
427
    /* we are going to compact.  Also finalize freed objects. */
428
21.1k
    state.cur_mem = (gs_memory_t *)mem;
429
430
4.17M
    for_collected_clumps(ispace, mem, cp, &sw) {
431
4.17M
        gc_objects_set_reloc(&state, cp);
432
4.17M
        gc_strings_set_reloc(cp);
433
4.17M
    }
434
435
    /* Re-enable freeing. */
436
21.1k
    {
437
21.1k
        int i;
438
439
21.1k
        for_collected_spaces(i)
440
104k
            gs_enable_free((gs_memory_t *)space_memories[i], true);
441
21.1k
    }
442
443
21.1k
    end_phase(state.heap,"set reloc");
444
445
    /* Relocate pointers. */
446
447
21.1k
    state.relocating_untraced = true;
448
21.1k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
449
76.5k
        gc_do_reloc(cp, mem, &state);
450
21.1k
    state.relocating_untraced = false;
451
247k
    for_collected_clumps(ispace, mem, cp, &sw)
452
4.17M
        gc_do_reloc(cp, mem, &state);
453
454
21.1k
    end_phase(state.heap,"relocate clumps");
455
456
221k
    for_roots(ispace, max_trace, mem, rp) {
457
221k
        if_debug3m('6', (const gs_memory_t *)mem,
458
221k
                   "[6]relocating root "PRI_INTPTR": "PRI_INTPTR" -> "PRI_INTPTR"\n",
459
221k
                   (intptr_t)rp, (intptr_t)rp->p, (intptr_t)*rp->p);
460
221k
        if (rp->ptype == ptr_ref_type) {
461
10.8k
            ref *pref = (ref *) * rp->p;
462
463
10.8k
            igc_reloc_refs((ref_packed *) pref,
464
10.8k
                           (ref_packed *) (pref + 1),
465
10.8k
                           &state);
466
10.8k
        } else
467
211k
            *rp->p = (*rp->ptype->reloc) (*rp->p, &state);
468
221k
        if_debug3m('6', (const gs_memory_t *)mem,
469
221k
                   "[6]relocated root "PRI_INTPTR": "PRI_INTPTR" -> "PRI_INTPTR"\n",
470
221k
                   (intptr_t)rp, (intptr_t)rp->p, (intptr_t)*rp->p);
471
221k
    }
472
473
21.1k
    end_phase(state.heap,"relocate roots");
474
475
    /* Compact data.  We only do this for spaces we are collecting. */
476
477
104k
    for_collected_spaces(ispace) {
478
143k
        for_space_mems(ispace, mem) {
479
4.17M
            for_mem_clumps(mem, cp, &sw) {
480
4.17M
                if_debug_clump('6', (const gs_memory_t *)mem, "[6]compacting clump", cp);
481
4.17M
                gc_objects_compact(cp, &state);
482
4.17M
                gc_strings_compact(cp, cmem);
483
4.17M
                if_debug_clump('6', (const gs_memory_t *)mem, "[6]after compaction:", cp);
484
4.17M
            }
485
143k
            mem->saved = mem->reloc_saved;
486
143k
            ialloc_reset_free(mem);
487
143k
        }
488
104k
    }
489
490
21.1k
    end_phase(state.heap,"compact");
491
492
    /* Free empty clumps. */
493
494
104k
    for_collected_spaces(ispace) {
495
143k
        for_space_mems(ispace, mem) {
496
143k
            gc_free_empty_clumps(mem);
497
143k
        }
498
104k
    }
499
500
21.1k
    end_phase(state.heap,"free empty clumps");
501
502
    /*
503
     * Update previous_status to reflect any freed clumps,
504
     * and set inherited to the negative of allocated,
505
     * so it has no effect.  We must update previous_status by
506
     * working back-to-front along the save chain, using pointer reversal.
507
     * (We could update inherited in any order, since it only uses
508
     * information local to the individual save level.)
509
     */
510
511
104k
    for_collected_spaces(ispace) { /* Reverse the pointers. */
512
104k
        alloc_save_t *curr;
513
104k
        alloc_save_t *prev = 0;
514
104k
        alloc_save_t *next;
515
104k
        gs_memory_status_t total;
516
517
143k
        for (curr = space_memories[ispace]->saved; curr != 0;
518
104k
             prev = curr, curr = next
519
104k
            ) {
520
38.8k
            next = curr->state.saved;
521
38.8k
            curr->state.saved = prev;
522
38.8k
        }
523
        /* Now work the other way, accumulating the values. */
524
104k
        total.allocated = 0, total.used = 0;
525
143k
        for (curr = prev, prev = 0; curr != 0;
526
104k
             prev = curr, curr = next
527
104k
            ) {
528
38.8k
            mem = &curr->state;
529
38.8k
            next = mem->saved;
530
38.8k
            mem->saved = prev;
531
38.8k
            mem->previous_status = total;
532
38.8k
            if_debug3m('6', (const gs_memory_t *)mem,
533
38.8k
                       "[6]"PRI_INTPTR" previous allocated=%lu, used=%lu\n",
534
38.8k
                       (intptr_t)mem, total.allocated, total.used);
535
38.8k
            gs_memory_status((gs_memory_t *) mem, &total);
536
38.8k
            mem->gc_allocated = mem->allocated + total.allocated;
537
38.8k
        }
538
104k
        mem = space_memories[ispace];
539
104k
        mem->previous_status = total;
540
104k
        mem->gc_allocated = mem->allocated + total.allocated;
541
104k
        if_debug3m('6', (const gs_memory_t *)mem,
542
104k
                   "[6]"PRI_INTPTR" previous allocated=%lu, used=%lu\n",
543
104k
                   (intptr_t)mem, total.allocated, total.used);
544
104k
    }
545
546
21.1k
    end_phase(state.heap,"update stats");
547
548
21.1k
  no_collect:
549
550
    /* Unregister the allocator roots. */
551
552
21.1k
    for_spaces(ispace, max_trace)
553
105k
        gs_unregister_root((gs_memory_t *)space_memories[ispace],
554
21.1k
                           &space_roots[ispace], "gc_top_level");
555
556
21.1k
    end_phase(state.heap,"unregister space roots");
557
558
#ifdef DEBUG
559
560
    /* Validate the state.  This shouldn't be necessary.... */
561
562
    gc_validate_spaces(space_memories, max_trace, &state);
563
564
    end_phase(state.heap,"validate pointers");
565
566
#endif
567
21.1k
}
568
569
/* ------ Debugging utilities ------ */
570
571
/* Validate a pointer to an object header. */
572
#ifdef DEBUG
573
#  define debug_check_object(pre, cp, gcst)\
574
     ialloc_validate_object((pre) + 1, cp, gcst)
575
#else
576
639M
#  define debug_check_object(pre, cp, gcst) DO_NOTHING
577
#endif
578
579
/* ------ Unmarking phase ------ */
580
581
/* Unmark a single struct. */
582
static void
583
ptr_struct_unmark(enum_ptr_t *pep, gc_state_t * ignored)
584
211k
{
585
211k
    void *const vptr = (void *)pep->ptr; /* break const */
586
587
211k
    if (vptr != 0)
588
211k
        o_set_unmarked(((obj_header_t *) vptr - 1));
589
211k
}
590
591
/* Unmark a single string. */
592
static void
593
ptr_string_unmark(enum_ptr_t *pep, gc_state_t * gcst)
594
0
{
595
0
    discard(gc_string_mark(pep->ptr, pep->size, false, gcst));
596
0
}
597
598
/* Unmark a single name. */
599
static void
600
ptr_name_index_unmark(enum_ptr_t *pep, gc_state_t * gcst)
601
0
{
602
    /* Do nothing */
603
0
}
604
605
/* Unmark the objects in a clump. */
606
static void
607
gc_objects_clear_marks(const gs_memory_t *mem, clump_t * cp)
608
4.24M
{
609
4.24M
    if_debug_clump('6', mem, "[6]unmarking clump", cp);
610
53.8M
    SCAN_CLUMP_OBJECTS(cp)
611
53.8M
        DO_ALL
612
53.8M
        struct_proc_clear_marks((*proc)) =
613
53.8M
        pre->o_type->clear_marks;
614
#ifdef DEBUG
615
    if (pre->o_type != &st_free)
616
        debug_check_object(pre, cp, NULL);
617
#endif
618
53.8M
    if_debug3m('7', (const gs_memory_t *)mem, " [7](un)marking %s(%lu) "PRI_INTPTR"\n",
619
53.8M
               struct_type_name_string(pre->o_type),
620
53.8M
               (ulong) size, (intptr_t)pre);
621
53.8M
    o_set_unmarked(pre);
622
53.8M
    if (proc != 0)
623
28.1M
        (*proc) (mem, pre + 1, size, pre->o_type);
624
53.8M
    END_OBJECTS_SCAN
625
4.24M
}
626
627
/* Mark 0- and 1-character names, and those referenced from the */
628
/* op_array_nx_table, and unmark all the rest. */
629
static void
630
gc_unmark_names(name_table * nt, op_array_table *op_array_table_global,
631
                op_array_table *op_array_table_local)
632
20.6k
{
633
20.6k
    uint i;
634
635
20.6k
    names_unmark_all(nt);
636
3.82M
    for (i = 0; i < op_array_table_global->count; i++) {
637
3.80M
        name_index_t nidx = op_array_table_global->nx_table[i];
638
639
3.80M
        names_mark_index(nt, nidx);
640
3.80M
    }
641
20.6k
    for (i = 0; i < op_array_table_local->count; i++) {
642
0
        name_index_t nidx = op_array_table_local->nx_table[i];
643
644
0
        names_mark_index(nt, nidx);
645
0
    }
646
20.6k
}
647
648
/* ------ Marking phase ------ */
649
650
/* Initialize (a segment of) the mark stack. */
651
static void
652
gc_init_mark_stack(gc_mark_stack * pms, uint count)
653
465k
{
654
465k
    pms->next = 0;
655
465k
    pms->count = count;
656
465k
    pms->entries[0].ptr = 0;
657
465k
    pms->entries[0].index = 0;
658
465k
    pms->entries[0].is_refs = false;
659
465k
}
660
661
/* Mark starting from all marked objects in the interval of a clump */
662
/* needing rescanning. */
663
static int
664
gc_rescan_clump(clump_t * cp, gc_state_t * pstate, gc_mark_stack * pmstack)
665
0
{
666
0
    byte *sbot = cp->rescan_bot;
667
0
    byte *stop = cp->rescan_top;
668
0
    gs_gc_root_t root;
669
0
    void *comp;
670
0
    int more = 0;
671
0
    const gs_memory_t *mem = gcst_get_memory_ptr( pstate );
672
673
0
    if (sbot > stop)
674
0
        return 0;
675
0
    root.p = &comp;
676
0
    if_debug_clump('6', mem, "[6]rescanning clump", cp);
677
0
    cp->rescan_bot = cp->cend;
678
0
    cp->rescan_top = cp->cbase;
679
0
    SCAN_CLUMP_OBJECTS(cp)
680
0
        DO_ALL
681
0
        if ((byte *) (pre + 1) + size < sbot);
682
0
    else if ((byte *) (pre + 1) > stop)
683
0
        return more;   /* 'break' won't work here */
684
0
    else {
685
0
        if_debug2m('7', mem, " [7]scanning/marking "PRI_INTPTR"(%lu)\n",
686
0
                   (intptr_t)pre, (ulong)size);
687
0
        if (pre->o_type == &st_refs) {
688
0
            ref_packed *rp = (ref_packed *) (pre + 1);
689
0
            char *end = (char *)rp + size;
690
691
0
            root.ptype = ptr_ref_type;
692
0
            while ((char *)rp < end) {
693
0
                comp = rp;
694
0
                if (r_is_packed(rp)) {
695
0
                    if (r_has_pmark(rp)) {
696
0
                        r_clear_pmark(rp);
697
0
                        more |= gc_trace(&root, pstate,
698
0
                                         pmstack);
699
0
                    }
700
0
                    rp++;
701
0
                } else {
702
0
                    ref *const pref = (ref *)rp;
703
704
0
                    if (r_has_attr(pref, l_mark)) {
705
0
                        r_clear_attrs(pref, l_mark);
706
0
                        more |= gc_trace(&root, pstate, pmstack);
707
0
                    }
708
0
                    rp += packed_per_ref;
709
0
                }
710
0
            }
711
0
        } else if (!o_is_unmarked(pre)) {
712
0
            struct_proc_clear_marks((*proc)) =
713
0
                pre->o_type->clear_marks;
714
0
            root.ptype = ptr_struct_type;
715
0
            comp = pre + 1;
716
0
            if (!o_is_untraced(pre))
717
0
                o_set_unmarked(pre);
718
0
            if (proc != 0)
719
0
                (*proc) (mem, comp, size, pre->o_type);
720
0
            more |= gc_trace(&root, pstate, pmstack);
721
0
        }
722
0
    }
723
0
    END_OBJECTS_SCAN
724
0
        return more;
725
0
}
726
727
/* Mark starting from all the objects in a clump. */
728
/* We assume that pstate->min_collect > avm_system, */
729
/* so we don't have to trace names. */
730
static int
731
gc_trace_clump(const gs_memory_t *mem, clump_t * cp, gc_state_t * pstate, gc_mark_stack * pmstack)
732
76.5k
{
733
76.5k
    gs_gc_root_t root;
734
76.5k
    void *comp;
735
76.5k
    int more = 0;
736
76.5k
    int min_trace = pstate->min_collect;
737
738
76.5k
    root.p = &comp;
739
76.5k
    if_debug_clump('6', mem, "[6]marking from clump", cp);
740
992k
    SCAN_CLUMP_OBJECTS(cp)
741
992k
        DO_ALL
742
992k
    {
743
992k
        if_debug2m('7', mem, " [7]scanning/marking "PRI_INTPTR"(%lu)\n",
744
992k
                   (intptr_t)pre, (ulong)size);
745
992k
        if (pre->o_type == &st_refs) {
746
436k
            ref_packed *rp = (ref_packed *) (pre + 1);
747
436k
            char *end = (char *)rp + size;
748
749
436k
            root.ptype = ptr_ref_type;
750
60.0M
            while ((char *)rp < end) {
751
59.6M
                comp = rp;
752
59.6M
                if (r_is_packed(rp)) { /* No packed refs need tracing. */
753
16.0M
                    rp++;
754
43.6M
                } else {
755
43.6M
                    ref *const pref = (ref *)rp;
756
757
43.6M
                    if (r_space(pref) >= min_trace) {
758
4.95k
                        r_clear_attrs(pref, l_mark);
759
4.95k
                        more |= gc_trace(&root, pstate, pmstack);
760
4.95k
                    }
761
43.6M
                    rp += packed_per_ref;
762
43.6M
                }
763
59.6M
            }
764
555k
        } else if (!o_is_unmarked(pre)) {
765
537k
            if (!o_is_untraced(pre))
766
537k
                o_set_unmarked(pre);
767
537k
            if (pre->o_type != &st_free) {
768
536k
                struct_proc_clear_marks((*proc)) =
769
536k
                    pre->o_type->clear_marks;
770
771
536k
                root.ptype = ptr_struct_type;
772
536k
                comp = pre + 1;
773
536k
                if (proc != 0)
774
28.6k
                    (*proc) (mem, comp, size, pre->o_type);
775
536k
                more |= gc_trace(&root, pstate, pmstack);
776
536k
            }
777
537k
        }
778
992k
    }
779
992k
    END_OBJECTS_SCAN
780
76.5k
        return more;
781
76.5k
}
782
783
/* Recursively mark from a (root) pointer. */
/* Return -1 if we overflowed the mark stack, */
/* 0 if we completed successfully without marking any new objects, */
/* 1 if we completed and marked some new objects. */
static int gc_extend_stack(gc_mark_stack *, gc_state_t *);
/*
 * Iterative depth-first trace driven by an explicit mark stack
 * (segmented gc_mark_stack clumps).  Each ms_entry records a pointer
 * plus whether it is a struct (enumerate via o_type->enum_ptrs) or an
 * array of refs (walk ref-by-ref).  The goto-based flow deliberately
 * avoids redundant is_refs tests on the hot path.
 */
static int
gc_trace(gs_gc_root_t * rp, /* lgtm [cpp/use-of-goto] */
         gc_state_t * pstate, gc_mark_stack * pmstack)
{
    int min_trace = pstate->min_collect;
    gc_mark_stack *pms = pmstack;
    ms_entry *sp = pms->entries + 1;

    /* We stop the mark stack 1 entry early, because we store into */
    /* the entry beyond the top. */
    ms_entry *stop = sp + pms->count - 2;
    int new = 0;
    enum_ptr_t nep;
    void *nptr;
    name_table *nt = pstate->ntable;

/* Mark one name-table entry; sets 'new' if the name was not yet marked. */
#define mark_name(nidx)\
  BEGIN\
    if (names_mark_index(nt, nidx)) {\
        new |= 1;\
        if_debug2m('8', gcst_get_memory_ptr(pstate), "  [8]marked name "PRI_INTPTR"(%u)\n",\
                   (intptr_t)names_index_ptr(nt, nidx), nidx);\
    }\
  END

    nptr = *rp->p;
    if (nptr == 0)
        return 0;

    /* Initialize the stack */
    sp->ptr = nptr;
    if (rp->ptype == ptr_ref_type)
        sp->index = 1, sp->is_refs = true;
    else {
        sp->index = 0, sp->is_refs = false;
        nep.ptr = nptr;
        if ((*rp->ptype->mark) (&nep, pstate))
            new |= 1;
    }
    for (;;) {
        gs_ptr_type_t ptp;

        /*
         * The following should really be an if..else, but that
         * would force unnecessary is_refs tests.
         */
        if (sp->is_refs)
            goto do_refs;

        /* ---------------- Structure ---------------- */

      do_struct:
        {
            obj_header_t *ptr = sp->ptr;

            struct_proc_enum_ptrs((*mproc));

            if (ptr == 0) { /* We've reached the bottom of a stack segment. */
                pms = pms->prev;
                if (pms == 0)
                    break;  /* all done */
                stop = pms->entries + pms->count - 1;
                sp = stop;
                continue;
            }
            debug_check_object(ptr - 1, NULL, NULL);
          ts:if_debug4m('7', pstate->heap, " [7]%smarking %s "PRI_INTPTR"[%u]",
                        depth_dots(sp, pms),
                        struct_type_name_string(ptr[-1].o_type),
                        (intptr_t)ptr, sp->index);
            mproc = ptr[-1].o_type->enum_ptrs;
            if (mproc == gs_no_struct_enum_ptrs ||
                (ptp = (*mproc)
                 (gcst_get_memory_ptr(pstate), ptr, pre_obj_contents_size(ptr - 1),
                  sp->index, &nep, ptr[-1].o_type, pstate)) == 0
                ) {
                /* No (more) pointers in this object: pop it. */
                if_debug0m('7', pstate->heap, " - done\n");
                sp--;
                continue;
            }
            /* The cast in the following statement is the one */
            /* place we need to break 'const' to make the */
            /* template for pointer enumeration work. */
            nptr = (void *)nep.ptr;
            sp->index++;
            if_debug1m('7', pstate->heap, " = "PRI_INTPTR"\n", (intptr_t)nptr);
            /* Descend into nep.ptr, whose pointer type is ptp. */
            if (ptp == ptr_struct_type) {
                sp[1].index = 0;
                sp[1].is_refs = false;
                if (sp == stop)
                    goto push;
                if (!ptr_struct_mark(&nep, pstate))
                    goto ts;    /* already marked: keep enumerating parent */
                new |= 1;
                (++sp)->ptr = nptr;
                goto do_struct;
            } else if (ptp == ptr_ref_type) {
                sp[1].index = 1;
                sp[1].is_refs = true;
                if (sp == stop)
                    goto push;
                new |= 1;
                (++sp)->ptr = nptr;
                goto do_refs;
            } else {   /* We assume this is some non-pointer- */
                /* containing type. */
                /* This was the only formulation of this call/condition/assignment that valgrind did not complain about */
                new |= ((*ptp->mark) (&nep, pstate)) ? 1 : 0;
               goto ts;
            }
        }

        /* ---------------- Refs ---------------- */

      do_refs:
        {
            ref_packed *pptr = sp->ptr;
            ref *rptr;

          tr:if (!sp->index) {
                /* Exhausted this run of refs: pop it. */
                --sp;
                continue;
            }
            --(sp->index);
            if_debug3m('8', pstate->heap, "  [8]%smarking refs "PRI_INTPTR"[%u]\n",
                      depth_dots(sp, pms), (intptr_t)pptr, sp->index);
            if (r_is_packed(pptr)) {
                if (!r_has_pmark(pptr)) {
                    r_set_pmark(pptr);
                    new |= 1;
                    if (r_packed_is_name(pptr)) {
                        name_index_t nidx = packed_name_index(pptr);

                        mark_name(nidx);
                    }
                }
                ++pptr;
                goto tr;
            }
            rptr = (ref *) pptr;  /* * const beyond here */
            if (r_has_attr(rptr, l_mark)) {
                pptr = (ref_packed *)(rptr + 1);
                goto tr;
            }
            r_set_attrs(rptr, l_mark);
            new |= 1;
            if (r_space(rptr) < min_trace) { /* Note that this always picks up all scalars. */
                pptr = (ref_packed *) (rptr + 1);
                goto tr;
            }
            /* Record the resume point before descending into the ref's target. */
            sp->ptr = rptr + 1;
            switch (r_type(rptr)) {
                    /* Struct cases */
                case t_file:
                    nptr = rptr->value.pfile;
                  rs:sp[1].is_refs = false;
                    sp[1].index = 0;
                    if (sp == stop) {
                        ptp = ptr_struct_type;
                        break;
                    }
                    nep.ptr = nptr;
                    if (!ptr_struct_mark(&nep, pstate))
                        goto nr;
                    new |= 1;
                    (++sp)->ptr = nptr;
                    goto do_struct;
                case t_device:
                    nptr = rptr->value.pdevice;
                    goto rs;
                case t_fontID:
                case t_struct:
                case t_astruct:
                case t_pdfctx:
                    nptr = rptr->value.pstruct;
                    goto rs;
                    /* Non-trivial non-struct cases */
                case t_dictionary:
                    nptr = rptr->value.pdict;
                    sp[1].index = sizeof(dict) / sizeof(ref);
                    goto rrp;
                case t_array:
                    nptr = rptr->value.refs;
                  rr:if ((sp[1].index = r_size(rptr)) == 0) { /* Set the base pointer to 0, */
                        /* so we never try to relocate it. */
                        rptr->value.refs = 0;
                        goto nr;
                    }
                  rrp:
                  rrc:sp[1].is_refs = true;
                    if (sp == stop) {
                        /*
                         * The following initialization is unnecessary:
                         * ptp will not be used if sp[1].is_refs = true.
                         * We put this here solely to get rid of bogus
                         * "possibly uninitialized variable" warnings
                         * from certain compilers.
                         */
                        ptp = ptr_ref_type;
                        break;
                    }
                    new |= 1;
                    (++sp)->ptr = nptr;
                    goto do_refs;
                case t_mixedarray:
                case t_shortarray:
                    nptr = rptr->value.writable_packed;
                    goto rr;
                case t_name:
                    mark_name(names_index(nt, rptr));
                  nr:pptr = (ref_packed *) (rptr + 1);
                    goto tr;
                case t_string:
                    if (gc_string_mark(rptr->value.bytes, r_size(rptr), true, pstate))
                        new |= 1;
                    goto nr;
                case t_oparray:
                    nptr = rptr->value.refs;  /* discard const */
                    sp[1].index = 1;
                    goto rrc;
                default:
                    goto nr;
            }
        }

        /* ---------------- Recursion ---------------- */

      push:
        if (sp == stop) { /* The current segment is full. */
            int new_added = gc_extend_stack(pms, pstate);

            /* Nonzero means the stack overflowed; the clump rescan range
             * was extended instead, so just keep going. */
            if (new_added) {
                new |= new_added;
                continue;
            }
            pms = pms->next;
            stop = pms->entries + pms->count - 1;
            pms->entries[1] = sp[1];
            sp = pms->entries;
        }
        /* index and is_refs are already set */
        if (!sp[1].is_refs) {
            nep.ptr = nptr;
            if (!(*ptp->mark) (&nep, pstate))
                continue;
            new |= 1;
        }
        (++sp)->ptr = nptr;
    }
    return new;
}
1040
/* Link to, attempting to allocate if necessary, */
/* another clump of mark stack. */
/*
 * Returns 0 if a next segment exists (or was successfully allocated).
 * On allocation failure the mark stack has overflowed: the containing
 * clump's rescan range is widened so a later pass can re-trace it, and
 * a nonzero value (1 or -1) is returned to the caller.
 */
static int
gc_extend_stack(gc_mark_stack * pms, gc_state_t * pstate)
{
    if (pms->next == 0) { /* Try to allocate another segment. */
        uint count;

        /* Halve the requested size until an allocation succeeds
         * or we drop below the minimum useful segment size. */
        for (count = ms_size_desired; count >= ms_size_min; count >>= 1) {
            pms->next = (gc_mark_stack *)
                gs_alloc_bytes_immovable(pstate->heap,
                                         sizeof(gc_mark_stack) +
                                         sizeof(ms_entry) * count,
                                         "gc mark stack");
            if (pms->next != 0)
                break;
        }
        if (pms->next == 0) { /* The mark stack overflowed. */
            ms_entry *sp = pms->entries + pms->count - 1;
            byte *cptr = sp->ptr; /* container */
            clump_t *cp = gc_locate(cptr, pstate);
            int new = 1;

            if (cp == 0) { /* We were tracing outside collectible */
                /* storage.  This can't happen. */
                if_debug1('6', "mark stack overflowed while outside collectible space at "PRI_INTPTR"!\n",
                         (intptr_t)cptr);
                gs_abort(pstate->heap);
            }
            /* Widen the clump's rescan window to cover the pointer we
             * could not push; -1 signals that a rescan is required. */
            if (cptr < cp->rescan_bot)
                cp->rescan_bot = cptr, new = -1;
            if (cptr > cp->rescan_top)
                cp->rescan_top = cptr, new = -1;
            return new;
        }
        gc_init_mark_stack(pms->next, count);
        pms->next->prev = pms;
        pms->next->on_heap = true;  /* must be freed when GC finishes */
    }
    return 0;
}
1081
1082
/* Mark a struct.  Return true if new mark. */
1083
static bool
1084
ptr_struct_mark(enum_ptr_t *pep, gc_state_t * ignored)
1085
456M
{
1086
456M
    obj_header_t *ptr = (obj_header_t *)pep->ptr;
1087
1088
456M
    if (ptr == 0)
1089
68.1M
        return false;
1090
387M
    ptr--;      /* point to header */
1091
387M
    if (!o_is_unmarked(ptr))
1092
377M
        return false;
1093
10.2M
    o_mark(ptr);
1094
10.2M
    return true;
1095
387M
}
1096
1097
/* Mark a string.  Return true if new mark. */
1098
static bool
1099
ptr_string_mark(enum_ptr_t *pep, gc_state_t * gcst)
1100
103M
{
1101
103M
    return gc_string_mark(pep->ptr, pep->size, true, gcst);
1102
103M
}
1103
1104
/* Mark a name.  Return true if new mark. */
1105
static bool
1106
ptr_name_index_mark(enum_ptr_t *pep, gc_state_t * gcst)
1107
0
{
1108
0
    return names_mark_index(gcst->heap->gs_lib_ctx->gs_name_table, pep->size);
1109
0
}
1110
1111
/* Finish tracing by marking names. */
/*
 * Walk every valid name-table entry that was marked during tracing and
 * mark the storage backing it: the name's string bytes (unless foreign)
 * and the name/string sub-tables that contain it.  Returns true if any
 * new object was marked, so the caller knows another pass is needed.
 */
static bool
gc_trace_finish(gc_state_t * pstate)
{
    name_table *nt = pstate->ntable;
    name_index_t nidx = 0;
    bool marked = false;

    while ((nidx = names_next_valid_index(nt, nidx)) != 0) {
        name_string_t *pnstr = names_index_string_inline(nt, nidx);

        if (pnstr->mark) {
            enum_ptr_t enst, ensst;

            if (!pnstr->foreign_string &&
                gc_string_mark(pnstr->string_bytes, pnstr->string_size,
                               true, pstate)
                )
                marked = true;
            enst.ptr = names_index_sub_table(nt, nidx);
            ensst.ptr = names_index_string_sub_table(nt, nidx);
            /* Deliberately non-short-circuit '|': both sub-tables must
             * be marked even if the first call already marked something. */
            marked |=
                ptr_struct_mark(&enst, pstate) |
                ptr_struct_mark(&ensst, pstate);
        }
    }
    return marked;
}
1139
1140
/* ------ Relocation planning phase ------ */
1141
1142
/* Initialize the relocation information in the clump header. */
1143
static void
1144
gc_init_reloc(clump_t * cp)
1145
4.24M
{
1146
4.24M
    clump_head_t *chead = cp->chead;
1147
1148
4.24M
    chead->dest = cp->cbase;
1149
4.24M
    chead->free.o_back =
1150
4.24M
        offset_of(clump_head_t, free) >> obj_back_shift;
1151
4.24M
    chead->free.o_size = sizeof(obj_header_t);
1152
4.24M
    chead->free.o_nreloc = 0;
1153
4.24M
}
1154
1155
/* Set marks and clear relocation for clumps that won't be compacted. */
/*
 * Every object in the clump is flagged o_untraced and its back pointer
 * is set relative to the clump head's dummy free object, so the
 * relocation phase treats the whole clump as immovable.  Strings get
 * their marks set and relocation cleared for the same reason.
 */
static void
gc_clear_reloc(clump_t * cp)
{
    byte *pfree = (byte *) & cp->chead->free;

    gc_init_reloc(cp);
    /* SCAN_CLUMP_OBJECTS/DO_ALL expand to a loop providing 'pre'
     * (object header) and 'size' for every object in the clump. */
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        const struct_shared_procs_t *procs =
    pre->o_type->shared;

    if (procs != 0)
        (*procs->clear_reloc) (pre, size);
    o_set_untraced(pre);
    pre->o_back = ((byte *) pre - pfree) >> obj_back_shift;
    END_OBJECTS_SCAN
        gc_strings_set_marks(cp, true);
    gc_strings_clear_reloc(cp);
}
1175
1176
/* Set the relocation for the objects in a clump. */
/* This will never be called for a clump with any o_untraced objects. */
/*
 * Walks the clump accumulating 'reloc', the total bytes of freed space
 * seen so far; each surviving object's back pointer records its offset
 * from the most recent free object, and each free object records the
 * running reloc so igc_reloc_struct_ptr can compute new addresses.
 * Free objects with a finalize proc are finalized here, before their
 * storage is reused by compaction.
 */
static void
gc_objects_set_reloc(gc_state_t * gcst, clump_t * cp)
{
    size_t reloc = 0;
    clump_head_t *chead = cp->chead;
    byte *pfree = (byte *) & chead->free; /* most recent free object */

    if_debug_clump('6', gcst->heap, "[6]setting reloc for clump", cp);
    gc_init_reloc(cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        struct_proc_finalize((*finalize));
    const struct_shared_procs_t *procs =
    pre->o_type->shared;

    /* An object is dead if it is unmarked (no shared procs) or its
     * shared set_reloc proc says so. */
    if ((procs == 0 ? o_is_unmarked(pre) :
         !(*procs->set_reloc) (pre, reloc, size))
        ) {     /* Free object */
        reloc += sizeof(obj_header_t) + obj_align_round(size);
        if ((finalize = pre->o_type->finalize) != 0) {
            if_debug2m('u', gcst->heap, "[u]GC finalizing %s "PRI_INTPTR"\n",
                       struct_type_name_string(pre->o_type),
                       (intptr_t)(pre + 1));
            (*finalize) (gcst->cur_mem, pre + 1);
        }
        pfree = (byte *) pre;
        /* Free object's back pointer targets the clump head: this is
         * the "is free" test used by the later phases. */
        pre->o_back = (pfree - (byte *) chead) >> obj_back_shift;
        pre->o_nreloc = reloc;
        if_debug3m('7', gcst->heap, " [7]at "PRI_INTPTR", unmarked %lu, new reloc = %u\n",
                   (intptr_t)pre, (ulong) size, (unsigned int)reloc);
    } else {     /* Useful object */
        debug_check_object(pre, cp, gcst);
        pre->o_back = ((byte *) pre - pfree) >> obj_back_shift;
    }
    END_OBJECTS_SCAN
#ifdef DEBUG
    if (reloc != 0) {
        if_debug1m('6', gcst->heap, "[6]freed %u", (unsigned int)reloc);
        if_debug_clump('6', gcst->heap, " in", cp);
    }
#endif
}
1220
1221
/* ------ Relocation phase ------ */
1222
1223
/* Relocate the pointers in all the objects in a clump. */
/*
 * For every live (or untraced) object, invoke its type's reloc_ptrs
 * proc to rewrite interior pointers to their post-compaction addresses.
 * Free objects are skipped: their back pointer points at the clump head.
 */
static void
gc_do_reloc(clump_t * cp, gs_ref_memory_t * mem, gc_state_t * pstate)
{
    clump_head_t *chead = cp->chead;

    if_debug_clump('6', (const gs_memory_t *)mem, "[6]relocating in clump", cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
#ifdef DEBUG
        /* Remember the current clump for back-pointer sanity checks. */
        pstate->container = cp;
#endif
    /* We need to relocate the pointers in an object iff */
    /* it is o_untraced, or it is a useful object. */
    /* An object is free iff its back pointer points to */
    /* the clump_head structure. */
        if (o_is_untraced(pre) ||
            pre->o_back << obj_back_shift != (byte *) pre - (byte *) chead
            ) {
            struct_proc_reloc_ptrs((*proc)) =
                pre->o_type->reloc_ptrs;

            if_debug3m('7', (const gs_memory_t *)mem,
                       " [7]relocating ptrs in %s(%lu) "PRI_INTPTR"\n",
                       struct_type_name_string(pre->o_type),
                       (ulong)size, (intptr_t)pre);
            if (proc != 0)
                (*proc) (pre + 1, size, pre->o_type, pstate);
        }
#ifdef DEBUG
        pstate->container = 0;
#endif
    END_OBJECTS_SCAN
}
1257
1258
/* Print pointer relocation if debugging. */
/* We have to provide this procedure even if DEBUG is not defined, */
/* in case one of the other GC modules was compiled with DEBUG. */
/* Returns robj unchanged so it can be used inline in assignments. */
const void *
print_reloc_proc(const void *obj, const char *cname, const void *robj)
{
    if_debug3('9', "  [9]relocate %s * "PRI_INTPTR" to "PRI_INTPTR"\n",
              cname, (intptr_t)obj, (intptr_t)robj);
    return robj;
}
1268
1269
/* Relocate a pointer to an (aligned) object. */
/* See gsmemory.h for why the argument is const and the result is not. */
/*
 * Computes the post-compaction address of 'obj' from the back-pointer
 * chain laid down by gc_objects_set_reloc: follow obj's o_back to the
 * most recent preceding free object, whose own o_back reaches the clump
 * head; the new address is chead->dest plus obj's offset in the clump
 * minus the bytes freed before it (pfree->o_nreloc).
 */
static void /*obj_header_t */ *
igc_reloc_struct_ptr(const void /*obj_header_t */ *obj, gc_state_t * gcst)
{
    const obj_header_t *const optr = (const obj_header_t *)obj;
    const void *robj;

    if (obj == 0) {
        discard(print_reloc(obj, "NULL", 0));
        return 0;
    }
    debug_check_object(optr - 1, NULL, gcst);
    {
        uint back = optr[-1].o_back;

        if (back == o_untraced)
            robj = obj;     /* object was never moved */
        else {
#ifdef DEBUG
            /* Do some sanity checking. */
            clump_t *cp = gcst->container;

            if (cp != 0 && cp->cbase <= (byte *)obj && (byte *)obj <cp->ctop) {
                if (back > (cp->ctop - cp->cbase) >> obj_back_shift) {
                    if_debug2('6', "Invalid back pointer %u at "PRI_INTPTR"!\n",
                             back, (intptr_t)obj);
                    gs_abort(NULL);
                }
            } else {
                /* Pointed to unknown clump. Can't check it, sorry. */
            }
#endif
            {
                /* Nearest preceding free object. */
                const obj_header_t *pfree = (const obj_header_t *)
                ((const char *)(optr - 1) -
                 (back << obj_back_shift));
                /* Clump head, reached via the free object's back pointer. */
                const clump_head_t *chead = (const clump_head_t *)
                ((const char *)pfree -
                 (pfree->o_back << obj_back_shift));

                robj = chead->dest +
                    ((const char *)obj - (const char *)(chead + 1) -
                     pfree->o_nreloc);
            }
        }
    }
    /* Use a severely deprecated pun to remove the const property. */
    {
        union { const void *r; void *w; } u;

        u.r = print_reloc(obj, struct_type_name_string(optr[-1].o_type), robj);
        return u.w;
    }
}
1324
1325
/* ------ Compaction phase ------ */
1326
1327
/* Compact the objects in a clump. */
/* This will never be called for a clump with any o_untraced objects. */
/*
 * Slides every live object down to its precomputed destination (dpre
 * advances past each compacted object), then fills the reclaimed tail
 * with the debug fill pattern and resets the clump's allocation point.
 */
static void
gc_objects_compact(clump_t * cp, gc_state_t * gcst)
{
    clump_head_t *chead = cp->chead;
    obj_header_t *dpre = (obj_header_t *) chead->dest;
    const gs_memory_t *cmem = gcst->spaces.memories.named.system->stable_memory;

    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
    /* An object is free iff its back pointer points to */
    /* the clump_head structure. */
        if (pre->o_back << obj_back_shift != (byte *) pre - (byte *) chead) {
        const struct_shared_procs_t *procs = pre->o_type->shared;

        debug_check_object(pre, cp, gcst);
        if_debug4m('7', cmem,
                   " [7]compacting %s "PRI_INTPTR"(%lu) to "PRI_INTPTR"\n",
                   struct_type_name_string(pre->o_type),
                   (intptr_t)pre, (ulong)size, (intptr_t)dpre);
        if (procs == 0) {
            /* memmove: source and destination may overlap. */
            if (dpre != pre)
                memmove(dpre, pre,
                        sizeof(obj_header_t) + size);
        } else
            (*procs->compact) (cmem, pre, dpre, size);
        dpre = (obj_header_t *)
            ((byte *) dpre + obj_size_round(size));
    }
    END_OBJECTS_SCAN
        if (cp->outer == 0 && chead->dest != cp->cbase)
        dpre = (obj_header_t *) cp->cbase; /* compacted this clump into another */
    gs_alloc_fill(dpre, gs_alloc_fill_collected, cp->cbot - (byte *) dpre);
    cp->cbot = (byte *) dpre;
    cp->rcur = 0;
    cp->rtop = 0;   /* just to be sure */
}
1365
1366
/* ------ Cleanup ------ */
1367
1368
static splay_app_result_t
1369
free_if_empty(clump_t *cp, void *arg)
1370
4.17M
{
1371
4.17M
    gs_ref_memory_t * mem = (gs_ref_memory_t *)arg;
1372
1373
4.17M
    if (cp->cbot == cp->cbase && cp->ctop == cp->climit &&
1374
4.17M
        cp->outer == 0 && cp->inner_count == 0)
1375
280k
    {
1376
280k
        alloc_free_clump(cp, mem);
1377
280k
        if (mem->cc == cp)
1378
0
            mem->cc = NULL;
1379
280k
    }
1380
4.17M
    return SPLAY_APP_CONTINUE;
1381
4.17M
}
1382
1383
/* Free empty clumps. */
/* Applies free_if_empty to every clump in the allocator's splay tree. */
static void
gc_free_empty_clumps(gs_ref_memory_t * mem)
{
    /* NOTE: Not in reverse order any more, so potentially
     * not quite as good for crap allocators. */
    clump_splay_app(mem->root, mem, free_if_empty, mem);
}
1391
1392
/* Return the stable system memory associated with a GC state. */
const gs_memory_t * gcst_get_memory_ptr(gc_state_t *gcst)
{
    /* NOTE(review): the local copy 'spaces' appears required — the
     * space_system macro presumably expands against a variable of this
     * name; confirm against isave.h before simplifying. */
    vm_spaces spaces = gcst->spaces;
    const gs_memory_t *cmem = space_system->stable_memory;
    return cmem;
}