Coverage Report

Created: 2025-06-10 06:56

/src/ghostpdl/psi/igc.c
Line
Count
Source (jump to first uncovered line)
1
/* Copyright (C) 2001-2024 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  39 Mesa Street, Suite 108A, San Francisco,
13
   CA 94129, USA, for further information.
14
*/
15
16
17
/* Garbage collector for Ghostscript */
18
#include "memory_.h"
19
#include "ghost.h"
20
#include "ierrors.h"
21
#include "gsexit.h"
22
#include "gsmdebug.h"
23
#include "gsstruct.h"
24
#include "iastate.h"
25
#include "isave.h"
26
#include "isstate.h"
27
#include "idict.h"
28
#include "ipacked.h"
29
#include "istruct.h"
30
#include "igc.h"
31
#include "igcstr.h"
32
#include "inamedef.h"
33
#include "opdef.h"    /* for marking oparray names */
34
35
/* Define whether to force all garbage collections to be global. */
36
static const bool I_FORCE_GLOBAL_GC = false;
37
38
/* Define whether to bypass the collector entirely. */
39
static const bool I_BYPASS_GC = false;
40
41
/* Define an entry on the mark stack. */
42
typedef struct {
43
    void *ptr;
44
    uint index;
45
    bool is_refs;
46
} ms_entry;
47
48
/* Define (a segment of) the mark stack. */
49
/* entries[0] has ptr = 0 to indicate the bottom of the stack. */
50
/* count additional entries follow this structure. */
51
typedef struct gc_mark_stack_s gc_mark_stack;
52
struct gc_mark_stack_s {
53
    gc_mark_stack *prev;
54
    gc_mark_stack *next;
55
    uint count;
56
    bool on_heap;   /* if true, allocated during GC */
57
    ms_entry entries[1];
58
};
59
60
/* Define the mark stack sizing parameters. */
61
19.7k
#define ms_size_default 100  /* default, allocated on C stack */
62
/* This should probably be defined as a parameter somewhere.... */
63
#define ms_size_desired   /* for additional allocation */\
64
140
 ((max_ushort - sizeof(gc_mark_stack)) / sizeof(ms_entry) - 10)
65
6.70M
#define ms_size_min 50    /* min size for segment in free block */
66
67
/* Forward references */
68
69
static void gc_init_mark_stack(gc_mark_stack *, uint);
70
static void gc_objects_clear_marks(const gs_memory_t *mem, clump_t *);
71
static void gc_unmark_names(name_table *, op_array_table *, op_array_table *);
72
static int gc_trace(gs_gc_root_t *, gc_state_t *, gc_mark_stack *);
73
static int gc_rescan_clump(clump_t *, gc_state_t *, gc_mark_stack *);
74
static int gc_trace_clump(const gs_memory_t *mem, clump_t *, gc_state_t *, gc_mark_stack *);
75
static bool gc_trace_finish(gc_state_t *);
76
static void gc_clear_reloc(clump_t *);
77
static void gc_objects_set_reloc(gc_state_t * gcst, clump_t *);
78
static void gc_do_reloc(clump_t *, gs_ref_memory_t *, gc_state_t *);
79
static void gc_objects_compact(clump_t *, gc_state_t *);
80
static void gc_free_empty_clumps(gs_ref_memory_t *);
81
82
/* Forward references for pointer types */
83
static ptr_proc_unmark(ptr_struct_unmark);
84
static ptr_proc_mark(ptr_struct_mark);
85
static ptr_proc_unmark(ptr_string_unmark);
86
static ptr_proc_mark(ptr_string_mark);
87
static ptr_proc_unmark(ptr_name_index_unmark);
88
static ptr_proc_mark(ptr_name_index_mark);
89
/*ptr_proc_unmark(ptr_ref_unmark); *//* in igc.h */
90
/*ptr_proc_mark(ptr_ref_mark); *//* in igc.h */
91
static ptr_proc_reloc(igc_reloc_struct_ptr, void);
92
93
ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed);  /* in igcref.c */
94
refs_proc_reloc(igc_reloc_refs);  /* in igcref.c */
95
96
/* Define this GC's procedure vector. */
97
static const gc_procs_with_refs_t igc_procs = {
98
    igc_reloc_struct_ptr, igc_reloc_string, igc_reloc_const_string,
99
    igc_reloc_param_string, igc_reloc_ref_ptr, igc_reloc_refs
100
};
101
102
/* Pointer type descriptors. */
103
/* Note that the trace/mark routine has special knowledge of ptr_ref_type */
104
/* and ptr_struct_type -- it assumes that no other types have embedded */
105
/* pointers.  Note also that the reloc procedures for string, ref and name */
106
/* pointers are never called. */
107
typedef ptr_proc_reloc((*ptr_proc_reloc_t), void);
108
const gs_ptr_procs_t ptr_struct_procs =
109
{ptr_struct_unmark, ptr_struct_mark, (ptr_proc_reloc_t) igc_reloc_struct_ptr};
110
const gs_ptr_procs_t ptr_string_procs =
111
{ptr_string_unmark, ptr_string_mark, NULL};
112
const gs_ptr_procs_t ptr_const_string_procs =
113
{ptr_string_unmark, ptr_string_mark, NULL};
114
const gs_ptr_procs_t ptr_ref_procs =
115
{ptr_ref_unmark, ptr_ref_mark, NULL};
116
const gs_ptr_procs_t ptr_name_index_procs =
117
{ptr_name_index_unmark, ptr_name_index_mark, NULL};
118
119
/* ------ Main program ------ */
120
121
/* Top level of garbage collector. */
122
#ifdef DEBUG
123
static void
124
end_phase(const gs_memory_t *mem, const char *str)
125
{
126
    if (gs_debug_c('6')) {
127
        dmlprintf1(mem, "[6]---------------- end %s ----------------\n",
128
                   (const char *)str);
129
        dmflush(mem);
130
    }
131
}
132
static const char *const depth_dots_string = "..........";
133
static const char *
134
depth_dots(const ms_entry * sp, const gc_mark_stack * pms)
135
{
136
    int depth = sp - pms->entries - 1;
137
    const gc_mark_stack *pss = pms;
138
139
    while ((pss = pss->prev) != 0)
140
        depth += pss->count - 1;
141
    return depth_dots_string + (depth >= 10 ? 0 : 10 - depth);
142
}
143
static void
144
gc_validate_spaces(gs_ref_memory_t **spaces, int max_space, gc_state_t *gcst)
145
{
146
    int i;
147
    gs_ref_memory_t *mem;
148
149
    for (i = 1; i <= max_space; ++i)
150
        if ((mem = spaces[i]) != 0)
151
            ialloc_validate_memory(mem, gcst);
152
}
153
#else  /* !DEBUG */
154
315k
#  define end_phase(mem,str) DO_NOTHING
155
#endif /* DEBUG */
156
157
void
158
gs_gc_reclaim(vm_spaces * pspaces, bool global)
159
19.7k
{
160
19.7k
#define nspaces ((i_vm_max + 1) * 2) /* * 2 for stable allocators */
161
162
19.7k
    vm_spaces spaces;
163
19.7k
    gs_ref_memory_t *space_memories[nspaces];
164
19.7k
    gs_gc_root_t space_roots[nspaces];
165
19.7k
    int max_trace;    /* max space_ to trace */
166
19.7k
    int min_collect;    /* min space_ to collect */
167
19.7k
    int min_collect_vm_space; /* min VM space to collect */
168
19.7k
    int ispace;
169
19.7k
    gs_ref_memory_t *mem;
170
19.7k
    clump_t *cp;
171
19.7k
    gs_gc_root_t *rp;
172
19.7k
    gc_state_t state;
173
19.7k
    struct _msd {
174
19.7k
        gc_mark_stack stack;
175
19.7k
        ms_entry body[ms_size_default];
176
19.7k
    } ms_default;
177
19.7k
    gc_mark_stack *mark_stack = &ms_default.stack;
178
19.7k
    const gs_memory_t *cmem;
179
19.7k
    clump_splay_walker sw;
180
181
    /* Optionally force global GC for debugging. */
182
183
19.7k
    if (I_FORCE_GLOBAL_GC)
184
0
        global = true;
185
186
    /* Determine which spaces we are tracing and collecting. */
187
188
19.7k
    spaces = *pspaces;
189
19.7k
    cmem = space_system->stable_memory;
190
19.7k
    space_memories[1] = space_system;
191
19.7k
    space_memories[2] = space_global;
192
19.7k
    min_collect = max_trace = 2;
193
19.7k
    min_collect_vm_space = i_vm_global;
194
19.7k
    if (space_global->stable_memory != (gs_memory_t *)space_global)
195
19.7k
        space_memories[++max_trace] =
196
19.7k
            (gs_ref_memory_t *)space_global->stable_memory;
197
19.7k
    if (space_global != space_local) {
198
19.7k
        space_memories[++max_trace] = space_local;
199
19.7k
        min_collect = max_trace;
200
19.7k
        min_collect_vm_space = i_vm_local;
201
19.7k
        if (space_local->stable_memory != (gs_memory_t *)space_local)
202
19.7k
            space_memories[++max_trace] =
203
19.7k
                (gs_ref_memory_t *)space_local->stable_memory;
204
19.7k
    }
205
19.7k
    if (global)
206
19.3k
        min_collect = min_collect_vm_space = 1;
207
208
19.7k
#define for_spaces(i, n)\
209
774k
  for (i = 1; i <= n; ++i)
210
19.7k
#define for_collected_spaces(i)\
211
1.05M
  for (i = min_collect; i <= max_trace; ++i)
212
19.7k
#define for_space_mems(i, mem)\
213
1.44M
  for (mem = space_memories[i]; mem != 0; mem = &mem->saved->state)
214
19.7k
#define for_mem_clumps(mem, cp, sw)\
215
17.4M
  for (cp = clump_splay_walk_init(sw, mem); cp != 0; cp = clump_splay_walk_fwd(sw))
216
19.7k
#define for_space_clumps(i, mem, cp, sw)\
217
570k
  for_space_mems(i, mem) for_mem_clumps(mem, cp, sw)
218
19.7k
#define for_clumps(i, n, mem, cp, sw)\
219
98.6k
  for_spaces(i, n) for_space_clumps(i, mem, cp, sw)
220
19.7k
#define for_collected_clumps(i, mem, cp, sw)\
221
39.4k
  for_collected_spaces(i) for_space_clumps(i, mem, cp, sw)
222
19.7k
#define for_roots(i, n, mem, rp)\
223
59.2k
  for_spaces(i, n)\
224
918k
    for (mem = space_memories[i], rp = mem->roots; rp != 0; rp = rp->next)
225
226
    /* Initialize the state. */
227
228
19.7k
    state.procs = &igc_procs;
229
19.7k
    state.loc.memory = space_global; /* any one will do */
230
231
19.7k
    state.loc.cp = 0;
232
19.7k
    state.spaces = spaces;
233
19.7k
    state.min_collect = min_collect_vm_space << r_space_shift;
234
19.7k
    state.relocating_untraced = false;
235
19.7k
    state.heap = state.loc.memory->non_gc_memory;
236
19.7k
    state.ntable = state.heap->gs_lib_ctx->gs_name_table;
237
238
    /* Register the allocators themselves as roots, */
239
    /* so we mark and relocate the change and save lists properly. */
240
241
98.6k
    for_spaces(ispace, max_trace) {
242
98.6k
        rp = &space_roots[ispace];
243
98.6k
        gs_register_struct_root((gs_memory_t *)space_memories[ispace],
244
98.6k
                                &rp,
245
98.6k
                                (void **)&space_memories[ispace],
246
98.6k
                                "gc_top_level");
247
98.6k
    }
248
249
19.7k
    end_phase(state.heap,"register space roots");
250
251
#ifdef DEBUG
252
253
    /* Pre-validate the state.  This shouldn't be necessary.... */
254
255
    gc_validate_spaces(space_memories, max_trace, &state);
256
257
    end_phase(state.heap,"pre-validate pointers");
258
259
#endif
260
261
19.7k
    if (I_BYPASS_GC) {   /* Don't collect at all. */
262
0
        goto no_collect;
263
0
    }
264
265
    /* Clear marks in spaces to be collected. */
266
267
19.7k
    for_collected_spaces(ispace)
268
3.28M
        for_space_clumps(ispace, mem, cp, &sw) {
269
3.28M
            gc_objects_clear_marks((const gs_memory_t *)mem, cp);
270
3.28M
            gc_strings_set_marks(cp, false);
271
3.28M
        }
272
273
19.7k
    end_phase(state.heap,"clear clump marks");
274
275
    /* Clear the marks of roots.  We must do this explicitly, */
276
    /* since some roots are not in any clump. */
277
278
207k
    for_roots(ispace, max_trace, mem, rp) {
279
207k
        enum_ptr_t eptr;
280
281
207k
        eptr.ptr = *rp->p;
282
207k
        if_debug_root('6', (const gs_memory_t *)mem, "[6]unmarking root", rp);
283
207k
        (*rp->ptype->unmark)(&eptr, &state);
284
207k
    }
285
286
19.7k
    end_phase(state.heap,"clear root marks");
287
288
19.7k
    if (global) {
289
19.3k
        op_array_table *global_ops = get_global_op_array(cmem);
290
19.3k
        op_array_table *local_ops  = get_local_op_array(cmem);
291
19.3k
        gc_unmark_names(state.ntable, global_ops, local_ops);
292
19.3k
    }
293
294
    /* Initialize the (default) mark stack. */
295
296
19.7k
    gc_init_mark_stack(&ms_default.stack, ms_size_default);
297
19.7k
    ms_default.stack.prev = 0;
298
19.7k
    ms_default.stack.on_heap = false;
299
300
    /* Add all large-enough free blocks to the mark stack. */
301
    /* Also initialize the rescan pointers. */
302
303
19.7k
    {
304
19.7k
        gc_mark_stack *end = mark_stack;
305
306
3.35M
        for_clumps(ispace, max_trace, mem, cp, &sw) {
307
3.35M
            uint avail = cp->ctop - cp->cbot;
308
309
3.35M
            if (avail >= sizeof(gc_mark_stack) + sizeof(ms_entry) *
310
3.35M
                ms_size_min &&
311
3.35M
                !cp->inner_count
312
3.35M
                ) {
313
430k
                gc_mark_stack *pms = (gc_mark_stack *) cp->cbot;
314
315
430k
                gc_init_mark_stack(pms, (avail - sizeof(gc_mark_stack)) /
316
430k
                                   sizeof(ms_entry));
317
430k
                end->next = pms;
318
430k
                pms->prev = end;
319
430k
                pms->on_heap = false;
320
430k
                if_debug2m('6', (const gs_memory_t *)mem,
321
430k
                           "[6]adding free "PRI_INTPTR"(%u) to mark stack\n",
322
430k
                           (intptr_t)pms, pms->count);
323
430k
            }
324
3.35M
            cp->rescan_bot = cp->cend;
325
3.35M
            cp->rescan_top = cp->cbase;
326
3.35M
        }
327
19.7k
    }
328
329
    /* Mark reachable objects. */
330
331
19.7k
    {
332
19.7k
        int more = 0;
333
334
        /* Mark from roots. */
335
336
207k
        for_roots(ispace, max_trace, mem, rp) {
337
207k
            if_debug_root('6', (const gs_memory_t *)mem, "[6]marking root", rp);
338
207k
            more |= gc_trace(rp, &state, mark_stack);
339
207k
        }
340
341
19.7k
        end_phase(state.heap,"mark");
342
343
        /* If this is a local GC, mark from non-local clumps. */
344
345
19.7k
        if (!global)
346
19.7k
            for_clumps(ispace, min_collect - 1, mem, cp, &sw)
347
63.2k
                more |= gc_trace_clump((const gs_memory_t *)mem, cp, &state, mark_stack);
348
349
        /* Handle mark stack overflow. */
350
351
19.7k
        while (more < 0) { /* stack overflowed */
352
0
            more = 0;
353
0
            for_clumps(ispace, max_trace, mem, cp, &sw)
354
0
                more |= gc_rescan_clump(cp, &state, mark_stack);
355
0
        }
356
357
19.7k
        end_phase(state.heap,"mark overflow");
358
19.7k
    }
359
360
    /* Free the mark stack. */
361
362
19.7k
    {
363
19.7k
        gc_mark_stack *pms = mark_stack;
364
365
39.6k
        while (pms->next)
366
19.8k
            pms = pms->next;
367
59.3k
        while (pms) {
368
39.6k
            gc_mark_stack *prev = pms->prev;
369
370
39.6k
            if (pms->on_heap)
371
140
                gs_free_object(state.heap, pms, "gc mark stack");
372
39.4k
            else
373
39.6k
                gs_alloc_fill(pms, gs_alloc_fill_free,
374
39.6k
                              sizeof(*pms) + sizeof(ms_entry) * pms->count);
375
39.6k
            pms = prev;
376
39.6k
        }
377
19.7k
    }
378
379
19.7k
    end_phase(state.heap,"free mark stack");
380
381
19.7k
    if (global) {
382
19.3k
        gc_trace_finish(&state);
383
19.3k
        names_trace_finish(state.ntable, &state);
384
385
19.3k
        end_phase(state.heap,"finish trace");
386
19.3k
    }
387
388
    /* Filter save change lists with removing elements,
389
       which point to unmarked blocks of refs. */
390
19.7k
    {
391
19.7k
        int i;
392
393
97.4k
        for_collected_spaces(i) {
394
97.4k
            gs_ref_memory_t *mem = space_memories[i];
395
396
97.4k
            alloc_save__filter_changes(mem);
397
97.4k
        }
398
19.7k
    }
399
    /* Clear marks and relocation in spaces that are only being traced. */
400
    /* We have to clear the marks first, because we want the */
401
    /* relocation to wind up as o_untraced, not o_unmarked. */
402
403
19.7k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
404
63.2k
        gc_objects_clear_marks((const gs_memory_t *)mem, cp);
405
406
19.7k
    end_phase(state.heap,"post-clear marks");
407
408
19.7k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
409
63.2k
        gc_clear_reloc(cp);
410
411
19.7k
    end_phase(state.heap,"clear reloc");
412
413
    /* Set the relocation of roots outside any clump to o_untraced, */
414
    /* so we won't try to relocate pointers to them. */
415
    /* (Currently, there aren't any.) */
416
417
    /* Disable freeing in the allocators of the spaces we are */
418
    /* collecting, so finalization procedures won't cause problems. */
419
19.7k
    {
420
19.7k
        int i;
421
422
19.7k
        for_collected_spaces(i)
423
97.4k
            gs_enable_free((gs_memory_t *)space_memories[i], false);
424
19.7k
    }
425
426
    /* Compute relocation based on marks, in the spaces */
427
    /* we are going to compact.  Also finalize freed objects. */
428
19.7k
    state.cur_mem = (gs_memory_t *)mem;
429
430
3.28M
    for_collected_clumps(ispace, mem, cp, &sw) {
431
3.28M
        gc_objects_set_reloc(&state, cp);
432
3.28M
        gc_strings_set_reloc(cp);
433
3.28M
    }
434
435
    /* Re-enable freeing. */
436
19.7k
    {
437
19.7k
        int i;
438
439
19.7k
        for_collected_spaces(i)
440
97.4k
            gs_enable_free((gs_memory_t *)space_memories[i], true);
441
19.7k
    }
442
443
19.7k
    end_phase(state.heap,"set reloc");
444
445
    /* Relocate pointers. */
446
447
19.7k
    state.relocating_untraced = true;
448
19.7k
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
449
63.2k
        gc_do_reloc(cp, mem, &state);
450
19.7k
    state.relocating_untraced = false;
451
237k
    for_collected_clumps(ispace, mem, cp, &sw)
452
3.28M
        gc_do_reloc(cp, mem, &state);
453
454
19.7k
    end_phase(state.heap,"relocate clumps");
455
456
207k
    for_roots(ispace, max_trace, mem, rp) {
457
207k
        if_debug3m('6', (const gs_memory_t *)mem,
458
207k
                   "[6]relocating root "PRI_INTPTR": "PRI_INTPTR" -> "PRI_INTPTR"\n",
459
207k
                   (intptr_t)rp, (intptr_t)rp->p, (intptr_t)*rp->p);
460
207k
        if (rp->ptype == ptr_ref_type) {
461
10.0k
            ref *pref = (ref *) * rp->p;
462
463
10.0k
            igc_reloc_refs((ref_packed *) pref,
464
10.0k
                           (ref_packed *) (pref + 1),
465
10.0k
                           &state);
466
10.0k
        } else
467
197k
            *rp->p = (*rp->ptype->reloc) (*rp->p, &state);
468
207k
        if_debug3m('6', (const gs_memory_t *)mem,
469
207k
                   "[6]relocated root "PRI_INTPTR": "PRI_INTPTR" -> "PRI_INTPTR"\n",
470
207k
                   (intptr_t)rp, (intptr_t)rp->p, (intptr_t)*rp->p);
471
207k
    }
472
473
19.7k
    end_phase(state.heap,"relocate roots");
474
475
    /* Compact data.  We only do this for spaces we are collecting. */
476
477
97.4k
    for_collected_spaces(ispace) {
478
140k
        for_space_mems(ispace, mem) {
479
3.28M
            for_mem_clumps(mem, cp, &sw) {
480
3.28M
                if_debug_clump('6', (const gs_memory_t *)mem, "[6]compacting clump", cp);
481
3.28M
                gc_objects_compact(cp, &state);
482
3.28M
                gc_strings_compact(cp, cmem);
483
3.28M
                if_debug_clump('6', (const gs_memory_t *)mem, "[6]after compaction:", cp);
484
3.28M
            }
485
140k
            mem->saved = mem->reloc_saved;
486
140k
            ialloc_reset_free(mem);
487
140k
        }
488
97.4k
    }
489
490
19.7k
    end_phase(state.heap,"compact");
491
492
    /* Free empty clumps. */
493
494
97.4k
    for_collected_spaces(ispace) {
495
140k
        for_space_mems(ispace, mem) {
496
140k
            gc_free_empty_clumps(mem);
497
140k
        }
498
97.4k
    }
499
500
19.7k
    end_phase(state.heap,"free empty clumps");
501
502
    /*
503
     * Update previous_status to reflect any freed clumps,
504
     * and set inherited to the negative of allocated,
505
     * so it has no effect.  We must update previous_status by
506
     * working back-to-front along the save chain, using pointer reversal.
507
     * (We could update inherited in any order, since it only uses
508
     * information local to the individual save level.)
509
     */
510
511
97.4k
    for_collected_spaces(ispace) { /* Reverse the pointers. */
512
97.4k
        alloc_save_t *curr;
513
97.4k
        alloc_save_t *prev = 0;
514
97.4k
        alloc_save_t *next;
515
97.4k
        gs_memory_status_t total;
516
517
140k
        for (curr = space_memories[ispace]->saved; curr != 0;
518
97.4k
             prev = curr, curr = next
519
97.4k
            ) {
520
43.1k
            next = curr->state.saved;
521
43.1k
            curr->state.saved = prev;
522
43.1k
        }
523
        /* Now work the other way, accumulating the values. */
524
97.4k
        total.allocated = 0, total.used = 0;
525
140k
        for (curr = prev, prev = 0; curr != 0;
526
97.4k
             prev = curr, curr = next
527
97.4k
            ) {
528
43.1k
            mem = &curr->state;
529
43.1k
            next = mem->saved;
530
43.1k
            mem->saved = prev;
531
43.1k
            mem->previous_status = total;
532
43.1k
            if_debug3m('6', (const gs_memory_t *)mem,
533
43.1k
                       "[6]"PRI_INTPTR" previous allocated=%lu, used=%lu\n",
534
43.1k
                       (intptr_t)mem, total.allocated, total.used);
535
43.1k
            gs_memory_status((gs_memory_t *) mem, &total);
536
43.1k
            mem->gc_allocated = mem->allocated + total.allocated;
537
43.1k
        }
538
97.4k
        mem = space_memories[ispace];
539
97.4k
        mem->previous_status = total;
540
97.4k
        mem->gc_allocated = mem->allocated + total.allocated;
541
97.4k
        if_debug3m('6', (const gs_memory_t *)mem,
542
97.4k
                   "[6]"PRI_INTPTR" previous allocated=%lu, used=%lu\n",
543
97.4k
                   (intptr_t)mem, total.allocated, total.used);
544
97.4k
    }
545
546
19.7k
    end_phase(state.heap,"update stats");
547
548
19.7k
  no_collect:
549
550
    /* Unregister the allocator roots. */
551
552
19.7k
    for_spaces(ispace, max_trace)
553
98.6k
        gs_unregister_root((gs_memory_t *)space_memories[ispace],
554
19.7k
                           &space_roots[ispace], "gc_top_level");
555
556
19.7k
    end_phase(state.heap,"unregister space roots");
557
558
#ifdef DEBUG
559
560
    /* Validate the state.  This shouldn't be necessary.... */
561
562
    gc_validate_spaces(space_memories, max_trace, &state);
563
564
    end_phase(state.heap,"validate pointers");
565
566
#endif
567
19.7k
}
568
569
/* ------ Debugging utilities ------ */
570
571
/* Validate a pointer to an object header. */
572
#ifdef DEBUG
573
#  define debug_check_object(pre, cp, gcst)\
574
     ialloc_validate_object((pre) + 1, cp, gcst)
575
#else
576
426M
#  define debug_check_object(pre, cp, gcst) DO_NOTHING
577
#endif
578
579
/* ------ Unmarking phase ------ */
580
581
/* Unmark a single struct. */
582
static void
583
ptr_struct_unmark(enum_ptr_t *pep, gc_state_t * ignored)
584
197k
{
585
197k
    void *const vptr = (void *)pep->ptr; /* break const */
586
587
197k
    if (vptr != 0)
588
197k
        o_set_unmarked(((obj_header_t *) vptr - 1));
589
197k
}
590
591
/* Unmark a single string. */
592
static void
593
ptr_string_unmark(enum_ptr_t *pep, gc_state_t * gcst)
594
0
{
595
0
    discard(gc_string_mark(pep->ptr, pep->size, false, gcst));
596
0
}
597
598
/* Unmark a single name. */
599
static void
600
ptr_name_index_unmark(enum_ptr_t *pep, gc_state_t * gcst)
601
0
{
602
    /* Do nothing */
603
0
}
604
605
/* Unmark the objects in a clump. */
606
static void
607
gc_objects_clear_marks(const gs_memory_t *mem, clump_t * cp)
608
3.35M
{
609
3.35M
    if_debug_clump('6', mem, "[6]unmarking clump", cp);
610
40.1M
    SCAN_CLUMP_OBJECTS(cp)
611
40.1M
        DO_ALL
612
40.1M
        struct_proc_clear_marks((*proc)) =
613
40.1M
        pre->o_type->clear_marks;
614
#ifdef DEBUG
615
    if (pre->o_type != &st_free)
616
        debug_check_object(pre, cp, NULL);
617
#endif
618
40.1M
    if_debug3m('7', (const gs_memory_t *)mem, " [7](un)marking %s(%lu) "PRI_INTPTR"\n",
619
40.1M
               struct_type_name_string(pre->o_type),
620
40.1M
               (ulong) size, (intptr_t)pre);
621
40.1M
    o_set_unmarked(pre);
622
40.1M
    if (proc != 0)
623
23.7M
        (*proc) (mem, pre + 1, size, pre->o_type);
624
40.1M
    END_OBJECTS_SCAN
625
3.35M
}
626
627
/* Mark 0- and 1-character names, and those referenced from the */
628
/* op_array_nx_table, and unmark all the rest. */
629
static void
630
gc_unmark_names(name_table * nt, op_array_table *op_array_table_global,
631
                op_array_table *op_array_table_local)
632
19.3k
{
633
19.3k
    uint i;
634
635
19.3k
    names_unmark_all(nt);
636
3.57M
    for (i = 0; i < op_array_table_global->count; i++) {
637
3.55M
        name_index_t nidx = op_array_table_global->nx_table[i];
638
639
3.55M
        names_mark_index(nt, nidx);
640
3.55M
    }
641
19.3k
    for (i = 0; i < op_array_table_local->count; i++) {
642
0
        name_index_t nidx = op_array_table_local->nx_table[i];
643
644
0
        names_mark_index(nt, nidx);
645
0
    }
646
19.3k
}
647
648
/* ------ Marking phase ------ */
649
650
/* Initialize (a segment of) the mark stack. */
651
static void
652
gc_init_mark_stack(gc_mark_stack * pms, uint count)
653
450k
{
654
450k
    pms->next = 0;
655
450k
    pms->count = count;
656
450k
    pms->entries[0].ptr = 0;
657
450k
    pms->entries[0].index = 0;
658
450k
    pms->entries[0].is_refs = false;
659
450k
}
660
661
/* Mark starting from all marked objects in the interval of a clump */
662
/* needing rescanning. */
663
static int
664
gc_rescan_clump(clump_t * cp, gc_state_t * pstate, gc_mark_stack * pmstack)
665
0
{
666
0
    byte *sbot = cp->rescan_bot;
667
0
    byte *stop = cp->rescan_top;
668
0
    gs_gc_root_t root;
669
0
    void *comp;
670
0
    int more = 0;
671
0
    const gs_memory_t *mem = gcst_get_memory_ptr( pstate );
672
673
0
    if (sbot > stop)
674
0
        return 0;
675
0
    root.p = &comp;
676
0
    if_debug_clump('6', mem, "[6]rescanning clump", cp);
677
0
    cp->rescan_bot = cp->cend;
678
0
    cp->rescan_top = cp->cbase;
679
0
    SCAN_CLUMP_OBJECTS(cp)
680
0
        DO_ALL
681
0
        if ((byte *) (pre + 1) + size < sbot);
682
0
    else if ((byte *) (pre + 1) > stop)
683
0
        return more;   /* 'break' won't work here */
684
0
    else {
685
0
        if_debug2m('7', mem, " [7]scanning/marking "PRI_INTPTR"(%lu)\n",
686
0
                   (intptr_t)pre, (ulong)size);
687
0
        if (pre->o_type == &st_refs) {
688
0
            ref_packed *rp = (ref_packed *) (pre + 1);
689
0
            char *end = (char *)rp + size;
690
691
0
            root.ptype = ptr_ref_type;
692
0
            while ((char *)rp < end) {
693
0
                comp = rp;
694
0
                if (r_is_packed(rp)) {
695
0
                    if (r_has_pmark(rp)) {
696
0
                        r_clear_pmark(rp);
697
0
                        more |= gc_trace(&root, pstate,
698
0
                                         pmstack);
699
0
                    }
700
0
                    rp++;
701
0
                } else {
702
0
                    ref *const pref = (ref *)rp;
703
704
0
                    if (r_has_attr(pref, l_mark)) {
705
0
                        r_clear_attrs(pref, l_mark);
706
0
                        more |= gc_trace(&root, pstate, pmstack);
707
0
                    }
708
0
                    rp += packed_per_ref;
709
0
                }
710
0
            }
711
0
        } else if (!o_is_unmarked(pre)) {
712
0
            struct_proc_clear_marks((*proc)) =
713
0
                pre->o_type->clear_marks;
714
0
            root.ptype = ptr_struct_type;
715
0
            comp = pre + 1;
716
0
            if (!o_is_untraced(pre))
717
0
                o_set_unmarked(pre);
718
0
            if (proc != 0)
719
0
                (*proc) (mem, comp, size, pre->o_type);
720
0
            more |= gc_trace(&root, pstate, pmstack);
721
0
        }
722
0
    }
723
0
    END_OBJECTS_SCAN
724
0
        return more;
725
0
}
726
727
/* Mark starting from all the objects in a clump. */
728
/* We assume that pstate->min_collect > avm_system, */
729
/* so we don't have to trace names. */
730
static int
731
gc_trace_clump(const gs_memory_t *mem, clump_t * cp, gc_state_t * pstate, gc_mark_stack * pmstack)
732
63.2k
{
733
63.2k
    gs_gc_root_t root;
734
63.2k
    void *comp;
735
63.2k
    int more = 0;
736
63.2k
    int min_trace = pstate->min_collect;
737
738
63.2k
    root.p = &comp;
739
63.2k
    if_debug_clump('6', mem, "[6]marking from clump", cp);
740
849k
    SCAN_CLUMP_OBJECTS(cp)
741
849k
        DO_ALL
742
849k
    {
743
849k
        if_debug2m('7', mem, " [7]scanning/marking "PRI_INTPTR"(%lu)\n",
744
849k
                   (intptr_t)pre, (ulong)size);
745
849k
        if (pre->o_type == &st_refs) {
746
393k
            ref_packed *rp = (ref_packed *) (pre + 1);
747
393k
            char *end = (char *)rp + size;
748
749
393k
            root.ptype = ptr_ref_type;
750
41.0M
            while ((char *)rp < end) {
751
40.6M
                comp = rp;
752
40.6M
                if (r_is_packed(rp)) { /* No packed refs need tracing. */
753
14.6M
                    rp++;
754
25.9M
                } else {
755
25.9M
                    ref *const pref = (ref *)rp;
756
757
25.9M
                    if (r_space(pref) >= min_trace) {
758
4.54k
                        r_clear_attrs(pref, l_mark);
759
4.54k
                        more |= gc_trace(&root, pstate, pmstack);
760
4.54k
                    }
761
25.9M
                    rp += packed_per_ref;
762
25.9M
                }
763
40.6M
            }
764
456k
        } else if (!o_is_unmarked(pre)) {
765
408k
            if (!o_is_untraced(pre))
766
408k
                o_set_unmarked(pre);
767
408k
            if (pre->o_type != &st_free) {
768
395k
                struct_proc_clear_marks((*proc)) =
769
395k
                    pre->o_type->clear_marks;
770
771
395k
                root.ptype = ptr_struct_type;
772
395k
                comp = pre + 1;
773
395k
                if (proc != 0)
774
18.8k
                    (*proc) (mem, comp, size, pre->o_type);
775
395k
                more |= gc_trace(&root, pstate, pmstack);
776
395k
            }
777
408k
        }
778
849k
    }
779
849k
    END_OBJECTS_SCAN
780
63.2k
        return more;
781
63.2k
}
782
783
/* Recursively mark from a (root) pointer. */
/* Return -1 if we overflowed the mark stack, */
/* 0 if we completed successfully without marking any new objects, */
/* 1 if we completed and marked some new objects. */
/*
 * This is an iterative depth-first trace.  Recursion is simulated with an
 * explicit mark stack (segments of ms_entry, chained through pms->prev/next);
 * each entry records a pointer, an enumeration index, and whether the pointee
 * is an array of refs (is_refs) or a struct.  Control flow is goto-based on
 * purpose: the labels below form the state machine's transitions.
 */
static int gc_extend_stack(gc_mark_stack *, gc_state_t *);
static int
gc_trace(gs_gc_root_t * rp, /* lgtm [cpp/use-of-goto] */
         gc_state_t * pstate, gc_mark_stack * pmstack)
{
    int min_trace = pstate->min_collect;
    gc_mark_stack *pms = pmstack;
    ms_entry *sp = pms->entries + 1;

    /* We stop the mark stack 1 entry early, because we store into */
    /* the entry beyond the top. */
    ms_entry *stop = sp + pms->count - 2;
    int new = 0;
    enum_ptr_t nep;
    void *nptr;
    name_table *nt = pstate->ntable;

    /* Mark a name-table index, recording whether the mark is new. */
#define mark_name(nidx)\
  BEGIN\
    if (names_mark_index(nt, nidx)) {\
        new |= 1;\
        if_debug2m('8', gcst_get_memory_ptr(pstate), "  [8]marked name "PRI_INTPTR"(%u)\n",\
                   (intptr_t)names_index_ptr(nt, nidx), nidx);\
    }\
  END

    nptr = *rp->p;
    if (nptr == 0)
        return 0;

    /* Initialize the stack */
    sp->ptr = nptr;
    if (rp->ptype == ptr_ref_type)
        sp->index = 1, sp->is_refs = true;
    else {
        sp->index = 0, sp->is_refs = false;
        nep.ptr = nptr;
        if ((*rp->ptype->mark) (&nep, pstate))
            new |= 1;
    }
    for (;;) {
        gs_ptr_type_t ptp;

        /*
         * The following should really be an if..else, but that
         * would force unnecessary is_refs tests.
         */
        if (sp->is_refs)
            goto do_refs;

        /* ---------------- Structure ---------------- */

      do_struct:
        {
            obj_header_t *ptr = sp->ptr;

            struct_proc_enum_ptrs((*mproc));

            if (ptr == 0) { /* We've reached the bottom of a stack segment. */
                pms = pms->prev;
                if (pms == 0)
                    break;  /* all done */
                stop = pms->entries + pms->count - 1;
                sp = stop;
                continue;
            }
            debug_check_object(ptr - 1, NULL, NULL);
            /* ts: re-enter here to enumerate the next pointer of the
               struct currently on top of the stack. */
          ts:if_debug4m('7', pstate->heap, " [7]%smarking %s "PRI_INTPTR"[%u]",
                        depth_dots(sp, pms),
                        struct_type_name_string(ptr[-1].o_type),
                        (intptr_t)ptr, sp->index);
            mproc = ptr[-1].o_type->enum_ptrs;
            if (mproc == gs_no_struct_enum_ptrs ||
                (ptp = (*mproc)
                 (gcst_get_memory_ptr(pstate), ptr, pre_obj_contents_size(ptr - 1),
                  sp->index, &nep, ptr[-1].o_type, pstate)) == 0
                ) {
                /* No more pointers in this struct: pop it. */
                if_debug0m('7', pstate->heap, " - done\n");
                sp--;
                continue;
            }
            /* The cast in the following statement is the one */
            /* place we need to break 'const' to make the */
            /* template for pointer enumeration work. */
            nptr = (void *)nep.ptr;
            sp->index++;
            if_debug1m('7', pstate->heap, " = "PRI_INTPTR"\n", (intptr_t)nptr);
            /* Descend into nep.ptr, whose pointer type is ptp. */
            if (ptp == ptr_struct_type) {
                sp[1].index = 0;
                sp[1].is_refs = false;
                if (sp == stop)
                    goto push;      /* stack segment full: overflow path */
                if (!ptr_struct_mark(&nep, pstate))
                    goto ts;        /* already marked: don't descend */
                new |= 1;
                (++sp)->ptr = nptr;
                goto do_struct;
            } else if (ptp == ptr_ref_type) {
                sp[1].index = 1;
                sp[1].is_refs = true;
                if (sp == stop)
                    goto push;
                new |= 1;
                (++sp)->ptr = nptr;
                goto do_refs;
            } else {   /* We assume this is some non-pointer- */
                /* containing type. */
                /* This was the only formulation of this call/condition/assignment that valgrind did not complain about */
                new |= ((*ptp->mark) (&nep, pstate)) ? 1 : 0;
               goto ts;
            }
        }

        /* ---------------- Refs ---------------- */

      do_refs:
        {
            ref_packed *pptr = sp->ptr;
            ref *rptr;

            /* tr: advance to the next element of the ref array on top of
               the stack; index counts remaining elements. */
          tr:if (!sp->index) {
                --sp;
                continue;
            }
            --(sp->index);
            if_debug3m('8', pstate->heap, "  [8]%smarking refs "PRI_INTPTR"[%u]\n",
                      depth_dots(sp, pms), (intptr_t)pptr, sp->index);
            if (r_is_packed(pptr)) {
                if (!r_has_pmark(pptr)) {
                    r_set_pmark(pptr);
                    new |= 1;
                    if (r_packed_is_name(pptr)) {
                        name_index_t nidx = packed_name_index(pptr);

                        mark_name(nidx);
                    }
                }
                ++pptr;
                goto tr;
            }
            rptr = (ref *) pptr;  /* * const beyond here */
            if (r_has_attr(rptr, l_mark)) {
                pptr = (ref_packed *)(rptr + 1);
                goto tr;
            }
            r_set_attrs(rptr, l_mark);
            new |= 1;
            if (r_space(rptr) < min_trace) { /* Note that this always picks up all scalars. */
                pptr = (ref_packed *) (rptr + 1);
                goto tr;
            }
            sp->ptr = rptr + 1;
            switch (r_type(rptr)) {
                    /* Struct cases */
                case t_file:
                    nptr = rptr->value.pfile;
                    /* rs: common descent path for struct-valued refs. */
                  rs:sp[1].is_refs = false;
                    sp[1].index = 0;
                    if (sp == stop) {
                        ptp = ptr_struct_type;
                        break;      /* falls through to push: below */
                    }
                    nep.ptr = nptr;
                    if (!ptr_struct_mark(&nep, pstate))
                        goto nr;
                    new |= 1;
                    (++sp)->ptr = nptr;
                    goto do_struct;
                case t_device:
                    nptr = rptr->value.pdevice;
                    goto rs;
                case t_fontID:
                case t_struct:
                case t_astruct:
                case t_pdfctx:
                    nptr = rptr->value.pstruct;
                    goto rs;
                    /* Non-trivial non-struct cases */
                case t_dictionary:
                    nptr = rptr->value.pdict;
                    sp[1].index = sizeof(dict) / sizeof(ref);
                    goto rrp;
                case t_array:
                    nptr = rptr->value.refs;
                    /* rr: common descent for sized ref arrays. */
                  rr:if ((sp[1].index = r_size(rptr)) == 0) { /* Set the base pointer to 0, */
                        /* so we never try to relocate it. */
                        rptr->value.refs = 0;
                        goto nr;
                    }
                  rrp:
                  rrc:sp[1].is_refs = true;
                    if (sp == stop) {
                        /*
                         * The following initialization is unnecessary:
                         * ptp will not be used if sp[1].is_refs = true.
                         * We put this here solely to get rid of bogus
                         * "possibly uninitialized variable" warnings
                         * from certain compilers.
                         */
                        ptp = ptr_ref_type;
                        break;      /* falls through to push: below */
                    }
                    new |= 1;
                    (++sp)->ptr = nptr;
                    goto do_refs;
                case t_mixedarray:
                case t_shortarray:
                    nptr = rptr->value.writable_packed;
                    goto rr;
                case t_name:
                    mark_name(names_index(nt, rptr));
                    /* nr: no descent needed; continue with the next ref. */
                  nr:pptr = (ref_packed *) (rptr + 1);
                    goto tr;
                case t_string:
                    if (gc_string_mark(rptr->value.bytes, r_size(rptr), true, pstate))
                        new |= 1;
                    goto nr;
                case t_oparray:
                    nptr = rptr->value.refs;  /* discard const */
                    sp[1].index = 1;
                    goto rrc;
                default:
                    goto nr;
            }
        }

        /* ---------------- Recursion ---------------- */

        /* push: the current mark-stack segment is full; extend it or
           record the overflow for a later rescan. */
      push:
        if (sp == stop) { /* The current segment is full. */
            int new_added = gc_extend_stack(pms, pstate);

            if (new_added) {
                /* Extension failed; overflow was recorded (new_added may
                   be -1, making 'new' negative to signal overflow). */
                new |= new_added;
                continue;
            }
            pms = pms->next;
            stop = pms->entries + pms->count - 1;
            pms->entries[1] = sp[1];
            sp = pms->entries;
        }
        /* index and is_refs are already set */
        if (!sp[1].is_refs) {
            nep.ptr = nptr;
            if (!(*ptp->mark) (&nep, pstate))
                continue;
            new |= 1;
        }
        (++sp)->ptr = nptr;
    }
    return new;
}
1040
/* Link to, attempting to allocate if necessary, */
/* another clump of mark stack. */
/*
 * Returns 0 if a next segment is available (pre-existing or newly
 * allocated).  If allocation fails, records the overflow position in the
 * containing clump's rescan range and returns 1 (or -1 if the rescan
 * range had to be extended); gc_trace folds this into its result so a
 * negative value propagates as "mark stack overflowed".
 */
static int
gc_extend_stack(gc_mark_stack * pms, gc_state_t * pstate)
{
    if (pms->next == 0) { /* Try to allocate another segment. */
        uint count;

        /* Halve the requested size until allocation succeeds or the
           minimum segment size is reached. */
        for (count = ms_size_desired; count >= ms_size_min; count >>= 1) {
            pms->next = (gc_mark_stack *)
                gs_alloc_bytes_immovable(pstate->heap,
                                         sizeof(gc_mark_stack) +
                                         sizeof(ms_entry) * count,
                                         "gc mark stack");
            if (pms->next != 0)
                break;
        }
        if (pms->next == 0) { /* The mark stack overflowed. */
            ms_entry *sp = pms->entries + pms->count - 1;
            byte *cptr = sp->ptr; /* container */
            clump_t *cp = gc_locate(cptr, pstate);
            int new = 1;

            if (cp == 0) { /* We were tracing outside collectible */
                /* storage.  This can't happen. */
                if_debug1('6', "mark stack overflowed while outside collectible space at "PRI_INTPTR"!\n",
                         (intptr_t)cptr);
                gs_abort(pstate->heap);
            }
            /* Widen the clump's rescan window to include the overflow
               point; -1 tells the caller a rescan pass is required. */
            if (cptr < cp->rescan_bot)
                cp->rescan_bot = cptr, new = -1;
            if (cptr > cp->rescan_top)
                cp->rescan_top = cptr, new = -1;
            return new;
        }
        gc_init_mark_stack(pms->next, count);
        pms->next->prev = pms;
        pms->next->on_heap = true;  /* must be freed when GC finishes */
    }
    return 0;
}
1081
1082
/* Mark a struct.  Return true if new mark. */
1083
static bool
1084
ptr_struct_mark(enum_ptr_t *pep, gc_state_t * ignored)
1085
313M
{
1086
313M
    obj_header_t *ptr = (obj_header_t *)pep->ptr;
1087
1088
313M
    if (ptr == 0)
1089
38.1M
        return false;
1090
275M
    ptr--;      /* point to header */
1091
275M
    if (!o_is_unmarked(ptr))
1092
268M
        return false;
1093
6.28M
    o_mark(ptr);
1094
6.28M
    return true;
1095
275M
}
1096
1097
/* Mark a string.  Return true if new mark. */
static bool
ptr_string_mark(enum_ptr_t *pep, gc_state_t * gcst)
{
    /* Delegate to the string marker; the 'true' argument requests that
       the mark actually be set (as opposed to cleared). */
    return gc_string_mark(pep->ptr, pep->size, true, gcst);
}
1103
1104
/* Mark a name.  Return true if new mark. */
static bool
ptr_name_index_mark(enum_ptr_t *pep, gc_state_t * gcst)
{
    /* NOTE: pep->size carries the name index here, not a byte count. */
    return names_mark_index(gcst->heap->gs_lib_ctx->gs_name_table, pep->size);
}
1110
1111
/* Finish tracing by marking names. */
1112
static bool
1113
gc_trace_finish(gc_state_t * pstate)
1114
19.3k
{
1115
19.3k
    name_table *nt = pstate->ntable;
1116
19.3k
    name_index_t nidx = 0;
1117
19.3k
    bool marked = false;
1118
1119
138M
    while ((nidx = names_next_valid_index(nt, nidx)) != 0) {
1120
138M
        name_string_t *pnstr = names_index_string_inline(nt, nidx);
1121
1122
138M
        if (pnstr->mark) {
1123
133M
            enum_ptr_t enst, ensst;
1124
1125
133M
            if (!pnstr->foreign_string &&
1126
133M
                gc_string_mark(pnstr->string_bytes, pnstr->string_size,
1127
120M
                               true, pstate)
1128
133M
                )
1129
120M
                marked = true;
1130
133M
            enst.ptr = names_index_sub_table(nt, nidx);
1131
133M
            ensst.ptr = names_index_string_sub_table(nt, nidx);
1132
133M
            marked |=
1133
133M
                ptr_struct_mark(&enst, pstate) |
1134
133M
                ptr_struct_mark(&ensst, pstate);
1135
133M
        }
1136
138M
    }
1137
19.3k
    return marked;
1138
19.3k
}
1139
1140
/* ------ Relocation planning phase ------ */
1141
1142
/* Initialize the relocation information in the clump header. */
1143
static void
1144
gc_init_reloc(clump_t * cp)
1145
3.35M
{
1146
3.35M
    clump_head_t *chead = cp->chead;
1147
1148
3.35M
    chead->dest = cp->cbase;
1149
3.35M
    chead->free.o_back =
1150
3.35M
        offset_of(clump_head_t, free) >> obj_back_shift;
1151
3.35M
    chead->free.o_size = sizeof(obj_header_t);
1152
3.35M
    chead->free.o_nreloc = 0;
1153
3.35M
}
1154
1155
/* Set marks and clear relocation for clumps that won't be compacted. */
static void
gc_clear_reloc(clump_t * cp)
{
    byte *pfree = (byte *) & cp->chead->free;

    gc_init_reloc(cp);
    /* SCAN_CLUMP_OBJECTS/DO_ALL iterate over every object header 'pre'
       (with its contents size in 'size') in the clump. */
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        const struct_shared_procs_t *procs =
    pre->o_type->shared;

    if (procs != 0)
        (*procs->clear_reloc) (pre, size);
    /* Mark the object as untraced so the relocation pass knows its
       pointers still need relocating even though it wasn't scanned. */
    o_set_untraced(pre);
    /* Back pointer is relative to the clump header's free entry,
       identifying this object as "in use" (not free). */
    pre->o_back = ((byte *) pre - pfree) >> obj_back_shift;
    END_OBJECTS_SCAN
        gc_strings_set_marks(cp, true);
    gc_strings_clear_reloc(cp);
}
1175
1176
/* Set the relocation for the objects in a clump. */
/* This will never be called for a clump with any o_untraced objects. */
/*
 * Walks all objects, accumulating 'reloc' (total bytes of freed space seen
 * so far).  Free objects get their finalizers run and are linked into the
 * back-pointer chain; live objects get a back pointer to the most recent
 * free object so relocation can be computed from it later.
 */
static void
gc_objects_set_reloc(gc_state_t * gcst, clump_t * cp)
{
    size_t reloc = 0;
    clump_head_t *chead = cp->chead;
    byte *pfree = (byte *) & chead->free; /* most recent free object */

    if_debug_clump('6', gcst->heap, "[6]setting reloc for clump", cp);
    gc_init_reloc(cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        struct_proc_finalize((*finalize));
    const struct_shared_procs_t *procs =
    pre->o_type->shared;

    /* An object is free if it is unmarked (no shared procs) or if its
       type-specific set_reloc proc says so. */
    if ((procs == 0 ? o_is_unmarked(pre) :
         !(*procs->set_reloc) (pre, reloc, size))
        ) {     /* Free object */
        reloc += sizeof(obj_header_t) + obj_align_round(size);
        /* Run the finalizer before the object's storage is reclaimed. */
        if ((finalize = pre->o_type->finalize) != 0) {
            if_debug2m('u', gcst->heap, "[u]GC finalizing %s "PRI_INTPTR"\n",
                       struct_type_name_string(pre->o_type),
                       (intptr_t)(pre + 1));
            (*finalize) (gcst->cur_mem, pre + 1);
        }
        pfree = (byte *) pre;
        /* Free objects point back to the clump head... */
        pre->o_back = (pfree - (byte *) chead) >> obj_back_shift;
        pre->o_nreloc = reloc;
        if_debug3m('7', gcst->heap, " [7]at "PRI_INTPTR", unmarked %lu, new reloc = %u\n",
                   (intptr_t)pre, (ulong) size, (unsigned int)reloc);
    } else {     /* Useful object */
        debug_check_object(pre, cp, gcst);
        /* ...while live objects point back to the nearest free object. */
        pre->o_back = ((byte *) pre - pfree) >> obj_back_shift;
    }
    END_OBJECTS_SCAN
#ifdef DEBUG
    if (reloc != 0) {
        if_debug1m('6', gcst->heap, "[6]freed %u", (unsigned int)reloc);
        if_debug_clump('6', gcst->heap, " in", cp);
    }
#endif
}
1220
1221
/* ------ Relocation phase ------ */
1222
1223
/* Relocate the pointers in all the objects in a clump. */
1224
static void
1225
gc_do_reloc(clump_t * cp, gs_ref_memory_t * mem, gc_state_t * pstate)
1226
3.35M
{
1227
3.35M
    clump_head_t *chead = cp->chead;
1228
1229
3.35M
    if_debug_clump('6', (const gs_memory_t *)mem, "[6]relocating in clump", cp);
1230
40.1M
    SCAN_CLUMP_OBJECTS(cp)
1231
40.1M
        DO_ALL
1232
#ifdef DEBUG
1233
        pstate->container = cp;
1234
#endif
1235
    /* We need to relocate the pointers in an object iff */
1236
    /* it is o_untraced, or it is a useful object. */
1237
    /* An object is free iff its back pointer points to */
1238
    /* the clump_head structure. */
1239
40.1M
        if (o_is_untraced(pre) ||
1240
40.1M
            pre->o_back << obj_back_shift != (byte *) pre - (byte *) chead
1241
40.1M
            ) {
1242
24.3M
            struct_proc_reloc_ptrs((*proc)) =
1243
24.3M
                pre->o_type->reloc_ptrs;
1244
1245
24.3M
            if_debug3m('7', (const gs_memory_t *)mem,
1246
24.3M
                       " [7]relocating ptrs in %s(%lu) "PRI_INTPTR"\n",
1247
24.3M
                       struct_type_name_string(pre->o_type),
1248
24.3M
                       (ulong)size, (intptr_t)pre);
1249
24.3M
            if (proc != 0)
1250
24.3M
                (*proc) (pre + 1, size, pre->o_type, pstate);
1251
24.3M
        }
1252
#ifdef DEBUG
1253
        pstate->container = 0;
1254
#endif
1255
40.1M
    END_OBJECTS_SCAN
1256
3.35M
}
1257
1258
/* Print pointer relocation if debugging. */
/* We have to provide this procedure even if DEBUG is not defined, */
/* in case one of the other GC modules was compiled with DEBUG. */
const void *
print_reloc_proc(const void *obj, const char *cname, const void *robj)
{
    if_debug3('9', "  [9]relocate %s * "PRI_INTPTR" to "PRI_INTPTR"\n",
              cname, (intptr_t)obj, (intptr_t)robj);
    /* Pass the relocated pointer through so this can wrap an expression. */
    return robj;
}
1268
1269
/* Relocate a pointer to an (aligned) object. */
/* See gsmemory.h for why the argument is const and the result is not. */
/*
 * The relocation is computed from the back-pointer chain set up by
 * gc_objects_set_reloc: the object's o_back leads to the most recent
 * preceding free object, whose o_nreloc is the total freed space before
 * it; the new address is the clump's dest plus the old offset minus that
 * freed space.
 */
static void /*obj_header_t */ *
igc_reloc_struct_ptr(const void /*obj_header_t */ *obj, gc_state_t * gcst)
{
    const obj_header_t *const optr = (const obj_header_t *)obj;
    const void *robj;

    if (obj == 0) {
        discard(print_reloc(obj, "NULL", 0));
        return 0;
    }
    debug_check_object(optr - 1, NULL, gcst);
    {
        uint back = optr[-1].o_back;

        if (back == o_untraced)
            robj = obj;         /* untraced objects do not move */
        else {
#ifdef DEBUG
            /* Do some sanity checking. */
            clump_t *cp = gcst->container;

            if (cp != 0 && cp->cbase <= (byte *)obj && (byte *)obj <cp->ctop) {
                if (back > (cp->ctop - cp->cbase) >> obj_back_shift) {
                    if_debug2('6', "Invalid back pointer %u at "PRI_INTPTR"!\n",
                             back, (intptr_t)obj);
                    gs_abort(NULL);
                }
            } else {
                /* Pointed to unknown clump. Can't check it, sorry. */
            }
#endif
            {
                /* Follow o_back to the preceding free object... */
                const obj_header_t *pfree = (const obj_header_t *)
                ((const char *)(optr - 1) -
                 (back << obj_back_shift));
                /* ...whose own o_back leads to the clump head. */
                const clump_head_t *chead = (const clump_head_t *)
                ((const char *)pfree -
                 (pfree->o_back << obj_back_shift));

                robj = chead->dest +
                    ((const char *)obj - (const char *)(chead + 1) -
                     pfree->o_nreloc);
            }
        }
    }
    /* Use a severely deprecated pun to remove the const property. */
    {
        union { const void *r; void *w; } u;

        u.r = print_reloc(obj, struct_type_name_string(optr[-1].o_type), robj);
        return u.w;
    }
}
1324
1325
/* ------ Compaction phase ------ */
1326
1327
/* Compact the objects in a clump. */
/* This will never be called for a clump with any o_untraced objects. */
/*
 * Slides every live object down to 'dpre' (starting at the clump's
 * relocation destination), then fills the reclaimed tail and resets the
 * clump's allocation pointer.
 */
static void
gc_objects_compact(clump_t * cp, gc_state_t * gcst)
{
    clump_head_t *chead = cp->chead;
    obj_header_t *dpre = (obj_header_t *) chead->dest;
    const gs_memory_t *cmem = gcst->spaces.memories.named.system->stable_memory;

    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
    /* An object is free iff its back pointer points to */
    /* the clump_head structure. */
        if (pre->o_back << obj_back_shift != (byte *) pre - (byte *) chead) {
        const struct_shared_procs_t *procs = pre->o_type->shared;

        debug_check_object(pre, cp, gcst);
        if_debug4m('7', cmem,
                   " [7]compacting %s "PRI_INTPTR"(%lu) to "PRI_INTPTR"\n",
                   struct_type_name_string(pre->o_type),
                   (intptr_t)pre, (ulong)size, (intptr_t)dpre);
        if (procs == 0) {
            /* No type-specific compactor: move header + contents.
               memmove because source and destination may overlap. */
            if (dpre != pre)
                memmove(dpre, pre,
                        sizeof(obj_header_t) + size);
        } else
            (*procs->compact) (cmem, pre, dpre, size);
        dpre = (obj_header_t *)
            ((byte *) dpre + obj_size_round(size));
    }
    END_OBJECTS_SCAN
        if (cp->outer == 0 && chead->dest != cp->cbase)
        dpre = (obj_header_t *) cp->cbase; /* compacted this clump into another */
    /* Fill the now-unused tail for debugging, then shrink the clump. */
    gs_alloc_fill(dpre, gs_alloc_fill_collected, cp->cbot - (byte *) dpre);
    cp->cbot = (byte *) dpre;
    cp->rcur = 0;
    cp->rtop = 0;   /* just to be sure */
}
1365
1366
/* ------ Cleanup ------ */
1367
1368
static splay_app_result_t
1369
free_if_empty(clump_t *cp, void *arg)
1370
3.28M
{
1371
3.28M
    gs_ref_memory_t * mem = (gs_ref_memory_t *)arg;
1372
1373
3.28M
    if (cp->cbot == cp->cbase && cp->ctop == cp->climit &&
1374
3.28M
        cp->outer == 0 && cp->inner_count == 0)
1375
150k
    {
1376
150k
        alloc_free_clump(cp, mem);
1377
150k
        if (mem->cc == cp)
1378
0
            mem->cc = NULL;
1379
150k
    }
1380
3.28M
    return SPLAY_APP_CONTINUE;
1381
3.28M
}
1382
1383
/* Free empty clumps. */
/* Applies free_if_empty to every clump in the allocator's splay tree. */
static void
gc_free_empty_clumps(gs_ref_memory_t * mem)
{
    /* NOTE: Not in reverse order any more, so potentially
     * not quite as good for crap allocators. */
    clump_splay_app(mem->root, mem, free_if_empty, mem);
}
1391
1392
/* Return the stable system memory for a GC state. */
const gs_memory_t * gcst_get_memory_ptr(gc_state_t *gcst)
{
    /* NOTE: the space_system macro expands in terms of the local
       variable 'spaces', so both declarations below are required. */
    vm_spaces spaces = gcst->spaces;
    const gs_memory_t *cmem = space_system->stable_memory;
    return cmem;
}