Coverage Report

Created: 2022-10-31 07:00

/src/ghostpdl/psi/igc.c
Line
Count
Source (jump to first uncovered line)
1
/* Copyright (C) 2001-2021 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  1305 Grant Avenue - Suite 200, Novato,
13
   CA 94945, U.S.A., +1(415)492-9861, for further information.
14
*/
15
16
17
/* Garbage collector for Ghostscript */
18
#include "memory_.h"
19
#include "ghost.h"
20
#include "ierrors.h"
21
#include "gsexit.h"
22
#include "gsmdebug.h"
23
#include "gsstruct.h"
24
#include "iastate.h"
25
#include "isave.h"
26
#include "isstate.h"
27
#include "idict.h"
28
#include "ipacked.h"
29
#include "istruct.h"
30
#include "igc.h"
31
#include "igcstr.h"
32
#include "inamedef.h"
33
#include "opdef.h"    /* for marking oparray names */
34
35
/* Define whether to force all garbage collections to be global. */
/* Debug/testing switch only; normally false. */
static const bool I_FORCE_GLOBAL_GC = false;

/* Define whether to bypass the collector entirely. */
/* Debug/testing switch only; when true gs_gc_reclaim jumps straight to
   no_collect and reclaims nothing. */
static const bool I_BYPASS_GC = false;
40
41
/* Define an entry on the mark stack. */
typedef struct {
    void *ptr;      /* object (or ref block) being traced; 0 marks stack bottom */
    uint index;     /* enumeration index within *ptr for resuming the trace */
    bool is_refs;   /* true if ptr addresses a block of refs, false for a struct */
} ms_entry;
47
48
/* Define (a segment of) the mark stack. */
/* entries[0] has ptr = 0 to indicate the bottom of the stack. */
/* count additional entries follow this structure. */
/* Segments form a doubly linked list so tracing can spill into further
   segments and unwind back when a segment is exhausted. */
typedef struct gc_mark_stack_s gc_mark_stack;
struct gc_mark_stack_s {
    gc_mark_stack *prev;
    gc_mark_stack *next;
    uint count;
    bool on_heap;   /* if true, allocated during GC */
    ms_entry entries[1];
};
59
60
/* Define the mark stack sizing parameters. */
/* The default segment lives on the C stack of gs_gc_reclaim; extra
   segments are carved out of free clump space, or heap-allocated as a
   last resort (see the on_heap flag). */
#define ms_size_default 100  /* default, allocated on C stack */
/* This should probably be defined as a parameter somewhere.... */
#define ms_size_desired   /* for additional allocation */\
 ((max_ushort - sizeof(gc_mark_stack)) / sizeof(ms_entry) - 10)
#define ms_size_min 50    /* min size for segment in free block */
66
67
/* Forward references */
68
69
static void gc_init_mark_stack(gc_mark_stack *, uint);
70
static void gc_objects_clear_marks(const gs_memory_t *mem, clump_t *);
71
static void gc_unmark_names(name_table *, op_array_table *, op_array_table *);
72
static int gc_trace(gs_gc_root_t *, gc_state_t *, gc_mark_stack *);
73
static int gc_rescan_clump(clump_t *, gc_state_t *, gc_mark_stack *);
74
static int gc_trace_clump(const gs_memory_t *mem, clump_t *, gc_state_t *, gc_mark_stack *);
75
static bool gc_trace_finish(gc_state_t *);
76
static void gc_clear_reloc(clump_t *);
77
static void gc_objects_set_reloc(gc_state_t * gcst, clump_t *);
78
static void gc_do_reloc(clump_t *, gs_ref_memory_t *, gc_state_t *);
79
static void gc_objects_compact(clump_t *, gc_state_t *);
80
static void gc_free_empty_clumps(gs_ref_memory_t *);
81
82
/* Forward references for pointer types */
83
static ptr_proc_unmark(ptr_struct_unmark);
84
static ptr_proc_mark(ptr_struct_mark);
85
static ptr_proc_unmark(ptr_string_unmark);
86
static ptr_proc_mark(ptr_string_mark);
87
static ptr_proc_unmark(ptr_name_index_unmark);
88
static ptr_proc_mark(ptr_name_index_mark);
89
/*ptr_proc_unmark(ptr_ref_unmark); *//* in igc.h */
90
/*ptr_proc_mark(ptr_ref_mark); *//* in igc.h */
91
static ptr_proc_reloc(igc_reloc_struct_ptr, void);
92
93
ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed);  /* in igcref.c */
94
refs_proc_reloc(igc_reloc_refs);  /* in igcref.c */
95
96
/* Define this GC's procedure vector. */
/* Relocation procedures handed to the rest of the allocator via
   gc_state_t; the ref procedures live in igcref.c. */
static const gc_procs_with_refs_t igc_procs = {
    igc_reloc_struct_ptr, igc_reloc_string, igc_reloc_const_string,
    igc_reloc_param_string, igc_reloc_ref_ptr, igc_reloc_refs
};
101
102
/* Pointer type descriptors. */
/* Note that the trace/mark routine has special knowledge of ptr_ref_type */
/* and ptr_struct_type -- it assumes that no other types have embedded */
/* pointers.  Note also that the reloc procedures for string, ref and name */
/* pointers are never called. */
/* Each descriptor bundles the unmark/mark/reloc procedures for one
   pointer category; the NULL reloc slots are the never-called ones. */
typedef ptr_proc_reloc((*ptr_proc_reloc_t), void);
const gs_ptr_procs_t ptr_struct_procs =
{ptr_struct_unmark, ptr_struct_mark, (ptr_proc_reloc_t) igc_reloc_struct_ptr};
const gs_ptr_procs_t ptr_string_procs =
{ptr_string_unmark, ptr_string_mark, NULL};
const gs_ptr_procs_t ptr_const_string_procs =
{ptr_string_unmark, ptr_string_mark, NULL};
const gs_ptr_procs_t ptr_ref_procs =
{ptr_ref_unmark, ptr_ref_mark, NULL};
const gs_ptr_procs_t ptr_name_index_procs =
{ptr_name_index_unmark, ptr_name_index_mark, NULL};
118
119
/* ------ Main program ------ */
120
121
/* Top level of garbage collector. */
122
#ifdef DEBUG
123
static void
124
end_phase(const gs_memory_t *mem, const char *str)
125
{
126
    if (gs_debug_c('6')) {
127
        dmlprintf1(mem, "[6]---------------- end %s ----------------\n",
128
                   (const char *)str);
129
        dmflush(mem);
130
    }
131
}
132
static const char *const depth_dots_string = "..........";
133
static const char *
134
depth_dots(const ms_entry * sp, const gc_mark_stack * pms)
135
{
136
    int depth = sp - pms->entries - 1;
137
    const gc_mark_stack *pss = pms;
138
139
    while ((pss = pss->prev) != 0)
140
        depth += pss->count - 1;
141
    return depth_dots_string + (depth >= 10 ? 0 : 10 - depth);
142
}
143
static void
144
gc_validate_spaces(gs_ref_memory_t **spaces, int max_space, gc_state_t *gcst)
145
{
146
    int i;
147
    gs_ref_memory_t *mem;
148
149
    for (i = 1; i <= max_space; ++i)
150
        if ((mem = spaces[i]) != 0)
151
            ialloc_validate_memory(mem, gcst);
152
}
153
#else  /* !DEBUG */
154
2.91M
#  define end_phase(mem,str) DO_NOTHING
155
#endif /* DEBUG */
156
157
/*
 * Top level of the garbage collector.  Traces from space_system through
 * max_trace (system, global, local, and their stable variants when
 * distinct), and collects/compacts the spaces from min_collect up --
 * all of them when 'global' is true, only the local ones otherwise.
 * The collection runs as a fixed sequence of phases; each phase is
 * delimited by an end_phase() trace call below.
 */
void
gs_gc_reclaim(vm_spaces * pspaces, bool global)
{
#define nspaces ((i_vm_max + 1) * 2) /* * 2 for stable allocators */

    vm_spaces spaces;
    gs_ref_memory_t *space_memories[nspaces];
    gs_gc_root_t space_roots[nspaces];
    int max_trace;    /* max space_ to trace */
    int min_collect;    /* min space_ to collect */
    int min_collect_vm_space; /* min VM space to collect */
    int ispace;
    gs_ref_memory_t *mem;
    clump_t *cp;
    gs_gc_root_t *rp;
    gc_state_t state;
    /* Default mark-stack segment, allocated on the C stack. */
    struct _msd {
        gc_mark_stack stack;
        ms_entry body[ms_size_default];
    } ms_default;
    gc_mark_stack *mark_stack = &ms_default.stack;
    const gs_memory_t *cmem;
    clump_splay_walker sw;

    /* Optionally force global GC for debugging. */

    if (I_FORCE_GLOBAL_GC)
        global = true;

    /* Determine which spaces we are tracing and collecting. */

    spaces = *pspaces;
    cmem = space_system->stable_memory;
    space_memories[1] = space_system;
    space_memories[2] = space_global;
    min_collect = max_trace = 2;
    min_collect_vm_space = i_vm_global;
    if (space_global->stable_memory != (gs_memory_t *)space_global)
        space_memories[++max_trace] =
            (gs_ref_memory_t *)space_global->stable_memory;
    if (space_global != space_local) {
        space_memories[++max_trace] = space_local;
        min_collect = max_trace;
        min_collect_vm_space = i_vm_local;
        if (space_local->stable_memory != (gs_memory_t *)space_local)
            space_memories[++max_trace] =
                (gs_ref_memory_t *)space_local->stable_memory;
    }
    if (global)
        min_collect = min_collect_vm_space = 1;

/* Iteration helpers over spaces, allocators, clumps and roots. */
#define for_spaces(i, n)\
  for (i = 1; i <= n; ++i)
#define for_collected_spaces(i)\
  for (i = min_collect; i <= max_trace; ++i)
#define for_space_mems(i, mem)\
  for (mem = space_memories[i]; mem != 0; mem = &mem->saved->state)
#define for_mem_clumps(mem, cp, sw)\
  for (cp = clump_splay_walk_init(sw, mem); cp != 0; cp = clump_splay_walk_fwd(sw))
#define for_space_clumps(i, mem, cp, sw)\
  for_space_mems(i, mem) for_mem_clumps(mem, cp, sw)
#define for_clumps(i, n, mem, cp, sw)\
  for_spaces(i, n) for_space_clumps(i, mem, cp, sw)
#define for_collected_clumps(i, mem, cp, sw)\
  for_collected_spaces(i) for_space_clumps(i, mem, cp, sw)
#define for_roots(i, n, mem, rp)\
  for_spaces(i, n)\
    for (mem = space_memories[i], rp = mem->roots; rp != 0; rp = rp->next)

    /* Initialize the state. */

    state.procs = &igc_procs;
    state.loc.memory = space_global; /* any one will do */

    state.loc.cp = 0;
    state.spaces = spaces;
    state.min_collect = min_collect_vm_space << r_space_shift;
    state.relocating_untraced = false;
    state.heap = state.loc.memory->non_gc_memory;
    state.ntable = state.heap->gs_lib_ctx->gs_name_table;

    /* Register the allocators themselves as roots, */
    /* so we mark and relocate the change and save lists properly. */

    for_spaces(ispace, max_trace) {
        rp = &space_roots[ispace];
        gs_register_struct_root((gs_memory_t *)space_memories[ispace],
                                &rp,
                                (void **)&space_memories[ispace],
                                "gc_top_level");
    }

    end_phase(state.heap,"register space roots");

#ifdef DEBUG

    /* Pre-validate the state.  This shouldn't be necessary.... */

    gc_validate_spaces(space_memories, max_trace, &state);

    end_phase(state.heap,"pre-validate pointers");

#endif

    if (I_BYPASS_GC) {   /* Don't collect at all. */
        goto no_collect;
    }

    /* Clear marks in spaces to be collected. */

    for_collected_spaces(ispace)
        for_space_clumps(ispace, mem, cp, &sw) {
            gc_objects_clear_marks((const gs_memory_t *)mem, cp);
            gc_strings_set_marks(cp, false);
        }

    end_phase(state.heap,"clear clump marks");

    /* Clear the marks of roots.  We must do this explicitly, */
    /* since some roots are not in any clump. */

    for_roots(ispace, max_trace, mem, rp) {
        enum_ptr_t eptr;

        eptr.ptr = *rp->p;
        if_debug_root('6', (const gs_memory_t *)mem, "[6]unmarking root", rp);
        (*rp->ptype->unmark)(&eptr, &state);
    }

    end_phase(state.heap,"clear root marks");

    /* For a global GC, also reset name marks (op_array names stay marked). */
    if (global) {
        op_array_table *global_ops = get_global_op_array(cmem);
        op_array_table *local_ops  = get_local_op_array(cmem);
        gc_unmark_names(state.ntable, global_ops, local_ops);
    }

    /* Initialize the (default) mark stack. */

    gc_init_mark_stack(&ms_default.stack, ms_size_default);
    ms_default.stack.prev = 0;
    ms_default.stack.on_heap = false;

    /* Add all large-enough free blocks to the mark stack. */
    /* Also initialize the rescan pointers. */

    {
        gc_mark_stack *end = mark_stack;

        for_clumps(ispace, max_trace, mem, cp, &sw) {
            uint avail = cp->ctop - cp->cbot;

            if (avail >= sizeof(gc_mark_stack) + sizeof(ms_entry) *
                ms_size_min &&
                !cp->inner_count
                ) {
                gc_mark_stack *pms = (gc_mark_stack *) cp->cbot;

                gc_init_mark_stack(pms, (avail - sizeof(gc_mark_stack)) /
                                   sizeof(ms_entry));
                /* NOTE(review): 'end' is never advanced, so each qualifying
                   block overwrites end->next and only the last one seen
                   stays chained after ms_default -- confirm intended. */
                end->next = pms;
                pms->prev = end;
                pms->on_heap = false;
                if_debug2m('6', (const gs_memory_t *)mem,
                           "[6]adding free "PRI_INTPTR"(%u) to mark stack\n",
                           (intptr_t)pms, pms->count);
            }
            cp->rescan_bot = cp->cend;
            cp->rescan_top = cp->cbase;
        }
    }

    /* Mark reachable objects. */

    {
        int more = 0;

        /* Mark from roots. */

        for_roots(ispace, max_trace, mem, rp) {
            if_debug_root('6', (const gs_memory_t *)mem, "[6]marking root", rp);
            more |= gc_trace(rp, &state, mark_stack);
        }

        end_phase(state.heap,"mark");

        /* If this is a local GC, mark from non-local clumps. */

        if (!global)
            for_clumps(ispace, min_collect - 1, mem, cp, &sw)
                more |= gc_trace_clump((const gs_memory_t *)mem, cp, &state, mark_stack);

        /* Handle mark stack overflow. */

        while (more < 0) { /* stack overflowed */
            more = 0;
            for_clumps(ispace, max_trace, mem, cp, &sw)
                more |= gc_rescan_clump(cp, &state, mark_stack);
        }

        end_phase(state.heap,"mark overflow");
    }

    /* Free the mark stack. */

    {
        gc_mark_stack *pms = mark_stack;

        /* Walk to the last segment, then free back-to-front via prev. */
        while (pms->next)
            pms = pms->next;
        while (pms) {
            gc_mark_stack *prev = pms->prev;

            if (pms->on_heap)
                gs_free_object(state.heap, pms, "gc mark stack");
            else
                gs_alloc_fill(pms, gs_alloc_fill_free,
                              sizeof(*pms) + sizeof(ms_entry) * pms->count);
            pms = prev;
        }
    }

    end_phase(state.heap,"free mark stack");

    if (global) {
        gc_trace_finish(&state);
        names_trace_finish(state.ntable, &state);

        end_phase(state.heap,"finish trace");
    }

    /* Filter save change lists with removing elements,
       which point to unmarked blocks of refs. */
    {
        int i;

        for_collected_spaces(i) {
            /* Inner 'mem' deliberately shadows the outer loop variable. */
            gs_ref_memory_t *mem = space_memories[i];

            alloc_save__filter_changes(mem);
        }
    }
    /* Clear marks and relocation in spaces that are only being traced. */
    /* We have to clear the marks first, because we want the */
    /* relocation to wind up as o_untraced, not o_unmarked. */

    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
        gc_objects_clear_marks((const gs_memory_t *)mem, cp);

    end_phase(state.heap,"post-clear marks");

    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
        gc_clear_reloc(cp);

    end_phase(state.heap,"clear reloc");

    /* Set the relocation of roots outside any clump to o_untraced, */
    /* so we won't try to relocate pointers to them. */
    /* (Currently, there aren't any.) */

    /* Disable freeing in the allocators of the spaces we are */
    /* collecting, so finalization procedures won't cause problems. */
    {
        int i;

        for_collected_spaces(i)
            gs_enable_free((gs_memory_t *)space_memories[i], false);
    }

    /* Compute relocation based on marks, in the spaces */
    /* we are going to compact.  Also finalize freed objects. */
    /* NOTE(review): 'mem' is stale here (left over from the loops above);
       presumably gc_objects_set_reloc re-derives the current allocator as
       it walks -- verify. */
    state.cur_mem = (gs_memory_t *)mem;

    for_collected_clumps(ispace, mem, cp, &sw) {
        gc_objects_set_reloc(&state, cp);
        gc_strings_set_reloc(cp);
    }

    /* Re-enable freeing. */
    {
        int i;

        for_collected_spaces(i)
            gs_enable_free((gs_memory_t *)space_memories[i], true);
    }

    end_phase(state.heap,"set reloc");

    /* Relocate pointers. */

    state.relocating_untraced = true;
    for_clumps(ispace, min_collect - 1, mem, cp, &sw)
        gc_do_reloc(cp, mem, &state);
    state.relocating_untraced = false;
    for_collected_clumps(ispace, mem, cp, &sw)
        gc_do_reloc(cp, mem, &state);

    end_phase(state.heap,"relocate clumps");

    for_roots(ispace, max_trace, mem, rp) {
        if_debug3m('6', (const gs_memory_t *)mem,
                   "[6]relocating root "PRI_INTPTR": "PRI_INTPTR" -> "PRI_INTPTR"\n",
                   (intptr_t)rp, (intptr_t)rp->p, (intptr_t)*rp->p);
        if (rp->ptype == ptr_ref_type) {
            /* Ref roots are relocated in place as a one-ref block. */
            ref *pref = (ref *) * rp->p;

            igc_reloc_refs((ref_packed *) pref,
                           (ref_packed *) (pref + 1),
                           &state);
        } else
            *rp->p = (*rp->ptype->reloc) (*rp->p, &state);
        if_debug3m('6', (const gs_memory_t *)mem,
                   "[6]relocated root "PRI_INTPTR": "PRI_INTPTR" -> "PRI_INTPTR"\n",
                   (intptr_t)rp, (intptr_t)rp->p, (intptr_t)*rp->p);
    }

    end_phase(state.heap,"relocate roots");

    /* Compact data.  We only do this for spaces we are collecting. */

    for_collected_spaces(ispace) {
        for_space_mems(ispace, mem) {
            for_mem_clumps(mem, cp, &sw) {
                if_debug_clump('6', (const gs_memory_t *)mem, "[6]compacting clump", cp);
                gc_objects_compact(cp, &state);
                gc_strings_compact(cp, cmem);
                if_debug_clump('6', (const gs_memory_t *)mem, "[6]after compaction:", cp);
            }
            mem->saved = mem->reloc_saved;
            ialloc_reset_free(mem);
        }
    }

    end_phase(state.heap,"compact");

    /* Free empty clumps. */

    for_collected_spaces(ispace) {
        for_space_mems(ispace, mem) {
            gc_free_empty_clumps(mem);
        }
    }

    end_phase(state.heap,"free empty clumps");

    /*
     * Update previous_status to reflect any freed clumps,
     * and set inherited to the negative of allocated,
     * so it has no effect.  We must update previous_status by
     * working back-to-front along the save chain, using pointer reversal.
     * (We could update inherited in any order, since it only uses
     * information local to the individual save level.)
     */

    for_collected_spaces(ispace) { /* Reverse the pointers. */
        alloc_save_t *curr;
        alloc_save_t *prev = 0;
        alloc_save_t *next;
        gs_memory_status_t total;

        for (curr = space_memories[ispace]->saved; curr != 0;
             prev = curr, curr = next
            ) {
            next = curr->state.saved;
            curr->state.saved = prev;
        }
        /* Now work the other way, accumulating the values. */
        total.allocated = 0, total.used = 0;
        for (curr = prev, prev = 0; curr != 0;
             prev = curr, curr = next
            ) {
            mem = &curr->state;
            next = mem->saved;
            mem->saved = prev;
            mem->previous_status = total;
            if_debug3m('6', (const gs_memory_t *)mem,
                       "[6]"PRI_INTPTR" previous allocated=%lu, used=%lu\n",
                       (intptr_t)mem, total.allocated, total.used);
            gs_memory_status((gs_memory_t *) mem, &total);
            mem->gc_allocated = mem->allocated + total.allocated;
        }
        mem = space_memories[ispace];
        mem->previous_status = total;
        mem->gc_allocated = mem->allocated + total.allocated;
        if_debug3m('6', (const gs_memory_t *)mem,
                   "[6]"PRI_INTPTR" previous allocated=%lu, used=%lu\n",
                   (intptr_t)mem, total.allocated, total.used);
    }

    end_phase(state.heap,"update stats");

  no_collect:

    /* Unregister the allocator roots. */

    for_spaces(ispace, max_trace)
        gs_unregister_root((gs_memory_t *)space_memories[ispace],
                           &space_roots[ispace], "gc_top_level");

    end_phase(state.heap,"unregister space roots");

#ifdef DEBUG

    /* Validate the state.  This shouldn't be necessary.... */

    gc_validate_spaces(space_memories, max_trace, &state);

    end_phase(state.heap,"validate pointers");

#endif
}
568
569
/* ------ Debugging utilities ------ */
570
571
/* Validate a pointer to an object header. */
572
#ifdef DEBUG
573
#  define debug_check_object(pre, cp, gcst)\
574
     ialloc_validate_object((pre) + 1, cp, gcst)
575
#else
576
5.73G
#  define debug_check_object(pre, cp, gcst) DO_NOTHING
577
#endif
578
579
/* ------ Unmarking phase ------ */
580
581
/* Unmark a single struct. */
582
static void
583
ptr_struct_unmark(enum_ptr_t *pep, gc_state_t * ignored)
584
1.82M
{
585
1.82M
    void *const vptr = (void *)pep->ptr; /* break const */
586
587
1.82M
    if (vptr != 0)
588
1.82M
        o_set_unmarked(((obj_header_t *) vptr - 1));
589
1.82M
}
590
591
/* Unmark a single string. */
592
/* Unmark a single string. */
/* Strings are marked in the per-clump string-mark bitmap, so unmarking
   is just marking with set == false; the result is discarded. */
static void
ptr_string_unmark(enum_ptr_t *pep, gc_state_t * gcst)
{
    discard(gc_string_mark(pep->ptr, pep->size, false, gcst));
}
597
598
/* Unmark a single name. */
599
/* Unmark a single name. */
/* Intentionally empty: name marks are cleared wholesale by
   gc_unmark_names, not through this per-pointer procedure. */
static void
ptr_name_index_unmark(enum_ptr_t *pep, gc_state_t * gcst)
{
    /* Do nothing */
}
604
605
/* Unmark the objects in a clump. */
606
/* Unmark the objects in a clump. */
/* Walks every object header in cp (including free objects), sets it
   unmarked, and invokes the type's clear_marks procedure (if any) so
   embedded pointers are unmarked too. */
static void
gc_objects_clear_marks(const gs_memory_t *mem, clump_t * cp)
{
    if_debug_clump('6', mem, "[6]unmarking clump", cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        struct_proc_clear_marks((*proc)) =
        pre->o_type->clear_marks;
#ifdef DEBUG
    /* Free objects have no valid contents to validate. */
    if (pre->o_type != &st_free)
        debug_check_object(pre, cp, NULL);
#endif
    if_debug3m('7', (const gs_memory_t *)mem, " [7](un)marking %s(%lu) "PRI_INTPTR"\n",
               struct_type_name_string(pre->o_type),
               (ulong) size, (intptr_t)pre);
    o_set_unmarked(pre);
    if (proc != 0)
        (*proc) (mem, pre + 1, size, pre->o_type);
    END_OBJECTS_SCAN
}
626
627
/* Mark 0- and 1-character names, and those referenced from the */
628
/* op_array_nx_table, and unmark all the rest. */
629
static void
630
gc_unmark_names(name_table * nt, op_array_table *op_array_table_global,
631
                op_array_table *op_array_table_local)
632
178k
{
633
178k
    uint i;
634
635
178k
    names_unmark_all(nt);
636
34.6M
    for (i = 0; i < op_array_table_global->count; i++) {
637
34.4M
        name_index_t nidx = op_array_table_global->nx_table[i];
638
639
34.4M
        names_mark_index(nt, nidx);
640
34.4M
    }
641
178k
    for (i = 0; i < op_array_table_local->count; i++) {
642
0
        name_index_t nidx = op_array_table_local->nx_table[i];
643
644
0
        names_mark_index(nt, nidx);
645
0
    }
646
178k
}
647
648
/* ------ Marking phase ------ */
649
650
/* Initialize (a segment of) the mark stack. */
651
static void
652
gc_init_mark_stack(gc_mark_stack * pms, uint count)
653
6.91M
{
654
6.91M
    pms->next = 0;
655
6.91M
    pms->count = count;
656
6.91M
    pms->entries[0].ptr = 0;
657
6.91M
    pms->entries[0].index = 0;
658
6.91M
    pms->entries[0].is_refs = false;
659
6.91M
}
660
661
/* Mark starting from all marked objects in the interval of a clump */
662
/* needing rescanning. */
663
/* Mark starting from all marked objects in the interval of a clump */
/* needing rescanning. */
/* Returns 0 if nothing to do, otherwise the OR of the gc_trace results
   (1 = marked something new, negative = mark stack overflowed again). */
static int
gc_rescan_clump(clump_t * cp, gc_state_t * pstate, gc_mark_stack * pmstack)
{
    byte *sbot = cp->rescan_bot;
    byte *stop = cp->rescan_top;
    gs_gc_root_t root;
    void *comp;   /* current object; root.p points at this */
    int more = 0;
    const gs_memory_t *mem = gcst_get_memory_ptr( pstate );

    /* An empty rescan interval means the clump needs no work. */
    if (sbot > stop)
        return 0;
    root.p = &comp;
    if_debug_clump('6', mem, "[6]rescanning clump", cp);
    /* Reset the interval before rescanning; tracing may set it again. */
    cp->rescan_bot = cp->cend;
    cp->rescan_top = cp->cbase;
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        /* Skip objects wholly below the interval (empty statement). */
        if ((byte *) (pre + 1) + size < sbot);
    else if ((byte *) (pre + 1) > stop)
        return more;   /* 'break' won't work here */
    else {
        if_debug2m('7', mem, " [7]scanning/marking "PRI_INTPTR"(%lu)\n",
                   (intptr_t)pre, (ulong)size);
        if (pre->o_type == &st_refs) {
            /* A block of refs: re-trace each ref that is still marked,
               clearing its mark first so gc_trace re-marks it. */
            ref_packed *rp = (ref_packed *) (pre + 1);
            char *end = (char *)rp + size;

            root.ptype = ptr_ref_type;
            while ((char *)rp < end) {
                comp = rp;
                if (r_is_packed(rp)) {
                    if (r_has_pmark(rp)) {
                        r_clear_pmark(rp);
                        more |= gc_trace(&root, pstate,
                                         pmstack);
                    }
                    rp++;
                } else {
                    ref *const pref = (ref *)rp;

                    if (r_has_attr(pref, l_mark)) {
                        r_clear_attrs(pref, l_mark);
                        more |= gc_trace(&root, pstate, pmstack);
                    }
                    rp += packed_per_ref;
                }
            }
        } else if (!o_is_unmarked(pre)) {
            /* A marked struct: unmark it (unless untraced), clear marks
               in its embedded pointers, then re-trace from it. */
            struct_proc_clear_marks((*proc)) =
                pre->o_type->clear_marks;
            root.ptype = ptr_struct_type;
            comp = pre + 1;
            if (!o_is_untraced(pre))
                o_set_unmarked(pre);
            if (proc != 0)
                (*proc) (mem, comp, size, pre->o_type);
            more |= gc_trace(&root, pstate, pmstack);
        }
    }
    END_OBJECTS_SCAN
        return more;
}
726
727
/* Mark starting from all the objects in a clump. */
728
/* We assume that pstate->min_collect > avm_system, */
729
/* so we don't have to trace names. */
730
/* Mark starting from all the objects in a clump. */
/* We assume that pstate->min_collect > avm_system, */
/* so we don't have to trace names. */
/* Used by local (non-global) GCs to treat every object in a non-collected
   clump as a root.  Returns the OR of gc_trace results (negative on mark
   stack overflow). */
static int
gc_trace_clump(const gs_memory_t *mem, clump_t * cp, gc_state_t * pstate, gc_mark_stack * pmstack)
{
    gs_gc_root_t root;
    void *comp;   /* current object; root.p points at this */
    int more = 0;
    int min_trace = pstate->min_collect;

    root.p = &comp;
    if_debug_clump('6', mem, "[6]marking from clump", cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
    {
        if_debug2m('7', mem, " [7]scanning/marking "PRI_INTPTR"(%lu)\n",
                   (intptr_t)pre, (ulong)size);
        if (pre->o_type == &st_refs) {
            /* A block of refs: trace only refs that point into a space
               actually being collected (r_space >= min_trace). */
            ref_packed *rp = (ref_packed *) (pre + 1);
            char *end = (char *)rp + size;

            root.ptype = ptr_ref_type;
            while ((char *)rp < end) {
                comp = rp;
                if (r_is_packed(rp)) { /* No packed refs need tracing. */
                    rp++;
                } else {
                    ref *const pref = (ref *)rp;

                    if (r_space(pref) >= min_trace) {
                        /* Clear the mark so gc_trace re-marks it. */
                        r_clear_attrs(pref, l_mark);
                        more |= gc_trace(&root, pstate, pmstack);
                    }
                    rp += packed_per_ref;
                }
            }
        } else if (!o_is_unmarked(pre)) {
            if (!o_is_untraced(pre))
                o_set_unmarked(pre);
            /* Free objects have nothing to trace. */
            if (pre->o_type != &st_free) {
                struct_proc_clear_marks((*proc)) =
                    pre->o_type->clear_marks;

                root.ptype = ptr_struct_type;
                comp = pre + 1;
                if (proc != 0)
                    (*proc) (mem, comp, size, pre->o_type);
                more |= gc_trace(&root, pstate, pmstack);
            }
        }
    }
    END_OBJECTS_SCAN
        return more;
}
782
783
/* Recursively mark from a (root) pointer. */
/* Return -1 if we overflowed the mark stack, */
/* 0 if we completed successfully without marking any new objects, */
/* 1 if we completed and marked some new objects. */
static int gc_extend_stack(gc_mark_stack *, gc_state_t *);
/*
 * This is the heart of the marking phase.  Instead of recursing on the C
 * stack, it keeps an explicit stack of (ptr, index, is_refs) frames in
 * linked gc_mark_stack segments.  When the current segment fills up,
 * gc_extend_stack tries to allocate another; if that fails, the clump
 * containing the unpushable pointer is flagged for rescanning and -1 is
 * propagated through 'new'.
 */
static int
gc_trace(gs_gc_root_t * rp, /* lgtm [cpp/use-of-goto] */
         gc_state_t * pstate, gc_mark_stack * pmstack)
{
    int min_trace = pstate->min_collect;
    gc_mark_stack *pms = pmstack;
    /* entries[0] is a sentinel (ptr == 0), so tracing starts at entries[1]. */
    ms_entry *sp = pms->entries + 1;

    /* We stop the mark stack 1 entry early, because we store into */
    /* the entry beyond the top. */
    ms_entry *stop = sp + pms->count - 2;
    int new = 0;
    enum_ptr_t nep;
    void *nptr;
    name_table *nt = pstate->ntable;

    /* Mark a name by index; records in 'new' if it was not already marked. */
#define mark_name(nidx)\
  BEGIN\
    if (names_mark_index(nt, nidx)) {\
        new |= 1;\
        if_debug2m('8', gcst_get_memory_ptr(pstate), "  [8]marked name "PRI_INTPTR"(%u)\n",\
                   (intptr_t)names_index_ptr(nt, nidx), nidx);\
    }\
  END

    nptr = *rp->p;
    if (nptr == 0)
        return 0;

    /* Initialize the stack */
    sp->ptr = nptr;
    if (rp->ptype == ptr_ref_type)
        sp->index = 1, sp->is_refs = true;
    else {
        sp->index = 0, sp->is_refs = false;
        nep.ptr = nptr;
        if ((*rp->ptype->mark) (&nep, pstate))
            new |= 1;
    }
    for (;;) {
        gs_ptr_type_t ptp;

        /*
         * The following should really be an if..else, but that
         * would force unnecessary is_refs tests.
         */
        if (sp->is_refs)
            goto do_refs;

        /* ---------------- Structure ---------------- */

      do_struct:
        {
            obj_header_t *ptr = sp->ptr;

            struct_proc_enum_ptrs((*mproc));

            if (ptr == 0) { /* We've reached the bottom of a stack segment. */
                pms = pms->prev;
                if (pms == 0)
                    break;  /* all done */
                stop = pms->entries + pms->count - 1;
                sp = stop;
                continue;
            }
            debug_check_object(ptr - 1, NULL, NULL);
            /* ts: enumerate the next pointer (sp->index) of the struct *ptr. */
          ts:if_debug4m('7', pstate->heap, " [7]%smarking %s "PRI_INTPTR"[%u]",
                        depth_dots(sp, pms),
                        struct_type_name_string(ptr[-1].o_type),
                        (intptr_t)ptr, sp->index);
            mproc = ptr[-1].o_type->enum_ptrs;
            if (mproc == gs_no_struct_enum_ptrs ||
                (ptp = (*mproc)
                 (gcst_get_memory_ptr(pstate), ptr, pre_obj_contents_size(ptr - 1),
                  sp->index, &nep, ptr[-1].o_type, pstate)) == 0
                ) {
                /* No (more) pointers in this object: pop the frame. */
                if_debug0m('7', pstate->heap, " - done\n");
                sp--;
                continue;
            }
            /* The cast in the following statement is the one */
            /* place we need to break 'const' to make the */
            /* template for pointer enumeration work. */
            nptr = (void *)nep.ptr;
            sp->index++;
            if_debug1m('7', pstate->heap, " = "PRI_INTPTR"\n", (intptr_t)nptr);
            /* Descend into nep.ptr, whose pointer type is ptp. */
            if (ptp == ptr_struct_type) {
                sp[1].index = 0;
                sp[1].is_refs = false;
                if (sp == stop)
                    goto push;
                if (!ptr_struct_mark(&nep, pstate))
                    goto ts;    /* already marked: don't descend */
                new |= 1;
                (++sp)->ptr = nptr;
                goto do_struct;
            } else if (ptp == ptr_ref_type) {
                sp[1].index = 1;
                sp[1].is_refs = true;
                if (sp == stop)
                    goto push;
                new |= 1;
                (++sp)->ptr = nptr;
                goto do_refs;
            } else {   /* We assume this is some non-pointer- */
                /* containing type. */
                /* This was the only formulation of this call/condition/assignment that valgrind did not complain about */
                new |= ((*ptp->mark) (&nep, pstate)) ? 1 : 0;
               goto ts;
            }
        }

        /* ---------------- Refs ---------------- */

      do_refs:
        {
            ref_packed *pptr = sp->ptr;
            ref *rptr;

            /* tr: process the next element; sp->index counts remaining refs. */
          tr:if (!sp->index) {
                --sp;
                continue;
            }
            --(sp->index);
            if_debug3m('8', pstate->heap, "  [8]%smarking refs "PRI_INTPTR"[%u]\n",
                      depth_dots(sp, pms), (intptr_t)pptr, sp->index);
            if (r_is_packed(pptr)) {
                if (!r_has_pmark(pptr)) {
                    r_set_pmark(pptr);
                    new |= 1;
                    if (r_packed_is_name(pptr)) {
                        name_index_t nidx = packed_name_index(pptr);

                        mark_name(nidx);
                    }
                }
                ++pptr;
                goto tr;
            }
            rptr = (ref *) pptr;  /* * const beyond here */
            if (r_has_attr(rptr, l_mark)) {
                pptr = (ref_packed *)(rptr + 1);
                goto tr;
            }
            r_set_attrs(rptr, l_mark);
            new |= 1;
            if (r_space(rptr) < min_trace) { /* Note that this always picks up all scalars. */
                pptr = (ref_packed *) (rptr + 1);
                goto tr;
            }
            sp->ptr = rptr + 1;
            switch (r_type(rptr)) {
                    /* Struct cases */
                case t_file:
                    nptr = rptr->value.pfile;
                    /* rs: shared entry for refs whose value is a struct. */
                  rs:sp[1].is_refs = false;
                    sp[1].index = 0;
                    if (sp == stop) {
                        ptp = ptr_struct_type;
                        break;
                    }
                    nep.ptr = nptr;
                    if (!ptr_struct_mark(&nep, pstate))
                        goto nr;
                    new |= 1;
                    (++sp)->ptr = nptr;
                    goto do_struct;
                case t_device:
                    nptr = rptr->value.pdevice;
                    goto rs;
                case t_fontID:
                case t_struct:
                case t_astruct:
                case t_pdfctx:
                    nptr = rptr->value.pstruct;
                    goto rs;
                    /* Non-trivial non-struct cases */
                case t_dictionary:
                    nptr = rptr->value.pdict;
                    sp[1].index = sizeof(dict) / sizeof(ref);
                    goto rrp;
                case t_array:
                    nptr = rptr->value.refs;
                    /* rr: shared entry for ref/packed arrays sized by r_size. */
                  rr:if ((sp[1].index = r_size(rptr)) == 0) { /* Set the base pointer to 0, */
                        /* so we never try to relocate it. */
                        rptr->value.refs = 0;
                        goto nr;
                    }
                  rrp:
                  rrc:sp[1].is_refs = true;
                    if (sp == stop) {
                        /*
                         * The following initialization is unnecessary:
                         * ptp will not be used if sp[1].is_refs = true.
                         * We put this here solely to get rid of bogus
                         * "possibly uninitialized variable" warnings
                         * from certain compilers.
                         */
                        ptp = ptr_ref_type;
                        break;
                    }
                    new |= 1;
                    (++sp)->ptr = nptr;
                    goto do_refs;
                case t_mixedarray:
                case t_shortarray:
                    nptr = rptr->value.writable_packed;
                    goto rr;
                case t_name:
                    mark_name(names_index(nt, rptr));
                    /* nr: advance past this ref without descending into it. */
                  nr:pptr = (ref_packed *) (rptr + 1);
                    goto tr;
                case t_string:
                    if (gc_string_mark(rptr->value.bytes, r_size(rptr), true, pstate))
                        new |= 1;
                    goto nr;
                case t_oparray:
                    nptr = rptr->value.refs;  /* discard const */
                    sp[1].index = 1;
                    goto rrc;
                default:
                    goto nr;
            }
        }

        /* ---------------- Recursion ---------------- */

        /* push: the segment has no room for a new frame; extend or switch. */
      push:
        if (sp == stop) { /* The current segment is full. */
            int new_added = gc_extend_stack(pms, pstate);

            if (new_added) {
                /* Could not extend: gc_extend_stack recorded a rescan. */
                new |= new_added;
                continue;
            }
            pms = pms->next;
            stop = pms->entries + pms->count - 1;
            pms->entries[1] = sp[1];
            sp = pms->entries;
        }
        /* index and is_refs are already set */
        if (!sp[1].is_refs) {
            nep.ptr = nptr;
            if (!(*ptp->mark) (&nep, pstate))
                continue;
            new |= 1;
        }
        (++sp)->ptr = nptr;
    }
    return new;
}
1040
/* Link to, attempting to allocate if necessary, */
1041
/* another clump of mark stack. */
1042
static int
1043
gc_extend_stack(gc_mark_stack * pms, gc_state_t * pstate)
1044
151k
{
1045
151k
    if (pms->next == 0) { /* Try to allocate another segment. */
1046
3.40k
        uint count;
1047
1048
3.40k
        for (count = ms_size_desired; count >= ms_size_min; count >>= 1) {
1049
3.40k
            pms->next = (gc_mark_stack *)
1050
3.40k
                gs_alloc_bytes_immovable(pstate->heap,
1051
3.40k
                                         sizeof(gc_mark_stack) +
1052
3.40k
                                         sizeof(ms_entry) * count,
1053
3.40k
                                         "gc mark stack");
1054
3.40k
            if (pms->next != 0)
1055
3.40k
                break;
1056
3.40k
        }
1057
3.40k
        if (pms->next == 0) { /* The mark stack overflowed. */
1058
0
            ms_entry *sp = pms->entries + pms->count - 1;
1059
0
            byte *cptr = sp->ptr; /* container */
1060
0
            clump_t *cp = gc_locate(cptr, pstate);
1061
0
            int new = 1;
1062
1063
0
            if (cp == 0) { /* We were tracing outside collectible */
1064
                /* storage.  This can't happen. */
1065
0
                lprintf1("mark stack overflowed while outside collectible space at "PRI_INTPTR"!\n",
1066
0
                         (intptr_t)cptr);
1067
0
                gs_abort(pstate->heap);
1068
0
            }
1069
0
            if (cptr < cp->rescan_bot)
1070
0
                cp->rescan_bot = cptr, new = -1;
1071
0
            if (cptr > cp->rescan_top)
1072
0
                cp->rescan_top = cptr, new = -1;
1073
0
            return new;
1074
0
        }
1075
3.40k
        gc_init_mark_stack(pms->next, count);
1076
3.40k
        pms->next->prev = pms;
1077
3.40k
        pms->next->on_heap = true;
1078
3.40k
    }
1079
151k
    return 0;
1080
151k
}
1081
1082
/* Mark a struct.  Return true if new mark. */
1083
static bool
1084
ptr_struct_mark(enum_ptr_t *pep, gc_state_t * ignored)
1085
3.48G
{
1086
3.48G
    obj_header_t *ptr = (obj_header_t *)pep->ptr;
1087
1088
3.48G
    if (ptr == 0)
1089
383M
        return false;
1090
3.09G
    ptr--;      /* point to header */
1091
3.09G
    if (!o_is_unmarked(ptr))
1092
2.98G
        return false;
1093
110M
    o_mark(ptr);
1094
110M
    return true;
1095
3.09G
}
1096
1097
/* Mark a string.  Return true if new mark. */
1098
static bool
1099
ptr_string_mark(enum_ptr_t *pep, gc_state_t * gcst)
1100
99.1M
{
1101
99.1M
    return gc_string_mark(pep->ptr, pep->size, true, gcst);
1102
99.1M
}
1103
1104
/* Mark a name.  Return true if new mark. */
1105
static bool
1106
ptr_name_index_mark(enum_ptr_t *pep, gc_state_t * gcst)
1107
0
{
1108
0
    return names_mark_index(gcst->heap->gs_lib_ctx->gs_name_table, pep->size);
1109
0
}
1110
1111
/* Finish tracing by marking names. */
1112
static bool
1113
gc_trace_finish(gc_state_t * pstate)
1114
178k
{
1115
178k
    name_table *nt = pstate->ntable;
1116
178k
    name_index_t nidx = 0;
1117
178k
    bool marked = false;
1118
1119
1.51G
    while ((nidx = names_next_valid_index(nt, nidx)) != 0) {
1120
1.51G
        name_string_t *pnstr = names_index_string_inline(nt, nidx);
1121
1122
1.51G
        if (pnstr->mark) {
1123
1.45G
            enum_ptr_t enst, ensst;
1124
1125
1.45G
            if (!pnstr->foreign_string &&
1126
1.45G
                gc_string_mark(pnstr->string_bytes, pnstr->string_size,
1127
1.33G
                               true, pstate)
1128
1.45G
                )
1129
1.33G
                marked = true;
1130
1.45G
            enst.ptr = names_index_sub_table(nt, nidx);
1131
1.45G
            ensst.ptr = names_index_string_sub_table(nt, nidx);
1132
1.45G
            marked |=
1133
1.45G
                ptr_struct_mark(&enst, pstate) |
1134
1.45G
                ptr_struct_mark(&ensst, pstate);
1135
1.45G
        }
1136
1.51G
    }
1137
178k
    return marked;
1138
178k
}
1139
1140
/* ------ Relocation planning phase ------ */
1141
1142
/* Initialize the relocation information in the clump header. */
1143
static void
1144
gc_init_reloc(clump_t * cp)
1145
42.2M
{
1146
42.2M
    clump_head_t *chead = cp->chead;
1147
1148
42.2M
    chead->dest = cp->cbase;
1149
42.2M
    chead->free.o_back =
1150
42.2M
        offset_of(clump_head_t, free) >> obj_back_shift;
1151
42.2M
    chead->free.o_size = sizeof(obj_header_t);
1152
42.2M
    chead->free.o_nreloc = 0;
1153
42.2M
}
1154
1155
/* Set marks and clear relocation for clumps that won't be compacted. */
static void
gc_clear_reloc(clump_t * cp)
{
    /* Live objects' back pointers are measured from the most recent
     * free object; here every object stays put, so all of them refer
     * back to the clump header's built-in free object. */
    byte *pfree = (byte *) & cp->chead->free;

    gc_init_reloc(cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        const struct_shared_procs_t *procs =
    pre->o_type->shared;

    /* Types with shared procs (e.g. ref/string containers) keep
     * per-element relocation info of their own; let them reset it. */
    if (procs != 0)
        (*procs->clear_reloc) (pre, size);
    /* Flag the object as untraced so the relocation phase still
     * relocates its pointers even though the object itself won't move. */
    o_set_untraced(pre);
    pre->o_back = ((byte *) pre - pfree) >> obj_back_shift;
    END_OBJECTS_SCAN
        gc_strings_set_marks(cp, true);
    gc_strings_clear_reloc(cp);
}
1175
1176
/* Set the relocation for the objects in a clump. */
/* This will never be called for a clump with any o_untraced objects. */
static void
gc_objects_set_reloc(gc_state_t * gcst, clump_t * cp)
{
    /* Running total of bytes freed so far in this clump; this is the
     * distance each subsequent live object will move down. */
    size_t reloc = 0;
    clump_head_t *chead = cp->chead;
    byte *pfree = (byte *) & chead->free; /* most recent free object */

    if_debug_clump('6', gcst->heap, "[6]setting reloc for clump", cp);
    gc_init_reloc(cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        struct_proc_finalize((*finalize));
    const struct_shared_procs_t *procs =
    pre->o_type->shared;

    /* An object is dead if it is unmarked, or (for types with shared
     * procs) if the type's set_reloc proc reports it free. */
    if ((procs == 0 ? o_is_unmarked(pre) :
         !(*procs->set_reloc) (pre, reloc, size))
        ) {     /* Free object */
        reloc += sizeof(obj_header_t) + obj_align_round(size);
        /* Give the type a chance to release external resources
         * before the storage is reclaimed. */
        if ((finalize = pre->o_type->finalize) != 0) {
            if_debug2m('u', gcst->heap, "[u]GC finalizing %s "PRI_INTPTR"\n",
                       struct_type_name_string(pre->o_type),
                       (intptr_t)(pre + 1));
            (*finalize) (gcst->cur_mem, pre + 1);
        }
        pfree = (byte *) pre;
        /* Free objects point back to the clump head, which is how the
         * later phases recognize them as free. */
        pre->o_back = (pfree - (byte *) chead) >> obj_back_shift;
        pre->o_nreloc = reloc;
        if_debug3m('7', gcst->heap, " [7]at "PRI_INTPTR", unmarked %lu, new reloc = %u\n",
                   (intptr_t)pre, (ulong) size, (unsigned int)reloc);
    } else {     /* Useful object */
        debug_check_object(pre, cp, gcst);
        /* Live objects point back to the most recent free object,
         * from which their relocation amount can be recovered. */
        pre->o_back = ((byte *) pre - pfree) >> obj_back_shift;
    }
    END_OBJECTS_SCAN
#ifdef DEBUG
    if (reloc != 0) {
        if_debug1m('6', gcst->heap, "[6]freed %u", (unsigned int)reloc);
        if_debug_clump('6', gcst->heap, " in", cp);
    }
#endif
}
1220
1221
/* ------ Relocation phase ------ */
1222
1223
/* Relocate the pointers in all the objects in a clump. */
static void
gc_do_reloc(clump_t * cp, gs_ref_memory_t * mem, gc_state_t * pstate)
{
    clump_head_t *chead = cp->chead;

    if_debug_clump('6', (const gs_memory_t *)mem, "[6]relocating in clump", cp);
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
#ifdef DEBUG
        /* Record the containing clump so igc_reloc_struct_ptr can
         * sanity-check back pointers in debug builds. */
        pstate->container = cp;
#endif
    /* We need to relocate the pointers in an object iff */
    /* it is o_untraced, or it is a useful object. */
    /* An object is free iff its back pointer points to */
    /* the clump_head structure. */
        if (o_is_untraced(pre) ||
            pre->o_back << obj_back_shift != (byte *) pre - (byte *) chead
            ) {
            struct_proc_reloc_ptrs((*proc)) =
                pre->o_type->reloc_ptrs;

            if_debug3m('7', (const gs_memory_t *)mem,
                       " [7]relocating ptrs in %s(%lu) "PRI_INTPTR"\n",
                       struct_type_name_string(pre->o_type),
                       (ulong)size, (intptr_t)pre);
            /* Delegate to the type's reloc_ptrs proc, which rewrites
             * each interior pointer to its post-compaction address. */
            if (proc != 0)
                (*proc) (pre + 1, size, pre->o_type, pstate);
        }
#ifdef DEBUG
        pstate->container = 0;
#endif
    END_OBJECTS_SCAN
}
1257
1258
/* Print pointer relocation if debugging. */
1259
/* We have to provide this procedure even if DEBUG is not defined, */
1260
/* in case one of the other GC modules was compiled with DEBUG. */
1261
const void *
1262
print_reloc_proc(const void *obj, const char *cname, const void *robj)
1263
0
{
1264
0
    if_debug3('9', "  [9]relocate %s * "PRI_INTPTR" to "PRI_INTPTR"\n",
1265
0
              cname, (intptr_t)obj, (intptr_t)robj);
1266
0
    return robj;
1267
0
}
1268
1269
/* Relocate a pointer to an (aligned) object. */
/* See gsmemory.h for why the argument is const and the result is not. */
static void /*obj_header_t */ *
igc_reloc_struct_ptr(const void /*obj_header_t */ *obj, gc_state_t * gcst)
{
    const obj_header_t *const optr = (const obj_header_t *)obj;
    const void *robj;

    if (obj == 0) {
        /* Null pointers relocate to null. */
        discard(print_reloc(obj, "NULL", 0));
        return 0;
    }
    debug_check_object(optr - 1, NULL, gcst);
    {
        uint back = optr[-1].o_back;

        if (back == o_untraced)
            /* Untraced objects never move. */
            robj = obj;
        else {
#ifdef DEBUG
            /* Do some sanity checking. */
            clump_t *cp = gcst->container;

            if (cp != 0 && cp->cbase <= (byte *)obj && (byte *)obj <cp->ctop) {
                if (back > (cp->ctop - cp->cbase) >> obj_back_shift) {
                    lprintf2("Invalid back pointer %u at "PRI_INTPTR"!\n",
                             back, (intptr_t)obj);
                    gs_abort(NULL);
                }
            } else {
                /* Pointed to unknown clump. Can't check it, sorry. */
            }
#endif
            {
                /* Walk two back links: the object's back pointer leads
                 * to the most recent free object before it, and that
                 * free object's back pointer leads to the clump head. */
                const obj_header_t *pfree = (const obj_header_t *)
                ((const char *)(optr - 1) -
                 (back << obj_back_shift));
                const clump_head_t *chead = (const clump_head_t *)
                ((const char *)pfree -
                 (pfree->o_back << obj_back_shift));

                /* New address = clump destination + the object's offset
                 * within the clump, minus the bytes freed before it. */
                robj = chead->dest +
                    ((const char *)obj - (const char *)(chead + 1) -
                     pfree->o_nreloc);
            }
        }
    }
    /* Use a severely deprecated pun to remove the const property. */
    {
        union { const void *r; void *w; } u;

        u.r = print_reloc(obj, struct_type_name_string(optr[-1].o_type), robj);
        return u.w;
    }
}
1324
1325
/* ------ Compaction phase ------ */
1326
1327
/* Compact the objects in a clump. */
/* This will never be called for a clump with any o_untraced objects. */
static void
gc_objects_compact(clump_t * cp, gc_state_t * gcst)
{
    clump_head_t *chead = cp->chead;
    /* dpre walks the destination addresses chosen in the planning phase. */
    obj_header_t *dpre = (obj_header_t *) chead->dest;
    const gs_memory_t *cmem = gcst->spaces.memories.named.system->stable_memory;

    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
    /* An object is free iff its back pointer points to */
    /* the clump_head structure. */
        if (pre->o_back << obj_back_shift != (byte *) pre - (byte *) chead) {
        const struct_shared_procs_t *procs = pre->o_type->shared;

        debug_check_object(pre, cp, gcst);
        if_debug4m('7', cmem,
                   " [7]compacting %s "PRI_INTPTR"(%lu) to "PRI_INTPTR"\n",
                   struct_type_name_string(pre->o_type),
                   (intptr_t)pre, (ulong)size, (intptr_t)dpre);
        if (procs == 0) {
            /* Plain struct: slide header + contents down in one move.
             * memmove, because source and destination may overlap. */
            if (dpre != pre)
                memmove(dpre, pre,
                        sizeof(obj_header_t) + size);
        } else
            /* Types with shared procs compact element-by-element. */
            (*procs->compact) (cmem, pre, dpre, size);
        dpre = (obj_header_t *)
            ((byte *) dpre + obj_size_round(size));
    }
    END_OBJECTS_SCAN
        if (cp->outer == 0 && chead->dest != cp->cbase)
        dpre = (obj_header_t *) cp->cbase; /* compacted this clump into another */
    /* Scrub the reclaimed tail of the clump and pull cbot down to it. */
    gs_alloc_fill(dpre, gs_alloc_fill_collected, cp->cbot - (byte *) dpre);
    cp->cbot = (byte *) dpre;
    cp->rcur = 0;
    cp->rtop = 0;   /* just to be sure */
}
1365
1366
/* ------ Cleanup ------ */
1367
1368
static splay_app_result_t
1369
free_if_empty(clump_t *cp, void *arg)
1370
41.4M
{
1371
41.4M
    gs_ref_memory_t * mem = (gs_ref_memory_t *)arg;
1372
1373
41.4M
    if (cp->cbot == cp->cbase && cp->ctop == cp->climit &&
1374
41.4M
        cp->outer == 0 && cp->inner_count == 0)
1375
3.60M
    {
1376
3.60M
        alloc_free_clump(cp, mem);
1377
3.60M
        if (mem->cc == cp)
1378
0
            mem->cc = NULL;
1379
3.60M
    }
1380
41.4M
    return SPLAY_APP_CONTINUE;
1381
41.4M
}
1382
1383
/* Free empty clumps. */
1384
static void
1385
gc_free_empty_clumps(gs_ref_memory_t * mem)
1386
1.16M
{
1387
    /* NOTE: Not in reverse order any more, so potentially
1388
     * not quite as good for crap allocators. */
1389
1.16M
    clump_splay_app(mem->root, mem, free_if_empty, mem);
1390
1.16M
}
1391
1392
/* Return the stable system memory for a GC state (used for debug output). */
const gs_memory_t * gcst_get_memory_ptr(gc_state_t *gcst)
{
    /* NOTE: space_system is a macro that reads the local 'spaces',
     * so this local must keep that exact name. */
    vm_spaces spaces = gcst->spaces;

    return space_system->stable_memory;
}