Coverage Report

Created: 2025-06-10 06:59

/src/ghostpdl/psi/igcref.c
Line    Count    Source
1
/* Copyright (C) 2001-2023 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  39 Mesa Street, Suite 108A, San Francisco,
13
   CA 94129, USA, for further information.
14
*/
15
16
17
/* ref garbage collector for Ghostscript */
18
#include "memory_.h"
19
#include "ghost.h"
20
#include "gsexit.h"
21
#include "gsstruct.h"   /* for gxalloc.h included by iastate.h */
22
#include "iname.h"
23
#include "iastate.h"
24
#include "idebug.h"
25
#include "igc.h"
26
#include "ipacked.h"
27
#include "store.h"    /* for ref_assign_inline */
28
29
/* Define whether to trace every step of relocating ref pointers. */
30
#if 0
31
#  define rputc(m,c) dmputc(m,c)
32
#else
33
6.84G
#  define rputc(m,c) DO_NOTHING
34
#endif
35
36
/* Forward references */
37
ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed);
38
ptr_proc_reloc(igc_reloc_ref_ptr_nocheck, ref_packed);
39
refs_proc_reloc(igc_reloc_refs);
40
41
/*
42
 * Define the 'structure' type descriptor for refs.
43
 * This is special because it has different shared procs.
44
 */
45
static gc_proc_clear_reloc(refs_clear_reloc);
46
static gc_proc_set_reloc(refs_set_reloc);
47
static gc_proc_compact(refs_compact);
48
static const struct_shared_procs_t refs_shared_procs =
49
{refs_clear_reloc, refs_set_reloc, refs_compact};
50
static struct_proc_clear_marks(refs_clear_marks);
51
static struct_proc_reloc_ptrs(refs_do_reloc);
52
const gs_memory_struct_type_t st_refs =
53
{sizeof(ref), "refs", &refs_shared_procs, refs_clear_marks, 0, refs_do_reloc};
54
55
/*
56
 * Define the GC procedures for structs that actually contain refs.
57
 * These are special because the shared refs_* procedures
58
 * are never called.  Instead, we unmark the individual refs in clear_marks,
59
 * disregard refs_*_reloc (because we will never relocate a ptr_ref_type
60
 * pointer pointing into the structure), disregard refs_compact (because
61
 * compaction is never required), and remove the marks in reloc_ptrs.
62
 * See also the comment about ptr_ref_type in imemory.h.
63
 */
64
CLEAR_MARKS_PROC(ref_struct_clear_marks)
65
2.03M
{
66
2.03M
    ref *pref = (ref *) vptr;
67
2.03M
    ref *end = (ref *) ((char *)vptr + size);
68
69
73.8M
    for (; pref < end; pref++)
70
71.8M
        r_clear_attrs(pref, l_mark);
71
2.03M
}
72
ENUM_PTRS_BEGIN_PROC(ref_struct_enum_ptrs)
73
36.9M
{
74
36.9M
    if (index >= size / sizeof(ref))
75
1.01M
        return 0;
76
35.8M
    pep->ptr = (const ref *)vptr + index;
77
35.8M
    return ptr_ref_type;
78
36.9M
    ENUM_PTRS_END_PROC
79
36.9M
}
80
1.01M
RELOC_PTRS_BEGIN(ref_struct_reloc_ptrs)
81
1.01M
{
82
1.01M
    vm_spaces spaces = gcst->spaces;
83
1.01M
    const gs_memory_t *cmem = space_system->stable_memory;
84
85
1.01M
    ref *beg = vptr;
86
1.01M
    ref *end = (ref *) ((char *)vptr + size);
87
88
1.01M
    igc_reloc_refs((ref_packed *) beg, (ref_packed *) end, gcst);
89
1.01M
    ref_struct_clear_marks(cmem, vptr, size, pstype);
90
1.01M
} RELOC_PTRS_END
91
92
/* ------ Unmarking phase ------ */
93
94
/* Unmark a single ref. */
95
void
96
ptr_ref_unmark(enum_ptr_t *pep, gc_state_t * ignored)
97
10.6k
{
98
10.6k
    ref_packed *rpp = (ref_packed *)pep->ptr;
99
100
10.6k
    if (r_is_packed(rpp))
101
0
        r_clear_pmark(rpp);
102
10.6k
    else
103
10.6k
        r_clear_attrs((ref *)rpp, l_mark);
104
10.6k
}
105
106
/* Unmarking routine for ref objects. */
107
static void
108
refs_clear_marks(const gs_memory_t *cmem,
109
                 void /*obj_header_t */ *vptr, uint size,
110
                 const gs_memory_struct_type_t * pstype)
111
22.7M
{
112
22.7M
    ref_packed *rp = (ref_packed *) vptr;
113
22.7M
    ref_packed *end = (ref_packed *) ((byte *) vptr + size);
114
115
    /* Since the last ref is full-size, we only need to check for */
116
    /* the end of the block when we see one of those. */
117
4.57G
    for (;;) {
118
4.57G
        if (r_is_packed(rp)) {
119
#ifdef DEBUG
120
            if (gs_debug_c('8')) {
121
                dmlprintf1(cmem, "  [8]unmark packed "PRI_INTPTR" ", (intptr_t) rp);
122
                debug_print_ref(cmem, (const ref *)rp);
123
                dmputs(cmem, "\n");
124
            }
125
#endif
126
941M
            r_clear_pmark(rp);
127
941M
            rp++;
128
3.63G
        } else {   /* full-size ref */
129
3.63G
            ref *const pref = (ref *)rp;
130
131
#ifdef DEBUG
132
            if (gs_debug_c('8')) {
133
                dmlprintf1(cmem, "  [8]unmark ref "PRI_INTPTR" ", (intptr_t)rp);
134
                debug_print_ref(cmem, pref);
135
                dmputs(cmem, "\n");
136
            }
137
#endif
138
3.63G
            r_clear_attrs(pref, l_mark);
139
3.63G
            rp += packed_per_ref;
140
3.63G
            if (rp >= (ref_packed *) end)
141
22.7M
                break;
142
3.63G
        }
143
4.57G
    }
144
22.7M
}
145
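The loop above is the traversal pattern used throughout this file: a packed ref occupies a single ref_packed word, a full-size ref occupies packed_per_ref words, and a block of refs always ends with a full-size ref, so the end-of-block test is only needed in the full-size branch. The following stand-alone sketch of that pattern uses invented toy names (TOY_PACKED_BIT, TOY_PACKED_PER_REF, toy_scan), not the real ref layout or the Ghostscript API:

#include <stdint.h>

#define TOY_PACKED_BIT     0x1u   /* assumed tag bit marking a packed ref */
#define TOY_PACKED_PER_REF 4      /* assumed words per full-size ref */

static void toy_scan(uint32_t *rp, uint32_t *end,
                     void (*on_packed)(uint32_t *),
                     void (*on_full)(uint32_t *))
{
    for (;;) {
        if (*rp & TOY_PACKED_BIT) {
            on_packed(rp);
            rp++;                        /* packed refs are one word */
        } else {
            on_full(rp);
            rp += TOY_PACKED_PER_REF;    /* full-size refs span several words */
            if (rp >= end)               /* only a full-size ref can end a block */
                break;
        }
    }
}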
146
/* ------ Marking phase ------ */
147
148
/* Mark a ref.  Return true if new mark. */
149
bool
150
ptr_ref_mark(enum_ptr_t *pep, gc_state_t * ignored)
151
0
{
152
0
    ref_packed *rpp = (void *)pep->ptr;
153
154
0
    if (r_is_packed(rpp)) {
155
0
        if (r_has_pmark(rpp))
156
0
            return false;
157
0
        r_set_pmark(rpp);
158
0
    } else {
159
0
        ref *const pref = (ref *)rpp;
160
161
0
        if (r_has_attr(pref, l_mark))
162
0
            return false;
163
0
        r_set_attrs(pref, l_mark);
164
0
    }
165
0
    return true;
166
0
}
167
168
/* ------ Relocation planning phase ------ */
169
170
/*
171
 * We store relocation in the size field of refs that don't use it,
172
 * so that we don't have to scan all the way to an unmarked object.
173
 * We must avoid nulls, which sometimes have useful information
174
 * in their size fields, and the types above t_next_index, which are
175
 * actually operators in disguise and also use the size field.
176
 */
177
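A self-contained sketch of the scheme described above may help. All names here (toy_ref, plan_reloc, reloc_ptr) and the flat slot layout are invented for illustration; the real code also has to handle packed refs, alignment blocks, and relocation values that overflow their field, none of which is modelled:

#include <stddef.h>
#include <stdbool.h>

typedef struct {
    bool marked;      /* object survives this collection */
    bool uses_size;   /* size field carries real data, so it cannot hold a reloc */
    unsigned size;    /* doubles as relocation storage when it is free */
} toy_ref;

/* Planning: store, in every slot whose size field is free, the number of
   garbage bytes that precede it in the block. */
static void plan_reloc(toy_ref *block, size_t n)
{
    unsigned freed = 0;
    size_t i;

    for (i = 0; i < n; i++) {
        if (!block[i].marked) {
            block[i].size = freed;              /* garbage: size is always free */
            freed += (unsigned)sizeof(toy_ref);
        } else if (!block[i].uses_size) {
            block[i].size = freed;              /* marked, but size is spare */
        }
    }
}

/* Relocation: scan forward from the (marked) referent to the first slot that
   stores a relocation.  Every slot passed over is marked, so the stored value
   is exactly the amount of garbage preceding the referent. */
static toy_ref *reloc_ptr(toy_ref *block, size_t n, toy_ref *old)
{
    toy_ref *p;

    for (p = old; p < block + n; p++)
        if (!p->marked || !p->uses_size)
            return old - p->size / sizeof(toy_ref);
    return old;   /* no relocation stored after the referent in this toy */
}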
178
/* Clear the relocation for a ref object. */
179
static void
180
refs_clear_reloc(obj_header_t *hdr, uint size)
181
751k
{
182
751k
    ref_packed *rp = (ref_packed *) (hdr + 1);
183
751k
    ref_packed *end = (ref_packed *) ((byte *) rp + size);
184
185
80.2M
    while (rp < end) {
186
79.4M
        if (r_is_packed(rp))
187
28.7M
            rp++;
188
50.7M
        else {
189
            /* Full-size ref.  Store the relocation here if possible. */
190
50.7M
            ref *const pref = (ref *)rp;
191
192
50.7M
            if (!ref_type_uses_size_or_null(r_type(pref))) {
193
11.8M
                if_debug1('8', "  [8]clearing reloc at "PRI_INTPTR"\n", (intptr_t)rp);
194
11.8M
                r_set_size(pref, 0);
195
11.8M
            }
196
50.7M
            rp += packed_per_ref;
197
50.7M
        }
198
79.4M
    }
199
751k
}
200
201
/* Set the relocation for a ref object. */
202
static bool
203
refs_set_reloc(obj_header_t * hdr, uint reloc, uint size)
204
22.0M
{
205
22.0M
    ref_packed *rp = (ref_packed *) (hdr + 1);
206
22.0M
    ref_packed *end = (ref_packed *) ((byte *) rp + size);
207
22.0M
    uint freed = 0;
208
209
    /*
210
     * We have to be careful to keep refs aligned properly.
211
     * For the moment, we do this by either keeping or discarding
212
     * an entire (aligned) block of align_packed_per_ref packed elements
213
     * as a unit.  We know that align_packed_per_ref <= packed_per_ref,
214
     * and we also know that packed refs are always allocated in blocks
215
     * of align_packed_per_ref, so this makes things relatively easy.
216
     */
217
3.83G
    while (rp < end) {
218
3.81G
        if (r_is_packed(rp)) {
219
#if align_packed_per_ref == 1
220
            if (r_has_pmark(rp)) {
221
                if_debug1('8',
222
                          "  [8]packed ref "PRI_INTPTR" is marked\n",
223
                          (intptr_t)rp);
224
                rp++;
225
            } else {
226
#else
227
228M
            int i;
228
229
            /*
230
             * Note: align_packed_per_ref is typically
231
             * 2 or 4 for 32-bit processors.
232
             */
233
228M
#define all_marked (align_packed_per_ref * lp_mark)
234
# if align_packed_per_ref == 2
235
#  if ARCH_SIZEOF_INT == ARCH_SIZEOF_SHORT * 2
236
#    undef all_marked
237
#    define all_marked ( (lp_mark << (sizeof(short) * 8)) + lp_mark )
238
#    define marked (*(int *)rp & all_marked)
239
#  else
240
#    define marked ((*rp & lp_mark) + (rp[1] & lp_mark))
241
#  endif
242
# else
243
228M
#  if align_packed_per_ref == 4
244
228M
#    define marked ((*rp & lp_mark) + (rp[1] & lp_mark) +\
245
228M
                    (rp[2] & lp_mark) + (rp[3] & lp_mark))
246
#  else
247
            /*
248
             * The value of marked is logically a uint, not an int:
249
             * we declare it as int only to avoid a compiler warning
250
             * message about using a non-int value in a switch statement.
251
             */
252
            int marked = *rp & lp_mark;
253
254
            for (i = 1; i < align_packed_per_ref; i++)
255
                marked += rp[i] & lp_mark;
256
#  endif
257
228M
# endif
258
            /*
259
             * Now marked is lp_mark * the number of marked
260
             * packed refs in the aligned block, except for
261
             * a couple of special cases above.
262
             */
263
228M
            switch (marked) {
264
81.2M
                case all_marked:
265
81.2M
                    if_debug2('8',
266
81.2M
                              "  [8]packed refs "PRI_INTPTR".."PRI_INTPTR" are marked\n",
267
81.2M
                              (intptr_t)rp,
268
81.2M
                              (intptr_t)(rp + (align_packed_per_ref - 1)));
269
81.2M
                    rp += align_packed_per_ref;
270
81.2M
                    break;
271
93.7M
                default:
272
                    /* At least one packed ref in the block */
273
                    /* is marked: Keep the whole block. */
274
468M
                    for (i = align_packed_per_ref; i--; rp++) {
275
375M
                        r_set_pmark(rp);
276
375M
                        if_debug1('8',
277
375M
                                  "  [8]packed ref "PRI_INTPTR" is marked\n",
278
375M
                                  (intptr_t)rp);
279
375M
                    }
280
93.7M
                    break;
281
53.1M
                case 0:
282
53.1M
#endif
283
53.1M
                    if_debug2('8', "  [8]%d packed ref(s) at "PRI_INTPTR" are unmarked\n",
284
53.1M
                              align_packed_per_ref, (intptr_t)rp);
285
53.1M
                    {
286
53.1M
                        uint rel = reloc + freed;
287
288
                        /* Change this to an integer so we can */
289
                        /* store the relocation here. */
290
53.1M
                        *rp = pt_tag(pt_integer) +
291
53.1M
                            min(rel, packed_max_value);
292
53.1M
                    }
293
53.1M
                    rp += align_packed_per_ref;
294
53.1M
                    freed += sizeof(ref_packed) * align_packed_per_ref;
295
228M
            }
296
3.58G
        } else {   /* full-size ref */
297
3.58G
            uint rel = reloc + freed;
298
299
            /* The following assignment is logically */
300
            /* unnecessary; we do it only for convenience */
301
            /* in debugging. */
302
3.58G
            ref *pref = (ref *) rp;
303
304
3.58G
            if (!r_has_attr(pref, l_mark)) {
305
2.17G
                if_debug1('8', "  [8]ref "PRI_INTPTR" is unmarked\n",
306
2.17G
                          (intptr_t)pref);
307
                /* Change this to a mark so we can */
308
                /* store the relocation. */
309
2.17G
                r_set_type(pref, t_mark);
310
2.17G
                r_set_size(pref, rel);
311
2.17G
                freed += sizeof(ref);
312
2.17G
            } else {
313
1.40G
                if_debug1('8', "  [8]ref "PRI_INTPTR" is marked\n",
314
1.40G
                          (intptr_t)pref);
315
                /* Store the relocation here if possible. */
316
1.40G
                if (!ref_type_uses_size_or_null(r_type(pref))) {
317
280M
                    if_debug2('8', "  [8]storing reloc %u at "PRI_INTPTR"\n",
318
280M
                              rel, (intptr_t)pref);
319
280M
                    r_set_size(pref, rel);
320
280M
                }
321
1.40G
            }
322
3.58G
            rp += packed_per_ref;
323
3.58G
        }
324
3.81G
    }
325
22.0M
    if_debug3('7', " [7]at end of refs "PRI_INTPTR", size = %u, freed = %u\n",
326
22.0M
              (intptr_t)(hdr + 1), size, freed);
327
22.0M
    if (freed == size)
328
3.12M
        return false;
329
18.8M
#if ARCH_SIZEOF_INT > ARCH_SIZEOF_SHORT
330
    /*
331
     * If the final relocation can't fit in the r_size field
332
     * (which can't happen if the object shares a clump with
333
     * any other objects, so we know reloc = 0 in this case),
334
     * we have to keep the entire object unless there are no
335
     * references to any ref in it.
336
     */
337
18.8M
    if (freed <= max_ushort)
338
18.8M
        return true;
339
    /*
340
     * We have to mark all surviving refs, but we also must
341
     * overwrite any non-surviving refs with something that
342
     * doesn't contain any pointers.
343
     */
344
0
    rp = (ref_packed *) (hdr + 1);
345
0
    while (rp < end) {
346
0
        if (r_is_packed(rp)) {
347
0
            if (!r_has_pmark(rp))
348
0
                *rp = pt_tag(pt_integer) | lp_mark;
349
0
            ++rp;
350
0
        } else {   /* The following assignment is logically */
351
            /* unnecessary; we do it only for convenience */
352
            /* in debugging. */
353
0
            ref *pref = (ref *) rp;
354
355
0
            if (!r_has_attr(pref, l_mark)) {
356
0
                r_set_type_attrs(pref, t_mark, l_mark);
357
0
                r_set_size(pref, reloc);
358
0
            } else {
359
0
                if (!ref_type_uses_size_or_null(r_type(pref)))
360
0
                    r_set_size(pref, reloc);
361
0
            }
362
0
            rp += packed_per_ref;
363
0
        }
364
0
    }
365
    /* The last ref has to remain unmarked. */
366
0
    r_clear_attrs((ref *) rp - 1, l_mark);
367
0
#endif
368
0
    return true;
369
18.8M
}
370
371
/* ------ Relocation phase ------ */
372
373
/* Relocate all the pointers in a block of refs. */
374
static void
375
refs_do_reloc(void /*obj_header_t */ *vptr, uint size,
376
              const gs_memory_struct_type_t * pstype, gc_state_t * gcst)
377
19.6M
{
378
19.6M
    igc_reloc_refs((ref_packed *) vptr,
379
19.6M
                   (ref_packed *) ((char *)vptr + size),
380
19.6M
                   gcst);
381
19.6M
}
382
/* Relocate the contents of a block of refs. */
383
/* If gcst->relocating_untraced is true, we are relocating pointers from an */
384
/* untraced space, so relocate all refs, not just marked ones. */
385
void
386
igc_reloc_refs(ref_packed * from, ref_packed * to, gc_state_t * gcst)
387
21.5M
{
388
21.5M
    int min_trace = gcst->min_collect;
389
21.5M
    ref_packed *rp = from;
390
21.5M
    bool do_all = gcst->relocating_untraced;
391
392
21.5M
    vm_spaces spaces = gcst->spaces;
393
21.5M
    const gs_memory_t *cmem = space_system->stable_memory;
394
395
2.45G
    while (rp < to) {
396
2.43G
        ref *pref;
397
#ifdef DEBUG
398
        const void *before = 0;
399
        const void *after = 0;
400
# define DO_RELOC(var, stat)\
401
    BEGIN before = (var); stat; after = (var); END
402
# define SET_RELOC(var, expr)\
403
    BEGIN before = (var); after = (var) = (expr); END
404
#else
405
2.43G
# define DO_RELOC(var, stat) stat
406
2.43G
# define SET_RELOC(var, expr) var = expr
407
2.43G
#endif
408
409
2.43G
        if (r_is_packed(rp)) {
410
885M
            rp++;
411
885M
            continue;
412
885M
        }
413
        /* The following assignment is logically unnecessary; */
414
        /* we do it only for convenience in debugging. */
415
1.54G
        pref = (ref *) rp;
416
1.54G
        if_debug3m('8', gcst->heap, "  [8]relocating %s %d ref at "PRI_INTPTR"\n",
417
1.54G
                   (r_has_attr(pref, l_mark) ? "marked" : "unmarked"),
418
1.54G
                   r_btype(pref), (intptr_t)pref);
419
1.54G
        if ((r_has_attr(pref, l_mark) || do_all) &&
420
1.54G
            r_space(pref) >= min_trace
421
1.54G
            ) {
422
563M
            switch (r_type(pref)) {
423
                    /* Struct cases */
424
117k
                case t_file:
425
117k
                    DO_RELOC(pref->value.pfile, RELOC_VAR(pref->value.pfile));
426
117k
                    break;
427
434k
                case t_device:
428
434k
                    DO_RELOC(pref->value.pdevice,
429
434k
                             RELOC_VAR(pref->value.pdevice));
430
434k
                    break;
431
33.8k
                case t_fontID:
432
292k
                case t_struct:
433
311k
                case t_astruct:
434
311k
                case t_pdfctx:
435
311k
                    DO_RELOC(pref->value.pstruct,
436
311k
                             RELOC_VAR(pref->value.pstruct));
437
311k
                    break;
438
                    /* Non-trivial non-struct cases */
439
12.0M
                case t_dictionary:
440
12.0M
                    rputc(gcst->heap, 'd');
441
12.0M
                    SET_RELOC(pref->value.pdict,
442
12.0M
                              (dict *)igc_reloc_ref_ptr((ref_packed *)pref->value.pdict, gcst));
443
12.0M
                    break;
444
101M
                case t_array:
445
101M
                    {
446
101M
                        uint size = r_size(pref);
447
448
101M
                        if (size != 0) { /* value.refs might be NULL */
449
450
                            /*
451
                             * If the array is large, we allocated it in its
452
                             * own object (at least originally -- this might
453
                             * be a pointer to a subarray.)  In this case,
454
                             * we know it is the only object in its
455
                             * containing st_refs object, so we know that
456
                             * the mark containing the relocation appears
457
                             * just after it.
458
                             */
459
100M
                            if (size < max_size_st_refs / sizeof(ref)) {
460
98.6M
                                rputc(gcst->heap, 'a');
461
98.6M
                                SET_RELOC(pref->value.refs,
462
98.6M
                                    (ref *) igc_reloc_ref_ptr(
463
98.6M
                                     (ref_packed *) pref->value.refs, gcst));
464
98.6M
                            } else {
465
1.74M
                                rputc(gcst->heap, 'A');
466
                                /*
467
                                 * See the t_shortarray case below for why we
468
                                 * decrement size.
469
                                 */
470
1.74M
                                --size;
471
1.74M
                                SET_RELOC(pref->value.refs,
472
1.74M
                                    (ref *) igc_reloc_ref_ptr(
473
1.74M
                                   (ref_packed *) (pref->value.refs + size),
474
1.74M
                                                               gcst) - size);
475
1.74M
                            }
476
100M
                        }
477
101M
                    }
478
101M
                    break;
479
72.7M
                case t_mixedarray:
480
72.7M
                    if (r_size(pref) != 0) { /* value.refs might be NULL */
481
72.7M
                        rputc(gcst->heap, 'm');
482
72.7M
                        SET_RELOC(pref->value.packed,
483
72.7M
                                  igc_reloc_ref_ptr(pref->value.packed, gcst));
484
72.7M
                    }
485
72.7M
                    break;
486
32.6M
                case t_shortarray:
487
32.6M
                    {
488
32.6M
                        uint size = r_size(pref);
489
490
                        /*
491
                         * Since we know that igc_reloc_ref_ptr works by
492
                         * scanning forward, and we know that all the
493
                         * elements of this array itself are marked, we can
494
                         * save some scanning time by relocating the pointer
495
                         * to the end of the array rather than the
496
                         * beginning.
497
                         */
498
32.6M
                        if (size != 0) { /* value.refs might be NULL */
499
30.4M
                            rputc(gcst->heap, 's');
500
                            /*
501
                             * igc_reloc_ref_ptr has to be able to determine
502
                             * whether the pointer points into a space that
503
                             * isn't being collected.  It does this by
504
                             * checking whether the referent of the pointer
505
                             * is marked.  For this reason, we have to pass
506
                             * a pointer to the last real element of the
507
                             * array, rather than just beyond it.
508
                             */
509
30.4M
                            --size;
510
30.4M
                            SET_RELOC(pref->value.packed,
511
30.4M
                                igc_reloc_ref_ptr(pref->value.packed + size,
512
30.4M
                                                  gcst) - size);
513
30.4M
                        }
514
32.6M
                    }
515
32.6M
                    break;
516
316M
                case t_name:
517
316M
                    {
518
316M
                        void *psub = name_ref_sub_table(cmem, pref);
519
316M
                        void *rsub = RELOC_OBJ(psub); /* gcst implicit */
520
521
316M
                        SET_RELOC(pref->value.pname,
522
316M
                                  (name *)
523
316M
                                  ((char *)rsub + ((char *)pref->value.pname -
524
316M
                                                   (char *)psub)));
525
316M
                    } break;
526
19.2M
                case t_string:
527
19.2M
                    {
528
19.2M
                        gs_string str;
529
530
19.2M
                        str.data = pref->value.bytes;
531
19.2M
                        str.size = r_size(pref);
532
533
19.2M
                        DO_RELOC(str.data, RELOC_STRING_VAR(str));
534
19.2M
                        pref->value.bytes = str.data;
535
19.2M
                    }
536
19.2M
                    break;
537
8.30M
                case t_oparray:
538
8.30M
                    rputc(gcst->heap, 'o');
539
8.30M
                    SET_RELOC(pref->value.const_refs,
540
8.30M
                        (const ref *)igc_reloc_ref_ptr((const ref_packed *)pref->value.const_refs, gcst));
541
8.30M
                    break;
542
0
                default:
543
0
                    goto no_reloc; /* don't print trace message */
544
563M
            }
545
563M
            if_debug2m('8', gcst->heap, "  [8]relocated "PRI_INTPTR" => "PRI_INTPTR"\n",
546
563M
                       (intptr_t)before, (intptr_t)after);
547
563M
        }
548
1.54G
no_reloc:
549
1.54G
        rp += packed_per_ref;
550
1.54G
    }
551
21.5M
}
552
553
/* Relocate a pointer to a ref. */
554
/* See gsmemory.h for why the argument is const and the result is not. */
555
ref_packed *
556
igc_reloc_ref_ptr_nocheck(const ref_packed * prp, gc_state_t *gcst)
557
224M
{
558
    /*
559
     * Search forward for relocation.  This algorithm is intrinsically very
560
     * inefficient; we hope eventually to replace it with a better one.
561
     */
562
224M
    const ref_packed *rp = prp;
563
224M
    uint dec = 0;
564
#ifdef ALIGNMENT_ALIASING_BUG
565
    const ref *rpref;
566
# define RP_REF(rp) (rpref = (const ref *)rp, rpref)
567
#else
568
224M
# define RP_REF(rp) ((const ref *)rp)
569
224M
#endif
570
6.39G
    for (;;) {
571
572
6.39G
        if (r_is_packed(rp)) {
573
            /*
574
             * Normally, an unmarked packed ref will be an
575
             * integer whose value is the amount of relocation.
576
             * However, the relocation value might have been
577
             * too large to fit.  If this is the case, for
578
             * each such unmarked packed ref we pass over,
579
             * we have to decrement the final relocation.
580
             */
581
1.82G
            rputc(gcst->heap, (*rp & lp_mark ? '1' : '0'));
582
1.82G
            if (!(*rp & lp_mark)) {
583
46.6M
                if (*rp != pt_tag(pt_integer) + packed_max_value) {
584
                    /* This is a stored relocation value. */
585
41.6M
                    rputc(gcst->heap, '\n');
586
41.6M
                    rp = print_reloc(prp, "ref",
587
41.6M
                                     (const ref_packed *)
588
41.6M
                                     ((const char *)prp -
589
41.6M
                                      (*rp & packed_value_mask) + dec));
590
41.6M
                    break;
591
41.6M
                }
592
                /*
593
                 * We know this is the first of an aligned block
594
                 * of packed refs.  Skip over the entire block,
595
                 * decrementing the final relocation.
596
                 */
597
5.03M
                dec += sizeof(ref_packed) * align_packed_per_ref;
598
5.03M
                rp += align_packed_per_ref;
599
5.03M
            } else
600
1.77G
                rp++;
601
1.78G
            continue;
602
1.82G
        }
603
4.56G
        if (!ref_type_uses_size_or_null(r_type(RP_REF(rp)))) {
604
            /* reloc is in r_size */
605
182M
            rputc(gcst->heap, '\n');
606
182M
            rp = print_reloc(prp, "ref",
607
182M
                             (const ref_packed *)
608
182M
                             (r_size(RP_REF(rp)) == 0 ? prp :
609
182M
                              (const ref_packed *)((const char *)prp -
610
182M
                                                   r_size(RP_REF(rp)) + dec)));
611
182M
            break;
612
182M
        }
613
4.56G
        rputc(gcst->heap, 'u');
614
4.38G
        rp += packed_per_ref;
615
4.38G
    }
616
    /* Use a severely deprecated pun to remove the const property. */
617
224M
    {
618
224M
        union { const ref_packed *r; ref_packed *w; } u;
619
620
224M
        u.r = rp;
621
224M
        return u.w;
622
224M
    }
623
224M
#undef RP_REF
624
224M
}
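A worked illustration of the dec adjustment above, using purely illustrative numbers (assume packed_max_value were 100 and sizeof(ref_packed) * align_packed_per_ref were 16; the real values are platform-dependent): suppose 104 bytes of garbage precede the referent prp. The forward scan first passes one aligned block of unmarked packed refs whose stored relocation had been clamped to packed_max_value (104 > 100), so dec becomes 16; it then stops at a full-size ref whose r_size holds 104 + 16 = 120, the total garbage preceding that ref within the scanned area. The 16 bytes lie after prp and must not be applied to it, so the result is prp - 120 + 16 = prp - 104, the correct new location.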
625
ref_packed *
626
igc_reloc_ref_ptr(const ref_packed * prp, gc_state_t *gcst)
627
224M
{
628
    /*
629
     * Search forward for relocation.  This algorithm is intrinsically very
630
     * inefficient; we hope eventually to replace it with a better one.
631
     */
632
224M
    const ref_packed *rp = prp;
633
#ifdef ALIGNMENT_ALIASING_BUG
634
    const ref *rpref;
635
# define RP_REF(rp) (rpref = (const ref *)rp, rpref)
636
#else
637
224M
# define RP_REF(rp) ((const ref *)rp)
638
224M
#endif
639
    /*
640
     * Iff this pointer points into a space that wasn't traced,
641
     * the referent won't be marked.  In this case, we shouldn't
642
     * do any relocation.  Check for this first.
643
     */
644
224M
    if (r_is_packed(rp)) {
645
75.3M
        if (!r_has_pmark(rp))
646
8
            goto ret_rp;
647
148M
    } else {
648
148M
        if (!r_has_attr(RP_REF(rp), l_mark))
649
1.79k
            goto ret_rp;
650
148M
    }
651
224M
    return igc_reloc_ref_ptr_nocheck(prp, gcst);
652
1.79k
ret_rp:
653
    /* Use a severely deprecated pun to remove the const property. */
654
1.79k
    {
655
1.79k
        union { const ref_packed *r; ref_packed *w; } u;
656
657
1.79k
        u.r = rp;
658
1.79k
        return u.w;
659
224M
    }
660
224M
}
661
662
/* ------ Compaction phase ------ */
663
664
/* Compact a ref object. */
665
/* Remove the marks at the same time. */
666
static void
667
refs_compact(const gs_memory_t *mem, obj_header_t * pre, obj_header_t * dpre, uint size)
668
18.8M
{
669
18.8M
    ref_packed *dest;
670
18.8M
    ref_packed *src;
671
18.8M
    ref_packed *end;
672
18.8M
    uint new_size;
673
674
   /* The test below controls an optimization
675
      for the loop termination condition.
676
      It was useful during development,
677
      when some assumptions were temporarily wrong.
678
      We keep it for the record. */
679
680
18.8M
    src = (ref_packed *) (pre + 1);
681
18.8M
    end = (ref_packed *) ((byte *) src + size);
682
    /*
683
     * We know that a block of refs always ends with a
684
     * full-size ref, so we only need to check for reaching the end
685
     * of the block when we see one of those.
686
     */
687
18.8M
    if (dpre == pre)    /* Loop while we don't need to copy. */
688
1.08G
        for (;;) {
689
1.08G
            if (r_is_packed(src)) {
690
108M
                if (!r_has_pmark(src))
691
3.01M
                    break;
692
108M
                if_debug1m('8', mem, "  [8]packed ref "PRI_INTPTR" \"copied\"\n",
693
105M
                          (intptr_t)src);
694
105M
                *src &= ~lp_mark;
695
105M
                src++;
696
971M
            } else {   /* full-size ref */
697
971M
                ref *const pref = (ref *)src;
698
699
971M
                if (!r_has_attr(pref, l_mark))
700
3.95M
                    break;
701
971M
                if_debug1m('8', mem, "  [8]ref "PRI_INTPTR" \"copied\"\n", (intptr_t)src);
702
967M
                r_clear_attrs(pref, l_mark);
703
967M
                src += packed_per_ref;
704
967M
            }
705
1.08G
    } else
706
11.9M
        *dpre = *pre;
707
18.8M
    dest = (ref_packed *) ((char *)dpre + ((char *)src - (char *)pre));
708
1.24G
    for (;;) {
709
1.24G
        if (r_is_packed(src)) {
710
751M
            if (r_has_pmark(src)) {
711
594M
                if_debug2m('8', mem, "  [8]packed ref "PRI_INTPTR" copied to "PRI_INTPTR"\n",
712
594M
                          (intptr_t)src, (intptr_t)dest);
713
594M
                *dest++ = *src & ~lp_mark;
714
594M
            }
715
751M
            src++;
716
751M
        } else {   /* full-size ref */
717
491M
            if (r_has_attr((ref *) src, l_mark)) {
718
441M
                ref rtemp;
719
720
441M
                if_debug2m('8', mem, "  [8]ref "PRI_INTPTR" copied to "PRI_INTPTR"\n",
721
441M
                           (intptr_t)src, (intptr_t)dest);
722
                /* We can't just use ref_assign_inline, */
723
                /* because the source and destination */
724
                /* might overlap! */
725
441M
                ref_assign_inline(&rtemp, (ref *) src);
726
441M
                r_clear_attrs(&rtemp, l_mark);
727
441M
                ref_assign_inline((ref *) dest, &rtemp);
728
441M
                src += packed_per_ref;
729
441M
                dest += packed_per_ref;
730
441M
            } else {   /* check for end of block */
731
50.5M
                src += packed_per_ref;
732
50.5M
                if (src >= end)
733
18.8M
                    break;
734
50.5M
            }
735
491M
        }
736
1.24G
    }
737
18.8M
    new_size = (byte *) dest - (byte *) (dpre + 1) + sizeof(ref);
738
#ifdef DEBUG
739
    /* Check that the relocation came out OK. */
740
    /* NOTE: this check only works within a single clump. */
741
    if ((byte *) src - (byte *) dest != r_size((ref *) src - 1) + sizeof(ref)) {
742
        mlprintf3(mem, "Reloc error for refs "PRI_INTPTR": reloc = %lu, stored = %u\n",
743
                 (intptr_t) dpre, (ulong) ((byte *) src - (byte *) dest),
744
                 (uint) r_size((ref *) src - 1));
745
        gs_abort(mem);
746
    }
747
#endif
748
    /* Pad to a multiple of sizeof(ref). */
749
42.8M
    while (new_size % sizeof(ref))
750
23.9M
        *dest++ = pt_tag(pt_integer),
751
23.9M
            new_size += sizeof(ref_packed);
752
    /* We want to make the newly freed space into a free block, */
753
    /* but we can only do this if we have enough room. */
754
18.8M
    if (size - new_size < sizeof(obj_header_t)) { /* Not enough room.  Pad to original size. */
755
12.8M
        while (new_size < size)
756
0
            *dest++ = pt_tag(pt_integer),
757
0
                new_size += sizeof(ref_packed);
758
12.8M
    } else {
759
6.01M
        obj_header_t *pfree = (obj_header_t *) ((ref *) dest + 1);
760
761
6.01M
        pfree->o_pad = 0;
762
6.01M
        pfree->o_alone = 0;
763
6.01M
        pfree->o_size = size - new_size - sizeof(obj_header_t);
764
6.01M
        pfree->o_type = &st_bytes;
765
6.01M
    }
766
    /* Re-create the final ref. */
767
18.8M
    r_set_type((ref *) dest, t_integer);
768
18.8M
    dpre->o_size = new_size;
769
18.8M
}