Coverage Report

Created: 2025-06-10 07:27

/src/ghostpdl/base/gsalloc.c
Line
Count
Source (jump to first uncovered line)
1
/* Copyright (C) 2001-2023 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  39 Mesa Street, Suite 108A, San Francisco,
13
   CA 94129, USA, for further information.
14
*/
15
16
17
/* Standard memory allocator */
18
#include "gx.h"
19
#include "memory_.h"
20
#include "gserrors.h"
21
#include "gsexit.h"
22
#include "gsmdebug.h"
23
#include "gsstruct.h"
24
#include "gxalloc.h"
25
#include "stream.h"   /* for clearing stream list */
26
#include "malloc_.h" /* For MEMENTO */
27
28
/*
29
 * Define whether to try consolidating space before adding a new clump.
30
 * The default is not to do this, because it is computationally
31
 * expensive and doesn't seem to help much.  However, this is done for
32
 * "controlled" spaces whether or not the #define is in effect.
33
 */
34
/*#define CONSOLIDATE_BEFORE_ADDING_CLUMP */
35
36
/*
37
 * This allocator produces tracing messages of the form
38
 *      [aNMOTS]...
39
 * where
40
 *   N is the VM space number, +1 if we are allocating from stable memory.
41
 *   M is : for movable objects, | for immovable,
42
 *   O is {alloc = +, free = -, grow = >, shrink = <},
43
 *   T is {bytes = b, object = <, ref = $, string = >}, and
44
 *   S is {small freelist = f, large freelist = F, LIFO = space,
45
 *      own clump = L, lost = #, lost own clump = ~, other = .}.
46
 */
47
#ifdef DEBUG
48
static int
49
alloc_trace_space(const gs_ref_memory_t *imem)
50
{
51
    return (int)(imem->space + (imem->stable_memory == (const gs_memory_t *)imem));
52
}
53
static void
54
alloc_trace(const char *chars, gs_ref_memory_t * imem, client_name_t cname,
55
            gs_memory_type_ptr_t stype, uint size, const void *ptr)
56
{
57
    if_debug7m('A', (const gs_memory_t *)imem, "[a%d%s]%s %s(%u) %s"PRI_INTPTR"\n",
58
               alloc_trace_space(imem), chars, client_name_string(cname),
59
               (ptr == 0 || stype == 0 ? "" :
60
                struct_type_name_string(stype)),
61
               size, (chars[1] == '+' ? "= " : ""), (intptr_t)ptr);
62
}
63
static bool
64
alloc_size_is_ok(gs_memory_type_ptr_t stype)
65
{
66
    return (stype->ssize > 0 && stype->ssize < 0x200000);
67
}
68
#  define ALLOC_CHECK_SIZE(mem,stype)\
69
    BEGIN\
70
      if (!alloc_size_is_ok(stype)) {\
71
        mlprintf2(mem,"size of struct type "PRI_INTPTR" is 0x%lx!\n",\
72
                  (intptr_t)(stype), (ulong)((stype)->ssize));\
73
        return 0;\
74
      }\
75
    END
76
#else
77
8.69G
#  define alloc_trace(chars, imem, cname, stype, size, ptr) DO_NOTHING
78
3.40G
#  define ALLOC_CHECK_SIZE(mem,stype) DO_NOTHING
79
#endif
80
81
/*
82
 * The structure descriptor for allocators.  Even though allocators
83
 * are allocated outside GC space, they reference objects within it.
84
 */
85
public_st_ref_memory();
86
static
87
36.9M
ENUM_PTRS_BEGIN(ref_memory_enum_ptrs) return 0;
88
6.16M
ENUM_PTR3(0, gs_ref_memory_t, streams, names_array, changes);
89
6.16M
ENUM_PTR(3, gs_ref_memory_t, saved);
90
36.9M
ENUM_PTR(4, gs_ref_memory_t, scan_limit);
91
36.9M
ENUM_PTRS_END
92
6.11M
static RELOC_PTRS_WITH(ref_memory_reloc_ptrs, gs_ref_memory_t *mptr)
93
6.11M
{
94
6.11M
    RELOC_PTR(gs_ref_memory_t, streams);
95
6.11M
    RELOC_PTR(gs_ref_memory_t, names_array);
96
6.11M
    RELOC_PTR(gs_ref_memory_t, changes);
97
6.11M
    RELOC_PTR(gs_ref_memory_t, scan_limit);
98
    /* Don't relocate the saved pointer now -- see igc.c for details. */
99
6.11M
    mptr->reloc_saved = RELOC_OBJ(mptr->saved);
100
6.11M
}
101
6.11M
RELOC_PTRS_END
102
103
/*
104
 * Define the flags for alloc_obj, which implements all but the fastest
105
 * case of allocation.
106
 */
107
typedef enum {
108
    ALLOC_IMMOVABLE = 1,
109
    ALLOC_DIRECT = 2    /* called directly, without fast-case checks */
110
} alloc_flags_t;
111
112
/* Forward references */
113
static void remove_range_from_freelist(gs_ref_memory_t *mem, void* bottom, void* top);
114
static obj_header_t *large_freelist_alloc(gs_ref_memory_t *mem, obj_size_t size);
115
static obj_header_t *scavenge_low_free(gs_ref_memory_t *mem, unsigned request_size);
116
static size_t compute_free_objects(gs_ref_memory_t *);
117
static obj_header_t *alloc_obj(gs_ref_memory_t *, obj_size_t, gs_memory_type_ptr_t, alloc_flags_t, client_name_t);
118
static void consolidate_clump_free(clump_t *cp, gs_ref_memory_t *mem);
119
static void trim_obj(gs_ref_memory_t *mem, obj_header_t *obj, obj_size_t size, clump_t *cp);
120
static clump_t *alloc_acquire_clump(gs_ref_memory_t *, size_t, bool, client_name_t);
121
static clump_t *alloc_add_clump(gs_ref_memory_t *, size_t, client_name_t);
122
void alloc_close_clump(gs_ref_memory_t *);
123
124
/*
125
 * Define the standard implementation (with garbage collection)
126
 * of Ghostscript's memory manager interface.
127
 */
128
/* Raw memory procedures */
129
static gs_memory_proc_alloc_bytes(i_alloc_bytes_immovable);
130
static gs_memory_proc_resize_object(i_resize_object);
131
static gs_memory_proc_free_object(i_free_object);
132
static gs_memory_proc_stable(i_stable);
133
static gs_memory_proc_status(i_status);
134
static gs_memory_proc_free_all(i_free_all);
135
static gs_memory_proc_consolidate_free(i_consolidate_free);
136
137
/* Object memory procedures */
138
static gs_memory_proc_alloc_bytes(i_alloc_bytes);
139
static gs_memory_proc_alloc_struct(i_alloc_struct);
140
static gs_memory_proc_alloc_struct(i_alloc_struct_immovable);
141
static gs_memory_proc_alloc_byte_array(i_alloc_byte_array);
142
static gs_memory_proc_alloc_byte_array(i_alloc_byte_array_immovable);
143
static gs_memory_proc_alloc_struct_array(i_alloc_struct_array);
144
static gs_memory_proc_alloc_struct_array(i_alloc_struct_array_immovable);
145
static gs_memory_proc_object_size(i_object_size);
146
static gs_memory_proc_object_type(i_object_type);
147
static gs_memory_proc_alloc_string(i_alloc_string);
148
static gs_memory_proc_alloc_string(i_alloc_string_immovable);
149
static gs_memory_proc_resize_string(i_resize_string);
150
static gs_memory_proc_free_string(i_free_string);
151
static gs_memory_proc_register_root(i_register_root);
152
static gs_memory_proc_unregister_root(i_unregister_root);
153
static gs_memory_proc_enable_free(i_enable_free);
154
static gs_memory_proc_set_object_type(i_set_object_type);
155
static gs_memory_proc_defer_frees(i_defer_frees);
156
157
/* We export the procedures for subclasses. */
158
const gs_memory_procs_t gs_ref_memory_procs =
159
{
160
    /* Raw memory procedures */
161
    i_alloc_bytes_immovable,
162
    i_resize_object,
163
    i_free_object,
164
    i_stable,
165
    i_status,
166
    i_free_all,
167
    i_consolidate_free,
168
    /* Object memory procedures */
169
    i_alloc_bytes,
170
    i_alloc_struct,
171
    i_alloc_struct_immovable,
172
    i_alloc_byte_array,
173
    i_alloc_byte_array_immovable,
174
    i_alloc_struct_array,
175
    i_alloc_struct_array_immovable,
176
    i_object_size,
177
    i_object_type,
178
    i_alloc_string,
179
    i_alloc_string_immovable,
180
    i_resize_string,
181
    i_free_string,
182
    i_register_root,
183
    i_unregister_root,
184
    i_enable_free,
185
    i_set_object_type,
186
    i_defer_frees
187
};
188
189
/*
190
 * Previous versions of this code used a simple linked list of
191
 * clumps. We change here to use a splay tree of clumps.
192
 * Splay Trees can be found documented in "Algorithms and Data
193
 * Structures" by Jeffrey H Kingston.
194
 *
195
 * Essentially they are binary trees, ordered by address of the
196
 * 'cbase' pointer. The 'cunning' feature with them is that
197
 * when a node in the tree is accessed, we do a 'move to root'
198
 * operation. This involves performing various 'rotations' as
199
 * we move up the tree, the net effect of which tends to
200
 * lead to more balanced trees (see Kingston for analysis).
201
 * It also leads to better locality of reference in that
202
 * recently accessed nodes stay near the root.
203
 */
204
205
/* #define DEBUG_CLUMPS */
206
#ifdef DEBUG_CLUMPS
207
#define SANITY_CHECK(cp) sanity_check(cp)
208
#define SANITY_CHECK_MID(cp) sanity_check_mid(cp)
209
210
static void
211
broken_splay()
212
{
213
    dlprintf("Broken splay tree!\n");
214
}
215
216
void sanity_check_rec(clump_t *cp)
217
{
218
    splay_dir_t from = SPLAY_FROM_ABOVE;
219
220
    while (cp)
221
    {
222
        if (from == SPLAY_FROM_ABOVE)
223
        {
224
            /* We have arrived from above. Step left. */
225
            if (cp->left)
226
            {
227
                if (cp->left->cbase > cp->cbase || cp->left->parent != cp)
228
                    broken_splay();
229
                cp = cp->left;
230
                from = SPLAY_FROM_ABOVE;
231
                continue;
232
            }
233
            /* No left to step to, so imagine we have just arrived from there */
234
            from = SPLAY_FROM_LEFT;
235
        }
236
        if (from == SPLAY_FROM_LEFT)
237
        {
238
            /* We have arrived from the left. Step right. */
239
            if (cp->right)
240
            {
241
                if (cp->right->cbase < cp->cbase || cp->right->parent != cp)
242
                    broken_splay();
243
                cp = cp->right;
244
                from = SPLAY_FROM_ABOVE;
245
                continue;
246
            }
247
            /* No right to step to, so imagine we have just arrived from there. */
248
            from = SPLAY_FROM_RIGHT;
249
        }
250
        if (from == SPLAY_FROM_RIGHT)
251
        {
252
            /* We have arrived from the right. Step up. */
253
            if (cp->parent == NULL)
254
                break;
255
            if (cp->parent->left != cp && cp->parent->right != cp)
256
                broken_splay();
257
            from = (cp->parent->left == cp ? SPLAY_FROM_LEFT : SPLAY_FROM_RIGHT);
258
            cp = cp->parent;
259
        }
260
    }
261
}
262
263
void sanity_check(clump_t *cp)
264
{
265
    sanity_check_rec(cp);
266
}
267
268
void sanity_check_mid(clump_t *cp)
269
{
270
    clump_t *parent;
271
272
    while ((parent = cp->parent) != NULL)
273
    {
274
        if (parent->left == cp)
275
        {
276
            if (parent->right == cp)
277
                broken_splay();
278
        }
279
        else if (parent->right != cp)
280
            broken_splay();
281
        cp = parent;
282
    }
283
284
    sanity_check_rec(cp);
285
}
286
287
#else
288
528M
#define SANITY_CHECK(cp) while (0) {}
289
2.12G
#define SANITY_CHECK_MID(cp) while (0) {}
290
#endif
291
292
/* When initing with the root, we want to pass the smallest inorder one
293
 * back immediately, and set it up so that we step right for the next
294
 * one. */
295
clump_t *
296
clump_splay_walk_init(clump_splay_walker *sw, const gs_ref_memory_t *mem)
297
44.7M
{
298
44.7M
    clump_t *cp = mem->root;
299
300
44.7M
    if (cp)
301
44.6M
    {
302
44.6M
        SANITY_CHECK(cp);
303
304
44.6M
        sw->from = SPLAY_FROM_LEFT;
305
179M
        while (cp->left)
306
134M
        {
307
134M
            cp = cp->left;
308
134M
        }
309
44.6M
    }
310
44.7M
    sw->cp = cp;
311
44.7M
    sw->end = NULL;
312
44.7M
    return cp;
313
44.7M
}
314
315
clump_t *
316
clump_splay_walk_bwd_init(clump_splay_walker *sw, const gs_ref_memory_t *mem)
317
2.13M
{
318
2.13M
    clump_t *cp = mem->root;
319
320
2.13M
    if (cp)
321
2.13M
    {
322
2.13M
        SANITY_CHECK(cp);
323
324
2.13M
        sw->from = SPLAY_FROM_RIGHT;
325
5.46M
        while (cp->right)
326
3.32M
        {
327
3.32M
            cp = cp->right;
328
3.32M
        }
329
2.13M
    }
330
2.13M
    sw->cp = cp;
331
2.13M
    sw->end = NULL;
332
2.13M
    return cp;
333
2.13M
}
334
335
/* When initing 'mid walk' (i.e. with a non-root node), we want to
336
 * return the node we are given as the first one, and continue
337
 * onwards in an in order fashion.
338
 */
339
clump_t *
340
clump_splay_walk_init_mid(clump_splay_walker *sw, clump_t *cp)
341
1.88G
{
342
1.88G
    sw->from = SPLAY_FROM_LEFT;
343
1.88G
    sw->cp = cp;
344
1.88G
    sw->end = cp;
345
1.88G
    if (cp)
346
1.88G
    {
347
1.88G
        SANITY_CHECK_MID(cp);
348
1.88G
    }
349
1.88G
    return cp;
350
1.88G
}
351
352
clump_t *
353
clump_splay_walk_fwd(clump_splay_walker *sw)
354
2.05G
{
355
2.05G
    clump_t *cp = sw->cp;
356
2.05G
    int from = sw->from;
357
358
2.05G
    if (cp == NULL)
359
114k
        return NULL;
360
361
    /* We step through the tree, and stop when we arrive
362
     * at sw->end in an in order manner (i.e. by moving from
363
     * the left). */
364
4.56G
    while (1)
365
4.56G
    {
366
4.56G
        if (from == SPLAY_FROM_ABOVE)
367
1.90G
        {
368
            /* We have arrived from above. Step left. */
369
1.90G
            if (cp->left)
370
1.26G
            {
371
1.26G
                cp = cp->left;
372
1.26G
                from = SPLAY_FROM_ABOVE;
373
1.26G
                continue;
374
1.26G
            }
375
            /* No left to step to, so imagine we have just arrived from there */
376
641M
            from = SPLAY_FROM_LEFT;
377
            /* Have we reached the stopping point? */
378
641M
            if (cp == sw->end)
379
3.51M
                cp = NULL;
380
            /* We want to stop here, for inorder operation. So break out of the loop. */
381
641M
            break;
382
1.90G
        }
383
2.65G
        if (from == SPLAY_FROM_LEFT)
384
2.05G
        {
385
            /* We have arrived from the left. Step right. */
386
2.05G
            if (cp->right)
387
620M
            {
388
620M
                cp = cp->right;
389
620M
                from = SPLAY_FROM_ABOVE;
390
620M
                continue;
391
620M
            }
392
            /* No right to step to, so imagine we have just arrived from there. */
393
1.43G
            from = SPLAY_FROM_RIGHT;
394
1.43G
        }
395
2.03G
        if (from == SPLAY_FROM_RIGHT)
396
2.03G
        {
397
            /* We have arrived from the right. Step up. */
398
2.03G
            clump_t *old = cp;
399
2.03G
            cp = cp->parent;
400
2.03G
            if (cp == NULL)
401
66.3M
            {
402
                /* We've reached the root of the tree. Is this our stopping point? */
403
66.3M
                if (sw->end == NULL)
404
44.6M
                    break;
405
                /* If not, step on. */
406
21.7M
                cp = old;
407
21.7M
                from = SPLAY_FROM_ABOVE;
408
21.7M
            }
409
1.97G
            else
410
1.97G
            {
411
1.97G
                from = (cp->left == old ? SPLAY_FROM_LEFT : SPLAY_FROM_RIGHT);
412
1.97G
                if (from == SPLAY_FROM_LEFT)
413
1.36G
                {
414
                    /* Have we reached the stopping point? */
415
1.36G
                    if (cp == sw->end)
416
13.2M
                        cp = NULL;
417
1.36G
                    break;
418
1.36G
                }
419
1.97G
            }
420
2.03G
        }
421
2.03G
    }
422
2.05G
    sw->cp = cp;
423
2.05G
    sw->from = from;
424
2.05G
    return cp;
425
2.05G
}
426
427
clump_t *
428
clump_splay_walk_bwd(clump_splay_walker *sw)
429
33.0M
{
430
33.0M
    clump_t *cp = sw->cp;
431
33.0M
    int from = sw->from;
432
433
33.0M
    if (cp == NULL)
434
0
        return NULL;
435
436
    /* We step backwards through the tree, and stop when we arrive
437
     * at sw->end in a reverse in order manner (i.e. by moving from
438
     * the right). */
439
78.0M
    while (1)
440
78.0M
    {
441
78.0M
        if (from == SPLAY_FROM_ABOVE)
442
27.6M
        {
443
            /* We have arrived from above. Step right. */
444
27.6M
            if (cp->right)
445
10.2M
            {
446
10.2M
                cp = cp->right;
447
10.2M
                from = SPLAY_FROM_ABOVE;
448
10.2M
                continue;
449
10.2M
            }
450
            /* No right to step to, so imagine we have just arrived from there. */
451
17.3M
            from = SPLAY_FROM_RIGHT;
452
            /* Have we reached our end? */
453
17.3M
            if (cp == sw->end)
454
0
                cp = NULL;
455
            /* Stop to run inorder operation */
456
17.3M
            break;
457
27.6M
        }
458
50.4M
        if (from == SPLAY_FROM_RIGHT)
459
33.0M
        {
460
            /* We have arrived from the right. Step left. */
461
33.0M
            if (cp->left)
462
17.3M
            {
463
17.3M
                cp = cp->left;
464
17.3M
                from = SPLAY_FROM_ABOVE;
465
17.3M
                continue;
466
17.3M
            }
467
            /* No left to step to, so imagine we have just arrived from there. */
468
15.7M
            from = SPLAY_FROM_LEFT;
469
15.7M
        }
470
33.0M
        if (from == SPLAY_FROM_LEFT)
471
33.0M
        {
472
            /* We have arrived from the left. Step up. */
473
33.0M
            clump_t *old = cp;
474
33.0M
            cp = cp->parent;
475
33.0M
            from = (cp == NULL || cp->left != old ? SPLAY_FROM_RIGHT : SPLAY_FROM_LEFT);
476
33.0M
            if (from == SPLAY_FROM_RIGHT)
477
15.7M
            {
478
15.7M
                if (cp == sw->end)
479
2.13M
                    cp = NULL;
480
15.7M
                break;
481
15.7M
            }
482
33.0M
        }
483
33.0M
    }
484
33.0M
    sw->cp = cp;
485
33.0M
    sw->from = from;
486
33.0M
    return cp;
487
33.0M
}
488
489
static clump_t *
490
clump_splay_remove(clump_t *cp, gs_ref_memory_t *imem)
491
413M
{
492
413M
    clump_t *replacement;
493
494
413M
    if (cp->left == NULL)
495
69.8M
    {
496
        /* At most one child - easy */
497
69.8M
        replacement = cp->right;
498
69.8M
    }
499
343M
    else if (cp->right == NULL)
500
170M
    {
501
        /* Strictly one child - easy */
502
170M
        replacement = cp->left;
503
170M
    }
504
172M
    else
505
172M
    {
506
        /* 2 Children - tricky */
507
        /* Find in-order predecessor to f */
508
172M
        replacement = cp->left;
509
201M
        while (replacement->right)
510
28.6M
            replacement = replacement->right;
511
        /* Remove replacement - easy as just one child */
512
172M
        (void)clump_splay_remove(replacement, imem);
513
        /* Replace cp with replacement */
514
172M
        if (cp->left)
515
158M
            cp->left->parent = replacement;
516
172M
        cp->right->parent = replacement;
517
172M
        replacement->left = cp->left;
518
172M
        replacement->right = cp->right;
519
172M
    }
520
413M
    if (cp->parent)
521
206M
    {
522
206M
        if (cp->parent->left == cp)
523
173M
            cp->parent->left = replacement;
524
33.0M
        else
525
33.0M
            cp->parent->right = replacement;
526
206M
    }
527
206M
    else
528
206M
        imem->root = replacement;
529
413M
    if (replacement)
530
360M
        replacement->parent = cp->parent;
531
413M
    return replacement;
532
413M
}
533
534
/* Here we apply a function to all the nodes in a tree in
535
 * depth first order. This means that the given function
536
 * can safely alter: 1) the clump, 2) it's children,
537
 * 3) it's parents child pointer that points to it
538
 * without fear of corruption. Specifically this means
539
 * that the function can free (and unlink) the node
540
 * if it wants.
541
 */
542
clump_t *
543
clump_splay_app(clump_t *root, gs_ref_memory_t *imem, splay_app_result_t (*fn)(clump_t *, void *), void *arg)
544
9.79M
{
545
9.79M
    clump_t *step_to;
546
9.79M
    clump_t *cp = root;
547
9.79M
    int from = SPLAY_FROM_ABOVE;
548
9.79M
    splay_app_result_t res;
549
550
9.79M
    SANITY_CHECK(cp);
551
552
203M
    while (cp)
553
194M
    {
554
194M
        if (from == SPLAY_FROM_ABOVE)
555
102M
        {
556
            /* We have arrived from above. Step left. */
557
102M
            step_to = cp->left;
558
102M
            if (step_to)
559
53.9M
            {
560
53.9M
                from = SPLAY_FROM_ABOVE;
561
53.9M
                cp = step_to;
562
53.9M
            }
563
48.1M
            else
564
48.1M
            {
565
                /* No left to step to, so imagine we have just arrived from the left */
566
48.1M
                from = SPLAY_FROM_LEFT;
567
48.1M
            }
568
102M
        }
569
194M
        if (from == SPLAY_FROM_LEFT)
570
102M
        {
571
            /* We have arrived from the left. Step right. */
572
102M
            step_to = cp->right;
573
102M
            if (step_to)
574
38.3M
            {
575
38.3M
                from = SPLAY_FROM_ABOVE;
576
38.3M
                cp = step_to;
577
38.3M
            }
578
63.7M
            else
579
63.7M
            {
580
                /* No right to step to, so imagine we have just arrived from the right. */
581
63.7M
                from = SPLAY_FROM_RIGHT;
582
63.7M
            }
583
102M
        }
584
194M
        if (from == SPLAY_FROM_RIGHT)
585
102M
        {
586
            /* We have arrived from the right. Step up. */
587
102M
            step_to = cp->parent;
588
102M
            if (step_to)
589
92.3M
            {
590
92.3M
                from = (step_to->left == cp ? SPLAY_FROM_LEFT : SPLAY_FROM_RIGHT);
591
92.3M
            }
592
102M
            res = fn(cp, arg);
593
102M
            if (res & SPLAY_APP_STOP)
594
799k
                return cp;
595
101M
            cp = step_to;
596
101M
        }
597
194M
    }
598
8.99M
    return cp;
599
9.79M
}
600
601
/* Move the given node to the root of the tree, by
602
 * performing a series of the following rotations.
603
 * The key observation here is that all these
604
 * rotations preserve the ordering of the tree, and
605
 * result in 'x' getting higher.
606
 *
607
 * Case 1:   z          x           Case 1b:   z                   x
608
 *          # #        # #                    # #                 # #
609
 *         y   D      A   y                  A   y               y   D
610
 *        # #     =>     # #                    # #     =>      # #
611
 *       x   C          B   z                  B   x           z   C
612
 *      # #                # #                    # #         # #
613
 *     A   B              C   D                  C   D       A   B
614
 *
615
 * Case 2:   z             x        Case 2b:   z                  x
616
 *          # #          ## ##                # #               ## ##
617
 *         y   D        y     z              A   y             z     y
618
 *        # #     =>   # #   # #                # #     =>    # #   # #
619
 *       A   x        A   B C   D              x   D         A   B C   D
620
 *          # #                               # #
621
 *         B   C                             B   C
622
 *
623
 * Case 3:   y          x           Case 3b:  y                  x
624
 *          # #        # #                   # #                # #
625
 *         x   C  =>  A   y                 A   x       =>     y   C
626
 *        # #            # #                   # #            # #
627
 *       A   B          B   C                 B   C          A   B
628
 */
629
static void
630
splay_move_to_root(clump_t *x, gs_ref_memory_t *mem)
631
1.59G
{
632
1.59G
    clump_t *y, *z;
633
634
1.59G
    if (x == NULL)
635
0
        return;
636
637
3.33G
    while ((y = x->parent) != NULL) {
638
1.74G
        if ((z = y->parent) != NULL) {
639
906M
            x->parent = z->parent;
640
906M
            if (x->parent) {
641
341M
                if (x->parent->left == z)
642
175M
                    x->parent->left = x;
643
165M
                else
644
165M
                    x->parent->right  = x;
645
341M
            }
646
906M
            y->parent = x;
647
            /* Case 1, 1b, 2 or 2b */
648
906M
            if (y->left == x) {
649
                /* Case 1 or 2b */
650
547M
                if (z->left == y) {
651
                    /* Case 1 */
652
294M
                    y->left = x->right;
653
294M
                    if (y->left)
654
223M
                        y->left->parent = y;
655
294M
                    z->left = y->right;
656
294M
                    if (z->left)
657
209M
                        z->left->parent = z;
658
294M
                    y->right = z;
659
294M
                    z->parent = y;
660
294M
                } else {
661
                    /* Case 2b */
662
253M
                    z->right = x->left;
663
253M
                    if (z->right)
664
58.9M
                        z->right->parent = z;
665
253M
                    y->left = x->right;
666
253M
                    if (y->left)
667
65.9M
                        y->left->parent = y;
668
253M
                    x->left = z;
669
253M
                    z->parent = x;
670
253M
                }
671
547M
                x->right      = y;
672
547M
            } else {
673
                /* Case 2 or 1b */
674
358M
                if (z->left == y) {
675
                    /* Case 2 */
676
98.5M
                    y->right = x->left;
677
98.5M
                    if (y->right)
678
63.7M
                        y->right->parent = y;
679
98.5M
                    z->left = x->right;
680
98.5M
                    if (z->left)
681
60.8M
                        z->left->parent = z;
682
98.5M
                    x->right = z;
683
98.5M
                    z->parent = x;
684
260M
                } else {
685
                    /* Case 1b */
686
260M
                    z->right = y->left;
687
260M
                    if (z->right)
688
192M
                        z->right->parent = z;
689
260M
                    y->right = x->left;
690
260M
                    if (y->right)
691
190M
                        y->right->parent = y;
692
260M
                    y->left = z;
693
260M
                    z->parent = y;
694
260M
                }
695
358M
                x->left = y;
696
358M
            }
697
906M
        } else {
698
            /* Case 3 or 3b */
699
838M
            x->parent = NULL;
700
838M
            y->parent = x;
701
838M
            if (y->left == x) {
702
                /* Case 3 */
703
399M
                y->left = x->right;
704
399M
                if (y->left)
705
319M
                    y->left->parent = y;
706
399M
                x->right = y;
707
439M
            } else {
708
                /* Case 3b */
709
439M
                y->right = x->left;
710
439M
                if (y->right)
711
336M
                    y->right->parent = y;
712
439M
                x->left = y;
713
439M
            }
714
838M
        }
715
1.74G
    }
716
1.59G
    mem->root = x;
717
1.59G
}
718
719
static void
720
splay_insert(clump_t *cp, gs_ref_memory_t *mem)
721
239M
{
722
239M
    clump_t *node = NULL;
723
239M
    clump_t **root = &mem->root;
724
725
808M
    while (*root) {
726
568M
        node = *root;
727
568M
        if (PTR_LT(cp->cbase, node->cbase)) {
728
284M
            root = &node->left;
729
284M
        } else {
730
283M
            root = &node->right;
731
283M
        }
732
568M
    }
733
239M
    *root = cp;
734
239M
    cp->left = NULL;
735
239M
    cp->right = NULL;
736
239M
    cp->parent = node;
737
239M
    splay_move_to_root(cp, mem);
738
239M
}
739
740
/*
741
 * Allocate and mostly initialize the state of an allocator (system, global,
742
 * or local).  Does not initialize global or space.
743
 */
744
static void *ialloc_solo(gs_memory_t *, gs_memory_type_ptr_t,
745
                          clump_t **);
746
gs_ref_memory_t *
747
ialloc_alloc_state(gs_memory_t * parent, uint clump_size)
748
799k
{
749
799k
    clump_t *cp;
750
799k
    gs_ref_memory_t *iimem = ialloc_solo(parent, &st_ref_memory, &cp);
751
752
799k
    if (iimem == 0)
753
0
        return 0;
754
799k
    iimem->stable_memory = (gs_memory_t *)iimem;
755
799k
    iimem->procs = gs_ref_memory_procs;
756
799k
    iimem->gs_lib_ctx = parent->gs_lib_ctx;
757
799k
    iimem->non_gc_memory = parent;
758
799k
    iimem->thread_safe_memory = parent->thread_safe_memory;
759
799k
    iimem->clump_size = clump_size;
760
#if defined(MEMENTO) || defined(SINGLE_OBJECT_MEMORY_BLOCKS_ONLY)
761
    iimem->large_size = 1;
762
#else
763
799k
    iimem->large_size = ((clump_size / 4) & -obj_align_mod) + 1;
764
799k
#endif
765
799k
    iimem->is_controlled = false;
766
799k
    iimem->gc_status.vm_threshold = clump_size * 3L;
767
799k
    iimem->gc_status.max_vm = MAX_MAX_VM;
768
799k
    iimem->gc_status.signal_value = 0;
769
799k
    iimem->gc_status.enabled = false;
770
799k
    iimem->gc_status.requested = 0;
771
799k
    iimem->gc_allocated = 0;
772
799k
    iimem->previous_status.allocated = 0;
773
799k
    iimem->previous_status.used = 0;
774
799k
    ialloc_reset(iimem);
775
799k
    iimem->root = cp;
776
799k
    ialloc_set_limit(iimem);
777
799k
    iimem->cc = NULL;
778
799k
    iimem->save_level = 0;
779
799k
    iimem->new_mask = 0;
780
799k
    iimem->test_mask = ~0;
781
799k
    iimem->streams = 0;
782
799k
    iimem->names_array = 0;
783
799k
    iimem->roots = 0;
784
799k
    iimem->num_contexts = 0;
785
799k
    iimem->saved = 0;
786
799k
    return iimem;
787
799k
}
788
789
/* Allocate a 'solo' object with its own clump. */
790
static void *
791
ialloc_solo(gs_memory_t * parent, gs_memory_type_ptr_t pstype,
792
            clump_t ** pcp)
793
799k
{ /*
794
         * We can't assume that the parent uses the same object header
795
         * that we do, but the GC requires that allocators have
796
         * such a header.  Therefore, we prepend one explicitly.
797
         */
798
799k
    clump_t *cp =
799
799k
        gs_raw_alloc_struct_immovable(parent, &st_clump,
800
799k
                                      "ialloc_solo(clump)");
801
799k
    uint csize =
802
799k
        ROUND_UP(sizeof(clump_head_t) + sizeof(obj_header_t) +
803
799k
                 pstype->ssize,
804
799k
                 obj_align_mod);
805
799k
    byte *cdata = gs_alloc_bytes_immovable(parent, csize, "ialloc_solo");
806
799k
    obj_header_t *obj = (obj_header_t *) (cdata + sizeof(clump_head_t));
807
808
799k
    if (cp == 0 || cdata == 0) {
809
0
        gs_free_object(parent, cp, "ialloc_solo(allocation failure)");
810
0
        gs_free_object(parent, cdata, "ialloc_solo(allocation failure)");
811
0
        return 0;
812
0
    }
813
799k
    alloc_init_clump(cp, cdata, cdata + csize, false, (clump_t *) NULL);
814
799k
    cp->cbot = cp->ctop;
815
799k
    cp->parent = cp->left = cp->right = 0;
816
799k
    cp->c_alone = true;
817
    /* Construct the object header "by hand". */
818
799k
    obj->o_pad = 0;
819
799k
    obj->o_alone = 1;
820
799k
    obj->o_size = pstype->ssize;
821
799k
    obj->o_type = pstype;
822
799k
    *pcp = cp;
823
799k
    return (void *)(obj + 1);
824
799k
}
825
826
void
827
ialloc_free_state(gs_ref_memory_t *iimem)
828
0
{
829
0
    clump_t *cp;
830
0
    gs_memory_t *mem;
831
0
    if (iimem == NULL)
832
0
        return;
833
0
    cp = iimem->root;
834
0
    mem = iimem->non_gc_memory;
835
0
    if (cp == NULL)
836
0
        return;
837
0
    gs_free_object(mem, cp->chead, "ialloc_solo(allocation failure)");
838
0
    gs_free_object(mem, cp, "ialloc_solo(allocation failure)");
839
0
}
840
841
/*
842
 * Add a clump to an externally controlled allocator.  Such allocators
843
 * allocate all objects as immovable, are not garbage-collected, and
844
 * don't attempt to acquire additional memory on their own.
845
 */
846
int
847
ialloc_add_clump(gs_ref_memory_t *imem, ulong space, client_name_t cname)
848
0
{
849
0
    clump_t *cp;
850
851
    /* Allow acquisition of this clump. */
852
0
    imem->is_controlled = false;
853
0
    imem->large_size = imem->clump_size;
854
0
    imem->limit = imem->gc_status.max_vm = MAX_MAX_VM;
855
856
    /* Acquire the clump. */
857
0
    cp = alloc_add_clump(imem, space, cname);
858
859
    /*
860
     * Make all allocations immovable.  Since the "movable" allocators
861
     * allocate within existing clumps, whereas the "immovable" ones
862
     * allocate in new clumps, we equate the latter to the former, even
863
     * though this seems backwards.
864
     */
865
0
    imem->procs.alloc_bytes_immovable = imem->procs.alloc_bytes;
866
0
    imem->procs.alloc_struct_immovable = imem->procs.alloc_struct;
867
0
    imem->procs.alloc_byte_array_immovable = imem->procs.alloc_byte_array;
868
0
    imem->procs.alloc_struct_array_immovable = imem->procs.alloc_struct_array;
869
0
    imem->procs.alloc_string_immovable = imem->procs.alloc_string;
870
871
    /* Disable acquisition of additional clumps. */
872
0
    imem->is_controlled = true;
873
0
    imem->limit = 0;
874
875
0
    return (cp ? 0 : gs_note_error(gs_error_VMerror));
876
0
}
877
878
/* Prepare for a GC by clearing the stream list. */
879
/* This probably belongs somewhere else.... */
880
void
881
ialloc_gc_prepare(gs_ref_memory_t * mem)
882
6.05M
{ /*
883
         * We have to unlink every stream from its neighbors,
884
         * so that referenced streams don't keep all streams around.
885
         */
886
6.05M
    while (mem->streams != 0) {
887
0
        stream *s = mem->streams;
888
889
0
        mem->streams = s->next;
890
0
        s->prev = s->next = 0;
891
0
    }
892
6.05M
}
893
894
/* Initialize after a save. */
895
void
ialloc_reset(gs_ref_memory_t * mem)
{
    /* Forget all clumps and per-interval accounting; the caller is
     * responsible for any clumps that must survive (see
     * ialloc_alloc_state, which restores its solo clump afterwards). */
    mem->root = 0;
    mem->cc = NULL;             /* no clump currently open for allocation */
    mem->allocated = 0;
    mem->changes = 0;
    mem->scan_limit = 0;
    mem->total_scanned = 0;
    mem->total_scanned_after_compacting = 0;
    ialloc_reset_free(mem);
}
907
908
/* Initialize after a save or GC. */
909
void
910
ialloc_reset_free(gs_ref_memory_t * mem)
911
8.19M
{
912
8.19M
    int i;
913
8.19M
    obj_header_t **p;
914
915
8.19M
    mem->lost.objects = 0;
916
8.19M
    mem->lost.refs = 0;
917
8.19M
    mem->lost.strings = 0;
918
8.19M
    mem->cfreed.cp = 0;
919
844M
    for (i = 0, p = &mem->freelists[0]; i < num_freelists; i++, p++)
920
836M
        *p = 0;
921
8.19M
    mem->largest_free_size = 0;
922
8.19M
}
923
924
/*
925
 * Set an arbitrary limit so that the amount of allocated VM does not grow
926
 * indefinitely even when GC is disabled.  Benchmarks have shown that
927
 * the resulting GC's are infrequent enough not to degrade performance
928
 * significantly.
929
 */
930
#define FORCE_GC_LIMIT 8000000
931
932
/* Set the allocation limit after a change in one or more of */
933
/* vm_threshold, max_vm, or enabled, or after a GC. */
934
void
ialloc_set_limit(register gs_ref_memory_t * mem)
{ /*
         * The following code is intended to set the limit so that
         * we stop allocating when allocated + previous_status.allocated
         * exceeds the lesser of max_vm or (if GC is enabled)
         * gc_allocated + vm_threshold.
         */
    /* Headroom below max_vm, clamped at 0 if already exceeded. */
    size_t max_allocated =
    (mem->gc_status.max_vm > mem->previous_status.allocated ?
     mem->gc_status.max_vm - mem->previous_status.allocated :
     0);

    if (mem->gc_status.enabled) {
        size_t limit = mem->gc_allocated + mem->gc_status.vm_threshold;

        if (limit < mem->previous_status.allocated)
            mem->limit = 0;     /* threshold already consumed by outer levels */
        else {
            limit -= mem->previous_status.allocated;
            mem->limit = min(limit, max_allocated);
        }
    } else
        /* GC disabled: still cap growth via FORCE_GC_LIMIT so VM does
         * not grow indefinitely (see comment above the #define). */
        mem->limit = min(max_allocated, mem->gc_allocated + FORCE_GC_LIMIT);
    if_debug7m('0', (const gs_memory_t *)mem,
               "[0]space=%d, max_vm=%"PRIdSIZE", prev.alloc=%"PRIdSIZE", enabled=%d, "
               "gc_alloc=%"PRIdSIZE", threshold=%"PRIdSIZE" => limit=%"PRIdSIZE"\n",
               mem->space, mem->gc_status.max_vm,
               mem->previous_status.allocated,
               mem->gc_status.enabled, mem->gc_allocated,
               mem->gc_status.vm_threshold, mem->limit);
}
966
967
/* Context handed to the splay-tree walkers used by i_free_all. */
struct free_data
{
    gs_ref_memory_t *imem;      /* allocator whose clumps are being freed */
    clump_t         *allocator; /* clump whose data block holds the allocator itself */
};
972
973
static splay_app_result_t
974
free_all_not_allocator(clump_t *cp, void *arg)
975
33.8M
{
976
33.8M
    struct free_data *fd = (struct free_data *)arg;
977
978
33.8M
    if (cp->cbase + sizeof(obj_header_t) != (byte *)fd->imem)
979
32.2M
        alloc_free_clump(cp, fd->imem);
980
1.59M
    else
981
1.59M
        fd->allocator = cp;
982
983
33.8M
    return SPLAY_APP_CONTINUE;
984
33.8M
}
985
986
static splay_app_result_t
987
free_all_allocator(clump_t *cp, void *arg)
988
799k
{
989
799k
    struct free_data *fd = (struct free_data *)arg;
990
991
799k
    if (cp->cbase + sizeof(obj_header_t) != (byte *)fd->imem)
992
0
        return SPLAY_APP_CONTINUE;
993
994
799k
    fd->allocator = cp;
995
799k
    alloc_free_clump(cp, fd->imem);
996
799k
    return SPLAY_APP_STOP;
997
799k
}
998
999
/*
1000
 * Free all the memory owned by the allocator, except the allocator itself.
1001
 * Note that this only frees memory at the current save level: the client
1002
 * is responsible for restoring to the outermost level if desired.
1003
 */
1004
static void
i_free_all(gs_memory_t * mem, uint free_mask, client_name_t cname)
{
    gs_ref_memory_t * imem = (gs_ref_memory_t *)mem;
    struct free_data fd;

    fd.imem = imem;
    fd.allocator = NULL;

    if (free_mask & FREE_ALL_DATA && imem->root != NULL) {
        /* Free every clump except the allocator */
        clump_splay_app(imem->root, imem, free_all_not_allocator, &fd);

        /* Reinstate the allocator as the sole clump */
        imem->root = fd.allocator;
        if (fd.allocator)
            fd.allocator->parent = fd.allocator->left = fd.allocator->right = NULL;
    }
    if (free_mask & FREE_ALL_ALLOCATOR) {
        /* Walk the tree to find the allocator. */
        /* NOTE(review): if FREE_ALL_DATA was also set, imem->root is
         * already the allocator clump; the walk then frees it. */
        clump_splay_app(imem->root, imem, free_all_allocator, &fd);
    }
}
1027
1028
/* ================ Accessors ================ */
1029
1030
/* Get the size of an object from the header. */
1031
static size_t
1032
i_object_size(gs_memory_t * mem, const void /*obj_header_t */ *obj)
1033
29.8M
{
1034
29.8M
    return pre_obj_contents_size((const obj_header_t *)obj - 1);
1035
29.8M
}
1036
1037
/* Get the type of a structure from the header. */
1038
static gs_memory_type_ptr_t
1039
i_object_type(const gs_memory_t * mem, const void /*obj_header_t */ *obj)
1040
5.54M
{
1041
5.54M
    return ((const obj_header_t *)obj - 1)->o_type;
1042
5.54M
}
1043
1044
/* Get the GC status of a memory. */
1045
void
gs_memory_gc_status(const gs_ref_memory_t * mem, gs_memory_gc_status_t * pstat)
{
    /* Struct copy of the current GC status into the caller's buffer. */
    *pstat = mem->gc_status;
}
1050
1051
/* Set the GC status of a memory. */
1052
void
gs_memory_set_gc_status(gs_ref_memory_t * mem, const gs_memory_gc_status_t * pstat)
{
    mem->gc_status = *pstat;
    /* Any change to max_vm/vm_threshold/enabled can move the
     * allocation limit, so recompute it immediately. */
    ialloc_set_limit(mem);
}
1058
1059
/* Set VM threshold. Value passed as int64_t since it is signed */
1060
void
1061
gs_memory_set_vm_threshold(gs_ref_memory_t * mem, int64_t val)
1062
2.58M
{
1063
2.58M
    gs_memory_gc_status_t stat;
1064
2.58M
    gs_ref_memory_t * stable = (gs_ref_memory_t *)mem->stable_memory;
1065
1066
2.58M
    if (val < MIN_VM_THRESHOLD)
1067
0
        val = MIN_VM_THRESHOLD;
1068
2.58M
    else if (val > MAX_VM_THRESHOLD)
1069
0
        val = MAX_VM_THRESHOLD;
1070
2.58M
    gs_memory_gc_status(mem, &stat);
1071
2.58M
    stat.vm_threshold = val;
1072
2.58M
    gs_memory_set_gc_status(mem, &stat);
1073
2.58M
    gs_memory_gc_status(stable, &stat);
1074
2.58M
    stat.vm_threshold = val;
1075
2.58M
    gs_memory_set_gc_status(stable, &stat);
1076
2.58M
}
1077
1078
/* Set VM reclaim. */
1079
void
1080
gs_memory_set_vm_reclaim(gs_ref_memory_t * mem, bool enabled)
1081
2.58M
{
1082
2.58M
    gs_memory_gc_status_t stat;
1083
2.58M
    gs_ref_memory_t * stable = (gs_ref_memory_t *)mem->stable_memory;
1084
1085
2.58M
    gs_memory_gc_status(mem, &stat);
1086
2.58M
    stat.enabled = enabled;
1087
2.58M
    gs_memory_set_gc_status(mem, &stat);
1088
2.58M
    gs_memory_gc_status(stable, &stat);
1089
2.58M
    stat.enabled = enabled;
1090
2.58M
    gs_memory_set_gc_status(stable, &stat);
1091
2.58M
}
1092
1093
/* ================ Objects ================ */
1094
1095
/* Allocate a small object quickly if possible. */
1096
/* The size must be substantially less than max_uint. */
1097
/* ptr must be declared as obj_header_t *. */
1098
/* pfl must be declared as obj_header_t **. */
1099
/* Case 1: exact-fit small freelist hit -- pop the head of the
 * per-size freelist and rewrite its header in place. */
#define IF_FREELIST_ALLOC(ptr, imem, size, pstype, pfl)\
        if ( size <= max_freelist_size &&\
             *(pfl = &imem->freelists[(size + obj_align_mask) >> log2_obj_align_mod]) != 0\
           )\
        { ptr = *pfl;\
                *pfl = *(obj_header_t **)ptr;\
                ptr[-1].o_size = (obj_size_t)size;\
                ptr[-1].o_type = pstype;\
                /* If debugging, clear the block in an attempt to */\
                /* track down uninitialized data errors. */\
                gs_alloc_fill(ptr, gs_alloc_fill_alloc, size);
/* Case 2: oversize request served from the large freelist. */
#define ELSEIF_BIG_FREELIST_ALLOC(ptr, imem, size, pstype)\
        }\
        else if (size > max_freelist_size &&\
                 (ptr = large_freelist_alloc(imem, size)) != 0)\
        { ptr[-1].o_type = pstype;\
                /* If debugging, clear the block in an attempt to */\
                /* track down uninitialized data errors. */\
                gs_alloc_fill(ptr, gs_alloc_fill_alloc, size);
/* Case 3: carve from the bottom of the currently open clump (LIFO),
 * provided the clump is shared, has headroom for the object plus two
 * headers' worth of slack, and the object is not "large". */
#define ELSEIF_LIFO_ALLOC(ptr, imem, size, pstype)\
        }\
        else if ( imem->cc && !imem->cc->c_alone && \
                (imem->cc->ctop - (byte *)(ptr = (obj_header_t *)imem->cc->cbot))\
                >= size + (obj_align_mod + sizeof(obj_header_t) * 2) &&\
             size < imem->large_size\
           )\
        { imem->cc->cbot = (byte *)ptr + obj_size_round(size);\
                ptr->o_pad = 0;\
                ptr->o_alone = 0;\
                ptr->o_size = (obj_size_t)size;\
                ptr->o_type = pstype;\
                ptr++;\
                /* If debugging, clear the block in an attempt to */\
                /* track down uninitialized data errors. */\
                gs_alloc_fill(ptr, gs_alloc_fill_alloc, size);
/* Fallback: caller supplies the slow path (alloc_obj). */
#define ELSE_ALLOC\
        }\
        else
1137
1138
static byte *
i_alloc_bytes(gs_memory_t * mem, size_t ssize, client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    obj_header_t *obj;
    obj_header_t **pfl;
    obj_size_t size = (obj_size_t)ssize;

#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif

    /* Reject requests that do not fit in obj_size_t. */
    if ((size_t)size != ssize)
        return NULL;

    /* Fast paths (see macro definitions above): small freelist,
     * large freelist, LIFO carve from the open clump, then the
     * general allocator. */
    IF_FREELIST_ALLOC(obj, imem, size, &st_bytes, pfl)
        alloc_trace(":+bf", imem, cname, NULL, size, obj);
    ELSEIF_BIG_FREELIST_ALLOC(obj, imem, size, &st_bytes)
        alloc_trace(":+bF", imem, cname, NULL, size, obj);
    ELSEIF_LIFO_ALLOC(obj, imem, (uint)size, &st_bytes)
        alloc_trace(":+b ", imem, cname, NULL, size, obj);
    ELSE_ALLOC
    {
        obj = alloc_obj(imem, size, &st_bytes, 0, cname);
        if (obj == 0)
            return 0;
        alloc_trace(":+b.", imem, cname, NULL, size, obj);
    }
#if IGC_PTR_STABILITY_CHECK
        obj[-1].d.o.space_id = imem->space_id;
#endif
    return (byte *)Memento_label(obj, cname);
}
1172
static byte *
1173
i_alloc_bytes_immovable(gs_memory_t * mem, size_t ssize, client_name_t cname)
1174
7.88M
{
1175
7.88M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1176
7.88M
    obj_header_t *obj;
1177
7.88M
    obj_size_t size = (obj_size_t)ssize;
1178
1179
#ifdef MEMENTO
1180
    if (Memento_failThisEvent())
1181
        return NULL;
1182
#endif
1183
1184
7.88M
    if ((size_t)size != ssize)
1185
0
        return NULL;
1186
1187
7.88M
    obj = alloc_obj(imem, size, &st_bytes,
1188
7.88M
                    ALLOC_IMMOVABLE | ALLOC_DIRECT, cname);
1189
7.88M
    if (obj == 0)
1190
0
        return 0;
1191
7.88M
    alloc_trace("|+b.", imem, cname, NULL, size, obj);
1192
7.88M
    return (byte *)Memento_label(obj, cname);
1193
7.88M
}
1194
static void *
i_alloc_struct(gs_memory_t * mem, gs_memory_type_ptr_t pstype,
               client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    obj_size_t size = pstype->ssize;
    obj_header_t *obj;
    obj_header_t **pfl;

#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif

    ALLOC_CHECK_SIZE(mem,pstype);
    /* Fast paths (see macro definitions above): small freelist,
     * large freelist, LIFO carve from the open clump, then the
     * general allocator. */
    IF_FREELIST_ALLOC(obj, imem, size, pstype, pfl)
        alloc_trace(":+<f", imem, cname, pstype, size, obj);
    ELSEIF_BIG_FREELIST_ALLOC(obj, imem, size, pstype)
        alloc_trace(":+<F", imem, cname, pstype, size, obj);
    ELSEIF_LIFO_ALLOC(obj, imem, size, pstype)
        alloc_trace(":+< ", imem, cname, pstype, size, obj);
    ELSE_ALLOC
    {
        obj = alloc_obj(imem, size, pstype, 0, cname);
        if (obj == 0)
            return 0;
        alloc_trace(":+<.", imem, cname, pstype, size, obj);
    }
#if IGC_PTR_STABILITY_CHECK
        obj[-1].d.o.space_id = imem->space_id;
#endif
    return Memento_label(obj, cname);
}
1227
static void *
1228
i_alloc_struct_immovable(gs_memory_t * mem, gs_memory_type_ptr_t pstype,
1229
                         client_name_t cname)
1230
165M
{
1231
165M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1232
165M
    obj_size_t size = pstype->ssize;
1233
165M
    obj_header_t *obj;
1234
1235
#ifdef MEMENTO
1236
    if (Memento_failThisEvent())
1237
        return NULL;
1238
#endif
1239
1240
165M
    ALLOC_CHECK_SIZE(mem,pstype);
1241
165M
    obj = alloc_obj(imem, size, pstype, ALLOC_IMMOVABLE | ALLOC_DIRECT, cname);
1242
165M
    alloc_trace("|+<.", imem, cname, pstype, size, obj);
1243
165M
    return Memento_label(obj, cname);
1244
165M
}
1245
1246
/*
 * Validate num_elements * elt_size against size_t overflow and, if
 * safe, store the product in *lsize.  Returns false on (possible)
 * overflow.  Slightly conservative: it may reject a few very large
 * products that would in fact fit, which is harmless since such
 * allocations could never succeed anyway.
 */
static inline bool
alloc_array_check_size(size_t num_elements, size_t elt_size, size_t *lsize)
{
    int shift0, shift1;
    size_t m, n;

    /* Avoid the loops in the overwhelming number of cases. */
    if ((num_elements | elt_size) >= 65536) {
        /* m is the maximum unsigned value representable in shift0 bits */
        for (m=0, shift0 = 0; m < num_elements; m = (m<<1)+1, shift0++);
        /* n is the maximum unsigned value representable in shift1 bits */
        for (n=0, shift1 = 0; n < elt_size; n = (n<<1)+1, shift1++);
        /* A shift0 bit unsigned number multiplied by a shift1 bit
         * unsigned number is guaranteed to fit in shift0+shift1 bits
         * (NOT shift0+shift1-1: e.g. 3*3=9 needs 4 bits), so reject
         * whenever the product might need more bits than size_t has. */
        if (shift0 + shift1 > 8 * (int)sizeof(size_t))
            return false; /* Overflow */
    }

    *lsize = num_elements * elt_size;
    return true;
}
1268
1269
static byte *
i_alloc_byte_array(gs_memory_t * mem, size_t num_elements, size_t elt_size,
                   client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    obj_header_t *obj;
    size_t slsize;
    obj_size_t lsize;
#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif
    /* Guard against num_elements * elt_size wrapping size_t ... */
    if (alloc_array_check_size(num_elements, elt_size, &slsize) == false)
        return NULL;
    /* ... and against the product not fitting in obj_size_t. */
    lsize = (obj_size_t)slsize;
    if ((size_t)lsize != slsize)
        return NULL;
    obj = alloc_obj(imem, lsize,
                    &st_bytes, ALLOC_DIRECT, cname);

    if_debug6m('A', mem, "[a%d:+b.]%s -bytes-*(%"PRIuSIZE"=%"PRIuSIZE"*%"PRIuSIZE") = "PRI_INTPTR"\n",
               alloc_trace_space(imem), client_name_string(cname),
               num_elements * elt_size,
               num_elements, elt_size, (intptr_t)obj);
    return (byte *)Memento_label(obj, cname);
}
1295
static byte *
i_alloc_byte_array_immovable(gs_memory_t * mem, size_t num_elements,
                             size_t elt_size, client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    obj_header_t *obj;
    size_t slsize;
    obj_size_t lsize;
#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif
    /* Guard against num_elements * elt_size wrapping size_t ... */
    if (alloc_array_check_size(num_elements, elt_size, &slsize) == false)
        return NULL;
    /* ... and against the product not fitting in obj_size_t. */
    lsize = (obj_size_t)slsize;
    if ((size_t)lsize != slsize)
        return NULL;
    obj = alloc_obj(imem, lsize,
                    &st_bytes, ALLOC_IMMOVABLE | ALLOC_DIRECT,
                    cname);

    if_debug6m('A', mem, "[a%d|+b.]%s -bytes-*(%"PRIuSIZE"=%"PRIuSIZE"*%"PRIuSIZE") = "PRI_INTPTR"\n",
               alloc_trace_space(imem), client_name_string(cname),
               num_elements * elt_size,
               num_elements, elt_size, (intptr_t)obj);
    return (byte *)Memento_label(obj, cname);
}
1322
static void *
i_alloc_struct_array(gs_memory_t * mem, size_t num_elements,
                     gs_memory_type_ptr_t pstype, client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    obj_header_t *obj;
    size_t slsize;
    obj_size_t lsize;
#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif

    ALLOC_CHECK_SIZE(mem,pstype);
#ifdef DEBUG
    /* Arrays must use an element structure type, not a basic one. */
    if (pstype->enum_ptrs == basic_enum_ptrs) {
        dmprintf2(mem, "  i_alloc_struct_array: called with incorrect structure type (not element), struct='%s', client='%s'\n",
                pstype->sname, cname);
        return NULL;    /* fail */
    }
#endif
    /* Guard against num_elements * ssize wrapping size_t ... */
    if (alloc_array_check_size(num_elements, pstype->ssize, &slsize) == false)
        return NULL;
    /* ... and against the product not fitting in obj_size_t. */
    lsize = (obj_size_t)slsize;
    if ((size_t)lsize != slsize)
        return NULL;
    obj = alloc_obj(imem, lsize, pstype, ALLOC_DIRECT, cname);
    if_debug7m('A', mem, "[a%d:+<.]%s %s*(%"PRIuSIZE"=%"PRIuSIZE"*%u) = "PRI_INTPTR"\n",
               alloc_trace_space(imem), client_name_string(cname),
               struct_type_name_string(pstype),
               num_elements * pstype->ssize,
               num_elements, pstype->ssize, (intptr_t)obj);
    return (char *)Memento_label(obj, cname);
}
1356
static void *
i_alloc_struct_array_immovable(gs_memory_t * mem, size_t num_elements,
                               gs_memory_type_ptr_t pstype, client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    obj_header_t *obj;
    size_t slsize;
    obj_size_t lsize;
#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif

    ALLOC_CHECK_SIZE(mem,pstype);
    /* Guard against num_elements * ssize wrapping size_t ... */
    if (alloc_array_check_size(num_elements, pstype->ssize, &slsize) == false)
        return NULL;
    /* ... and against the product not fitting in obj_size_t. */
    lsize = (obj_size_t)slsize;
    if ((size_t)lsize != slsize)
        return NULL;
    obj = alloc_obj(imem, lsize, pstype, ALLOC_IMMOVABLE | ALLOC_DIRECT, cname);
    if_debug7m('A', mem, "[a%d|+<.]%s %s*(%"PRIuSIZE"=%"PRIuSIZE"*%u) = "PRI_INTPTR"\n",
               alloc_trace_space(imem), client_name_string(cname),
               struct_type_name_string(pstype),
               num_elements * pstype->ssize,
               num_elements, pstype->ssize, (intptr_t)obj);
    return (char *)Memento_label(obj, cname);
}
1383
static void *
i_resize_object(gs_memory_t * mem, void *obj, size_t new_num_elements,
                client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    obj_header_t *pp = (obj_header_t *) obj - 1;
    gs_memory_type_ptr_t pstype = pp->o_type;
    size_t old_size = pre_obj_contents_size(pp);
    size_t new_size = pstype->ssize * new_num_elements;
    size_t old_size_rounded = obj_align_round(old_size);
    size_t new_size_rounded = obj_align_round(new_size);
    void *new_obj = NULL;

#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif

    /* Reject sizes that do not fit in obj_size_t. */
    if (new_size_rounded != (obj_size_t)new_size_rounded)
        return NULL;

    if (old_size_rounded == new_size_rounded) {
        /* Same rounded footprint: just record the new logical size. */
        pp->o_size = (obj_size_t)new_size;
        new_obj = obj;
    } else
        /* Object at the top of the open clump with headroom: grow or
         * shrink in place by moving cbot. */
        if (imem->cc && (byte *)obj + old_size_rounded == imem->cc->cbot &&
            imem->cc->ctop - (byte *)obj >= new_size_rounded ) {
            imem->cc->cbot = (byte *)obj + new_size_rounded;
            pp->o_size = (obj_size_t)new_size;
            new_obj = obj;
        } else /* try and trim the object -- but only if room for a dummy header */
            if (new_size_rounded + sizeof(obj_header_t) <= old_size_rounded) {
                trim_obj(imem, obj, (obj_size_t)new_size, (clump_t *)0);
                new_obj = obj;
            }
    if (new_obj) {
        if_debug8m('A', mem, "[a%d:%c%c ]%s %s(%"PRIuSIZE"=>%"PRIuSIZE") "PRI_INTPTR"\n",
                   alloc_trace_space(imem),
                   (new_size > old_size ? '>' : '<'),
                   (pstype == &st_bytes ? 'b' : '<'),
                   client_name_string(cname),
                   struct_type_name_string(pstype),
                   old_size, new_size, (intptr_t)obj);
        return Memento_label(new_obj, cname);
    }
    /* Punt. */
    /* No in-place option worked: allocate fresh, copy, free old. */
    new_obj = gs_alloc_struct_array(mem, new_num_elements, void,
                                    pstype, cname);
    if (new_obj == 0)
        return 0;
    memcpy(new_obj, obj, min(old_size, new_size));
    gs_free_object(mem, obj, cname);
    return Memento_label(new_obj, cname);
}
1437
static void
1438
i_free_object(gs_memory_t * mem, void *ptr, client_name_t cname)
1439
3.70G
{
1440
3.70G
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1441
3.70G
    obj_header_t *pp;
1442
3.70G
    gs_memory_type_ptr_t pstype;
1443
3.70G
    gs_memory_struct_type_t saved_stype;
1444
1445
3.70G
    struct_proc_finalize((*finalize));
1446
3.70G
    size_t size, rounded_size;
1447
1448
3.70G
    if (ptr == 0)
1449
390M
        return;
1450
3.31G
    pp = (obj_header_t *) ptr - 1;
1451
3.31G
    pstype = pp->o_type;
1452
#ifdef DEBUG
1453
    if (gs_debug_c('?')) {
1454
        clump_locator_t cld;
1455
1456
        if (pstype == &st_free) {
1457
            mlprintf2(mem, "%s: object "PRI_INTPTR" already free!\n",
1458
                      client_name_string(cname), (intptr_t)ptr);
1459
            return;   /*gs_abort(); */
1460
        }
1461
        /* Check that this allocator owns the object being freed. */
1462
        cld.memory = imem;
1463
        while ((cld.cp = cld.memory->root),
1464
               !clump_locate_ptr(ptr, &cld)
1465
            ) {
1466
            if (!cld.memory->saved) {
1467
                mlprintf3(mem, "%s: freeing "PRI_INTPTR", not owned by memory "PRI_INTPTR"!\n",
1468
                          client_name_string(cname), (intptr_t)ptr,
1469
                          (intptr_t)mem);
1470
                return;   /*gs_abort(); */
1471
            }
1472
                  /****** HACK: we know the saved state is the first ******
1473
                   ****** member of an alloc_save_t. ******/
1474
            cld.memory = (gs_ref_memory_t *) cld.memory->saved;
1475
        }
1476
        /* Check that the object is in the allocated region. */
1477
        if (!(PTR_BETWEEN((const byte *)pp, cld.cp->cbase,
1478
                          cld.cp->cbot))
1479
            ) {
1480
            mlprintf5(mem, "%s: freeing "PRI_INTPTR",\n\toutside clump "PRI_INTPTR" cbase="PRI_INTPTR", cbot="PRI_INTPTR"!\n",
1481
                      client_name_string(cname), (intptr_t) ptr,
1482
                      (intptr_t) cld.cp, (intptr_t) cld.cp->cbase,
1483
                      (intptr_t) cld.cp->cbot);
1484
            return;   /*gs_abort(); */
1485
        }
1486
    }
1487
#endif
1488
3.31G
    size = pre_obj_contents_size(pp);
1489
3.31G
    rounded_size = obj_align_round(size);
1490
3.31G
    finalize = pstype->finalize;
1491
3.31G
    if (finalize != 0) {
1492
1493
        /* unfortunately device finalize procedures will clobber the
1494
           stype which is used for later debugging with "A" debug
1495
           tracing, so we save stype it in a local. */
1496
314M
        if (gs_debug['a'] || gs_debug['A'])
1497
0
            saved_stype = *pstype;
1498
1499
314M
        if_debug3m('u', mem, "[u]finalizing %s "PRI_INTPTR" (%s)\n",
1500
314M
                   struct_type_name_string(pstype),
1501
314M
                   (intptr_t)ptr, client_name_string(cname));
1502
314M
        (*finalize) (mem, ptr);
1503
1504
314M
        if (gs_debug['a'] || gs_debug['A'])
1505
0
            pstype = &saved_stype;
1506
314M
    }
1507
3.31G
    if (imem->cc && (byte *) ptr + rounded_size == imem->cc->cbot) {
1508
910M
        alloc_trace(":-o ", imem, cname, pstype, size, ptr);
1509
910M
        gs_alloc_fill(ptr, gs_alloc_fill_free, size);
1510
910M
        imem->cc->cbot = (byte *) pp;
1511
        /* IFF this object is adjacent to (or below) the byte after the
1512
         * highest free object, do the consolidation within this clump. */
1513
910M
        if ((byte *)pp <= imem->cc->int_freed_top) {
1514
87.8M
            consolidate_clump_free(imem->cc, imem);
1515
87.8M
        }
1516
910M
        return;
1517
910M
    }
1518
2.40G
    if (pp->o_alone) {
1519
                /*
1520
                 * We gave this object its own clump.  Free the entire clump,
1521
                 * unless it belongs to an older save level, in which case
1522
                 * we mustn't overwrite it.
1523
                 */
1524
198M
        clump_locator_t cl;
1525
1526
#ifdef DEBUG
1527
        {
1528
            clump_locator_t cld;
1529
1530
            cld.memory = imem;
1531
            cld.cp = 0;
1532
            if (gs_debug_c('a'))
1533
                alloc_trace(
1534
                            (clump_locate_ptr(ptr, &cld) ? ":-oL" : ":-o~"),
1535
                               imem, cname, pstype, size, ptr);
1536
        }
1537
#endif
1538
198M
        cl.memory = imem;
1539
198M
        cl.cp = 0;
1540
198M
        if (clump_locate_ptr(ptr, &cl)) {
1541
198M
            if (!imem->is_controlled)
1542
198M
                alloc_free_clump(cl.cp, imem);
1543
198M
            return;
1544
198M
        }
1545
        /* Don't overwrite even if gs_alloc_debug is set. */
1546
198M
    }
1547
2.20G
    if (rounded_size >= sizeof(obj_header_t *)) {
1548
        /*
1549
         * Put the object on a freelist, unless it belongs to
1550
         * an older save level, in which case we mustn't
1551
         * overwrite it.
1552
         */
1553
2.20G
        imem->cfreed.memory = imem;
1554
2.20G
        if (clump_locate(ptr, &imem->cfreed)) {
1555
2.20G
            obj_header_t **pfl;
1556
1557
2.20G
            if (size > max_freelist_size) {
1558
121M
                pfl = &imem->freelists[LARGE_FREELIST_INDEX];
1559
121M
                if (rounded_size > imem->largest_free_size)
1560
11.1M
                    imem->largest_free_size = rounded_size;
1561
2.08G
            } else {
1562
2.08G
                pfl = &imem->freelists[(size + obj_align_mask) >>
1563
2.08G
                                      log2_obj_align_mod];
1564
2.08G
            }
1565
            /* keep track of highest object on a freelist */
1566
            /* If we're dealing with a block in the currently open clump
1567
               (in imem->cc) update that, otherwise, update the clump in
1568
               the clump list (in imem->cfreed.cp)
1569
             */
1570
2.20G
            if (imem->cc && imem->cfreed.cp->chead == imem->cc->chead) {
1571
230M
                if ((byte *)pp >= imem->cc->int_freed_top) {
1572
56.0M
                    imem->cc->int_freed_top = (byte *)ptr + rounded_size;
1573
56.0M
                }
1574
230M
            }
1575
1.97G
            else {
1576
1.97G
                if ((byte *)pp >= imem->cfreed.cp->int_freed_top) {
1577
6.48M
                    imem->cfreed.cp->int_freed_top = (byte *)ptr + rounded_size;
1578
6.48M
                }
1579
1.97G
            }
1580
2.20G
            pp->o_type = &st_free;  /* don't confuse GC */
1581
2.20G
            o_set_unmarked(pp);
1582
2.20G
            gs_alloc_fill(ptr, gs_alloc_fill_free, size);
1583
2.20G
            *(obj_header_t **) ptr = *pfl;
1584
2.20G
            *pfl = (obj_header_t *) ptr;
1585
2.20G
            alloc_trace((size > max_freelist_size ? ":-oF" : ":-of"),
1586
2.20G
                        imem, cname, pstype, size, ptr);
1587
2.20G
            return;
1588
2.20G
        }
1589
        /* Don't overwrite even if gs_alloc_debug is set. */
1590
2.20G
    } else {
1591
56.4k
        pp->o_type = &st_free;  /* don't confuse GC */
1592
56.4k
        gs_alloc_fill(ptr, gs_alloc_fill_free, size);
1593
56.4k
    }
1594
2.20G
    alloc_trace(":-o#", imem, cname, pstype, size, ptr);
1595
167k
    imem->lost.objects += obj_size_round(size);
1596
167k
}
1597
/*
 * Allocate a movable string of nbytes.  Strings are carved from the top
 * (ctop) end of a clump, growing downward toward cbot; they carry no
 * object header, so the caller must remember the length.  Returns NULL
 * if the size cannot be represented or no clump can be acquired.
 */
static byte *
i_alloc_string(gs_memory_t * mem, size_t nbytes, client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    byte *str;
    clump_splay_walker sw;

    /*
     * Cycle through the clumps at the current save level, starting
     * with the currently open one.
     */
    clump_t *cp = clump_splay_walk_init_mid(&sw, imem->cc);

#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif

    if (cp == 0) {
        /* Open an arbitrary clump. */
        imem->cc = clump_splay_walk_init(&sw, imem);
        alloc_open_clump(imem);
    }
top:
    /* Fast path: the open clump has room between cbot and ctop
     * (clumps flagged c_alone are reserved for a single object). */
    if (imem->cc && !imem->cc->c_alone && imem->cc->ctop - imem->cc->cbot > nbytes) {
        if_debug4m('A', mem, "[a%d:+> ]%s(%"PRIuSIZE") = "PRI_INTPTR"\n",
                   alloc_trace_space(imem), client_name_string(cname), nbytes,
                   (intptr_t)(imem->cc->ctop - nbytes));
        str = imem->cc->ctop -= nbytes;
        gs_alloc_fill(str, gs_alloc_fill_alloc, nbytes);
        return str;
    }
    /* Try the next clump. */
    cp = clump_splay_walk_fwd(&sw);

    if (cp != NULL)
    {
        alloc_close_clump(imem);
        imem->cc = cp;
        alloc_open_clump(imem);
        goto top;
    }
    if (nbytes > string_space_quanta(SIZE_MAX - sizeof(clump_head_t)) *
        string_data_quantum
        ) {     /* Can't represent the size in a uint! */
        return 0;
    }
    if (nbytes >= imem->large_size) { /* Give it a clump all its own. */
        return i_alloc_string_immovable(mem, nbytes, cname);
    } else {     /* Add another clump. */
        cp = alloc_acquire_clump(imem, (ulong) imem->clump_size, true, "clump");

        if (cp == 0)
            return 0;
        alloc_close_clump(imem);
        imem->cc = clump_splay_walk_init_mid(&sw, cp);
        gs_alloc_fill(imem->cc->cbase, gs_alloc_fill_free,
                      imem->cc->climit - imem->cc->cbase);
        /* Retry the fast path with the freshly acquired clump open. */
        goto top;
    }
}
1657
/*
 * Allocate an immovable string: the string gets a clump all its own
 * (marked c_alone so the normal carve-from-top path skips it).
 * Used for strings of at least imem->large_size bytes.  Returns NULL
 * if the clump cannot be acquired.
 */
static byte *
i_alloc_string_immovable(gs_memory_t * mem, size_t nbytes, client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    byte *str;
    size_t asize;
    clump_t *cp;

#ifdef MEMENTO
    if (Memento_failThisEvent())
        return NULL;
#endif
    /* Give it a clump all its own. */
    asize = string_clump_space(nbytes) + sizeof(clump_head_t);
    cp = alloc_acquire_clump(imem, asize, true, "large string clump");

    if (cp == 0)
        return 0;
    cp->c_alone = true;

    /* The string occupies the top of the clump, as usual for strings. */
    str = cp->ctop = cp->climit - nbytes;
    if_debug4m('a', mem, "[a%d|+>L]%s(%"PRIuSIZE") = "PRI_INTPTR"\n",
               alloc_trace_space(imem), client_name_string(cname), nbytes,
               (intptr_t)str);
    gs_alloc_fill(str, gs_alloc_fill_alloc, nbytes);

    return Memento_label(str, cname);
}
1685
1686
/*
 * Resize a string, preserving min(old_num, new_num) bytes of content.
 * Three cases:
 *   1. The string is the bottom-most (most recently allocated) string in
 *      the open clump and either shrinks or still fits: resize in place
 *      by sliding the data and moving ctop.
 *   2. Otherwise, shrinking: trim in place, accounting the tail bytes as
 *      lost until a GC reclaims them.
 *   3. Otherwise, growing: allocate a new string, copy, free the old one.
 * Returns the (possibly moved) string, or NULL on allocation failure.
 */
static byte *
i_resize_string(gs_memory_t * mem, byte * data, size_t old_num, size_t new_num,
                client_name_t cname)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    byte *ptr;

    if (old_num == new_num)  /* same size returns the same string */
        return data;

    if ( imem->cc && data == imem->cc->ctop && /* bottom-most string */
        (new_num < old_num ||
         imem->cc->ctop - imem->cc->cbot > new_num - old_num)
        ) {     /* Resize in place. */
        /* Strings grow downward, so the resized string starts at
         * data + old_num - new_num (moves down when growing). */
        ptr = data + old_num - new_num;
        if_debug6m('A', mem, "[a%d:%c> ]%s(%"PRIuSIZE"->%"PRIuSIZE") "PRI_INTPTR"\n",
                   alloc_trace_space(imem),
                   (new_num > old_num ? '>' : '<'),
                   client_name_string(cname), old_num, new_num,
                   (intptr_t)ptr);
        imem->cc->ctop = ptr;
        memmove(ptr, data, min(old_num, new_num));
#ifdef DEBUG
        if (new_num > old_num)
            gs_alloc_fill(ptr + old_num, gs_alloc_fill_alloc,
                          new_num - old_num);
        else
            gs_alloc_fill(data, gs_alloc_fill_free, old_num - new_num);
#endif
    } else
        if (new_num < old_num) {
            /* trim the string and create a free space hole */
            ptr = data;
            imem->lost.strings += old_num - new_num;
            gs_alloc_fill(data + new_num, gs_alloc_fill_free,
                          old_num - new_num);
            if_debug5m('A', mem, "[a%d:<> ]%s(%"PRIuSIZE"->%"PRIuSIZE") "PRI_INTPTR"\n",
                       alloc_trace_space(imem), client_name_string(cname),
                       old_num, new_num, (intptr_t)ptr);
        } else {     /* Punt. */
            ptr = gs_alloc_string(mem, new_num, cname);
            if (ptr == 0)
                return 0;
            memcpy(ptr, data, min(old_num, new_num));
            gs_free_string(mem, data, old_num, cname);
        }

    return ptr;
}
1735
1736
static void
1737
i_free_string(gs_memory_t * mem, byte * data, size_t nbytes,
1738
              client_name_t cname)
1739
85.5M
{
1740
85.5M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1741
1742
85.5M
    if (data) {
1743
85.3M
        if (imem->cc && data == imem->cc->ctop) {
1744
45.0M
            if_debug4m('A', mem, "[a%d:-> ]%s(%"PRIuSIZE") "PRI_INTPTR"\n",
1745
45.0M
                       alloc_trace_space(imem), client_name_string(cname), nbytes,
1746
45.0M
                       (intptr_t)data);
1747
45.0M
            imem->cc->ctop += nbytes;
1748
45.0M
        } else {
1749
40.2M
            if_debug4m('A', mem, "[a%d:->#]%s(%"PRIuSIZE") "PRI_INTPTR"\n",
1750
40.2M
                       alloc_trace_space(imem), client_name_string(cname), nbytes,
1751
40.2M
                       (intptr_t)data);
1752
40.2M
            imem->lost.strings += nbytes;
1753
40.2M
        }
1754
85.3M
        gs_alloc_fill(data, gs_alloc_fill_free, nbytes);
1755
85.3M
    }
1756
85.5M
}
1757
1758
/* Return the stable allocator paired with this memory object. */
static gs_memory_t *
i_stable(gs_memory_t *mem)
{
    gs_memory_t *stable_mem = mem->stable_memory;

    return stable_mem;
}
1763
1764
/*
 * Report memory usage statistics.  "Unused" space is the sum of lost
 * refs/strings, the gap between cbot and ctop in every clump, and all
 * free-listed objects; "inner" clumps are subtracted back out because
 * their space is already counted in previous_status.allocated.
 */
static void
i_status(gs_memory_t * mem, gs_memory_status_t * pstat)
{
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
    size_t unused = imem->lost.refs + imem->lost.strings;
    size_t inner = 0;
    clump_splay_walker sw;
    clump_t *cp;

    alloc_close_clump(imem);
    /* Add up unallocated space within each clump. */
    /* Also keep track of space allocated to inner clumps, */
    /* which are included in previous_status.allocated. */
    for (cp = clump_splay_walk_init(&sw, imem); cp != NULL; cp = clump_splay_walk_fwd(&sw))
    {
        unused += cp->ctop - cp->cbot;
        if (cp->outer)
            inner += cp->cend - (byte *) cp->chead;
    }
    unused += compute_free_objects(imem);
    pstat->used = imem->allocated + inner - unused +
        imem->previous_status.used;
    pstat->allocated = imem->allocated +
        imem->previous_status.allocated;
    pstat->max_used = 0;    /* unknown for this allocator */
    pstat->limit = imem->limit;
    pstat->is_thread_safe = false; /* this allocator is not thread safe */
}
1792
1793
static void
1794
i_enable_free(gs_memory_t * mem, bool enable)
1795
7.54M
{
1796
7.54M
    if (enable)
1797
3.77M
        mem->procs.free_object = i_free_object,
1798
3.77M
            mem->procs.free_string = i_free_string;
1799
3.77M
    else
1800
3.77M
        mem->procs.free_object = gs_ignore_free_object,
1801
3.77M
            mem->procs.free_string = gs_ignore_free_string;
1802
7.54M
}
1803
1804
/* Overwrite the structure type recorded in an object's header
 * (the header sits immediately before the client pointer). */
static void i_set_object_type(gs_memory_t *mem, void *ptr, gs_memory_type_ptr_t type)
{
    if (ptr != 0) {
        obj_header_t *hdr = (obj_header_t *)ptr - 1;

        hdr->o_type = type;
    }
}
1813
1814
/* Deferred-free hook required by the gs_memory_t procedure table.
 * This allocator never defers frees, so the procedure is a no-op. */
static void i_defer_frees(gs_memory_t *mem, int defer)
{
}
1817
1818
/* ------ Internal procedures ------ */
1819
1820
/* Compute the amount of free object space by scanning free lists. */
1821
static size_t
1822
compute_free_objects(gs_ref_memory_t * mem)
1823
9.28M
{
1824
9.28M
    size_t unused = mem->lost.objects;
1825
9.28M
    int i;
1826
1827
    /* Add up space on free lists. */
1828
956M
    for (i = 0; i < num_freelists; i++) {
1829
946M
        const obj_header_t *pfree;
1830
1831
951M
        for (pfree = mem->freelists[i]; pfree != 0;
1832
946M
             pfree = *(const obj_header_t * const *)pfree
1833
946M
            )
1834
4.53M
            unused += obj_align_round(pfree[-1].o_size);
1835
946M
    }
1836
9.28M
    return unused;
1837
9.28M
}
1838
1839
/* Allocate an object from the large-block freelist. */
static obj_header_t * /* rets obj if allocated, else 0 */
large_freelist_alloc(gs_ref_memory_t *mem, obj_size_t size)
{
    /* Scan large object freelist. We'll grab an object up to 1/8 bigger */
    /* right away, else use best fit of entire scan. */
    obj_size_t aligned_size = obj_align_round(size);
    /* Minimum acceptable block: request plus room for a trailing
     * dummy header that trim_obj may need to split off the excess. */
    size_t aligned_min_size = aligned_size + sizeof(obj_header_t);
    size_t aligned_max_size =
        aligned_min_size + obj_align_round(aligned_min_size / 8);
    obj_header_t *best_fit = 0;
    obj_header_t **best_fit_prev = NULL; /* Initialize against indeterminism. */
    obj_size_t best_fit_size = (obj_size_t)SIZE_MAX;
    obj_header_t *pfree;
    obj_header_t **ppfprev = &mem->freelists[LARGE_FREELIST_INDEX];
    size_t largest_size = 0;

    if (aligned_size > mem->largest_free_size)
        return 0;    /* definitely no block large enough */

    while ((pfree = *ppfprev) != 0) {
        obj_size_t free_size = obj_align_round(pfree[-1].o_size);

        /* An exact match is always taken; otherwise remember the
         * smallest block big enough to split. */
        if (free_size == aligned_size ||
            (free_size >= aligned_min_size && free_size < best_fit_size)
            ) {
            best_fit = pfree;
            best_fit_prev = ppfprev;
            best_fit_size = pfree[-1].o_size;
            if (best_fit_size <= aligned_max_size)
                break; /* good enough fit to spare scan of entire list */
        }
        ppfprev = (obj_header_t **) pfree;
        if (free_size > largest_size)
            largest_size = free_size;
    }
    if (best_fit == 0) {
        /*
         * No single free clump is large enough, but since we scanned the
         * entire list, we now have an accurate updated value for
         * largest_free_size.
         */
        mem->largest_free_size = largest_size;
        return 0;
    }

    /* Remove from freelist & return excess memory to free */
    *best_fit_prev = *(obj_header_t **)best_fit;
    trim_obj(mem, best_fit, aligned_size, (clump_t *)0);

    /* Pre-init block header; o_alone & o_type are already init'd */
    best_fit[-1].o_size = size;

    return best_fit;
}
1894
1895
/* Allocate an object.  This handles all but the fastest, simplest case.
 *
 * Large or immovable requests get a dedicated clump; everything else
 * first consults the large-block freelist (for ALLOC_DIRECT requests
 * above max_freelist_size), then tries to carve from the end of each
 * clump at the current save level, then adds a clump, and finally (for
 * controlled memories only) scavenges free space.  Returns a pointer
 * just past the object header, or 0 on failure. */
static obj_header_t *
alloc_obj(gs_ref_memory_t *mem, obj_size_t lsize, gs_memory_type_ptr_t pstype,
          alloc_flags_t flags, client_name_t cname)
{
    obj_header_t *ptr;

    if (lsize >= mem->large_size || (flags & ALLOC_IMMOVABLE)) {
        /*
         * Give the object a clump all its own.  Note that this case does
         * not occur if is_controlled is true.
         */
        obj_size_t asize =
            ((lsize + obj_align_mask) & -obj_align_mod) +
            sizeof(obj_header_t);
        clump_t *cp =
            alloc_acquire_clump(mem, asize + sizeof(clump_head_t), false,
                                "large object clump");

        /* asize < lsize means the size computation overflowed. */
        if (asize < lsize)
            return 0;
        if (cp == 0)
            return 0;
        cp->c_alone = true;
        ptr = (obj_header_t *) cp->cbot;
        cp->cbot += asize;
        ptr->o_pad = 0;
        ptr->o_alone = 1;
        ptr->o_size = (obj_size_t)lsize;
    } else {
        /*
         * Cycle through the clumps at the current save level, starting
         * with the currently open one.
         */
        clump_splay_walker sw;
        clump_t *cp = clump_splay_walk_init_mid(&sw, mem->cc);
        obj_size_t asize = obj_size_round(lsize);
        bool allocate_success = false;

        if (lsize > max_freelist_size && (flags & ALLOC_DIRECT)) {
            /* We haven't checked the large block freelist yet. */
            if ((ptr = large_freelist_alloc(mem, lsize)) != 0) {
                --ptr;      /* must point to header */
                goto done;
            }
        }

        if (cp == 0) {
            /* Open an arbitrary clump. */
            mem->cc = clump_splay_walk_init(&sw, mem);
            alloc_open_clump(mem);
        }

/* NOTE: this macro assigns to the enclosing 'ptr' as a side effect. */
#define CAN_ALLOC_AT_END(cp)\
  ((cp) && !((cp)->c_alone) && (cp)->ctop - (byte *) (ptr = (obj_header_t *) (cp)->cbot)\
   > asize + sizeof(obj_header_t))

        do {
            if (CAN_ALLOC_AT_END(mem->cc)) {
                allocate_success = true;
                break;
            } else if (mem->is_controlled) {
                /* Try consolidating free space. */
                gs_consolidate_free((gs_memory_t *)mem);
                if (CAN_ALLOC_AT_END(mem->cc)) {
                    allocate_success = true;
                    break;
                }
            }
            /* No luck, go on to the next clump. */
            cp = clump_splay_walk_fwd(&sw);
            if (cp == NULL)
                break;

            alloc_close_clump(mem);
            mem->cc = cp;
            alloc_open_clump(mem);
        }
        while (1);

#ifdef CONSOLIDATE_BEFORE_ADDING_CLUMP
        /* NOTE(review): this disabled branch references 'cp_orig', which
         * is not declared in this function — it would not compile if the
         * #define above were enabled; verify before turning it on. */
        if (!allocate_success) {
            /*
             * Try consolidating free space before giving up.
             * It's not clear this is a good idea, since it requires quite
             * a lot of computation and doesn't seem to improve things much.
             */
            if (!mem->is_controlled) { /* already did this if controlled */
                clump_t *cp;

                alloc_close_clump(mem);
                for (cp = clump_splay_walk_init_mid(&sw, cp_orig); cp != NULL; cp = clump_splay_walk_fwd(&sw))
                {
                    consolidate_clump_free(cp, mem);
                    if (CAN_ALLOC_AT_END(cp)) {
                        mem->cc = cp;
                        alloc_open_clump(mem);
                        allocate_success = true;
                        break;
                    }
                }
            }
        }
#endif

#undef CAN_ALLOC_AT_END

        if (!allocate_success) {
            /* Add another clump. */
            clump_t *cp =
                alloc_add_clump(mem, mem->clump_size, "clump");

            if (cp) {
                /* mem->cc == cp */
                ptr = (obj_header_t *)cp->cbot;
                allocate_success = true;
            }
        }

        /*
         * If no success, try to scavenge from low free memory. This is
         * only enabled for controlled memory (currently only async
         * renderer) because it's too much work to prevent it from
         * examining outer save levels in the general case.
         */
        if (allocate_success)
            mem->cc->cbot = (byte *) ptr + asize;
        else if (!mem->is_controlled ||
                 (ptr = scavenge_low_free(mem, lsize)) == 0)
            return 0;  /* allocation failed */
        ptr->o_pad = 0;
        ptr->o_alone = 0;
        ptr->o_size = lsize;
    }
done:
    ptr->o_type = pstype;
#   if IGC_PTR_STABILITY_CHECK
        ptr->d.o.space_id = mem->space_id;
#   endif
    ptr++;
    gs_alloc_fill(ptr, gs_alloc_fill_alloc, lsize);
    return Memento_label(ptr, cname);
}
2038
2039
/*
 * Consolidate free objects contiguous to free space at cbot onto the cbot
 * area. Also keep track of end of highest internal free object
 * (int_freed_top).
 */
static void
consolidate_clump_free(clump_t *cp, gs_ref_memory_t *mem)
{
    obj_header_t *begin_free = 0;

    cp->int_freed_top = cp->cbase;  /* below all objects in clump */
    /* Scan all objects in the clump, remembering the start of each
     * run of free objects. */
    SCAN_CLUMP_OBJECTS(cp)
    DO_ALL
        if (pre->o_type == &st_free) {
            if (begin_free == 0)
                begin_free = pre;
        } else {
            if (begin_free)
                cp->int_freed_top = (byte *)pre; /* first byte following internal free */
            begin_free = 0;
        }
    END_OBJECTS_SCAN
    if (begin_free) {
        /* We found free objects at the top of the object area. */
        /* Remove the free objects from the freelists. */
        remove_range_from_freelist(mem, begin_free, cp->cbot);
        if_debug4m('a', (const gs_memory_t *)mem,
                   "[a]resetting clump "PRI_INTPTR" cbot from "PRI_INTPTR" to "PRI_INTPTR" (%lu free)\n",
                   (intptr_t)cp, (intptr_t)cp->cbot, (intptr_t)begin_free,
                   (intptr_t)((byte *)cp->cbot - (byte *)begin_free));
        cp->cbot = (byte *) begin_free;
    }
}
2072
2073
static splay_app_result_t
2074
consolidate(clump_t *cp, void *arg)
2075
0
{
2076
0
    gs_ref_memory_t *mem = (gs_ref_memory_t *)arg;
2077
2078
0
    consolidate_clump_free(cp, mem);
2079
0
    if (cp->cbot == cp->cbase && cp->ctop == cp->climit) {
2080
        /* The entire clump is free. */
2081
0
        if (!mem->is_controlled) {
2082
0
            alloc_free_clump(cp, mem);
2083
0
            if (mem->cc == cp)
2084
0
                mem->cc = NULL;
2085
0
        }
2086
0
    }
2087
2088
0
    return SPLAY_APP_CONTINUE;
2089
0
}
2090
2091
/* Consolidate free objects across every clump of the allocator,
 * freeing clumps that become entirely empty (see consolidate()). */
void
ialloc_consolidate_free(gs_ref_memory_t *mem)
{
    alloc_close_clump(mem);

    /* We used to visit clumps in reverse order to encourage LIFO behavior,
     * but with binary trees this is not possible (unless you want to
     * either change the tree during the process, recurse, or otherwise
     * hold the state). */
    clump_splay_app(mem->root, mem, consolidate, mem);

    /* NOTE: Previously, if we freed the current clump, we'd move to whatever the
     * bigger of it's children was. We now just move to the root. */
    if (mem->cc == NULL)
        mem->cc = mem->root;

    alloc_open_clump(mem);
}
2110
static void
2111
i_consolidate_free(gs_memory_t *mem)
2112
0
{
2113
0
    ialloc_consolidate_free((gs_ref_memory_t *)mem);
2114
0
}
2115
2116
/* Working state for the scavenge() splay-tree visitor. */
typedef struct
{
    uint need_free;             /* bytes of contiguous free space required */
    obj_header_t *found_pre;    /* out: header of reclaimed run, or NULL */
    gs_ref_memory_t *mem;       /* allocator being scavenged */
    obj_size_t request_size;    /* original (unrounded) request size */
} scavenge_data;
2123
2124
/* Splay-tree visitor: look for a contiguous run of free objects in one
 * clump large enough to satisfy sd->need_free bytes.  On success the
 * run is unlinked from the freelists, merged into a single free object
 * at sd->found_pre, trimmed to the requested size, and the walk stops. */
static splay_app_result_t
scavenge(clump_t *cp, void *arg)
{
    scavenge_data *sd = (scavenge_data *)arg;
    obj_header_t *begin_free = NULL;
    obj_size_t found_free = 0;

    sd->found_pre = NULL;

    /* Accumulate the length of each run of free objects, stopping as
     * soon as one run is big enough. */
    SCAN_CLUMP_OBJECTS(cp)
    DO_ALL
        if (pre->o_type == &st_free) {
            if (begin_free == 0) {
                found_free = 0;
                begin_free = pre;
            }
            found_free += pre_obj_rounded_size(pre);
            if (begin_free != 0 && found_free >= sd->need_free)
                break;
        } else
            begin_free = 0;
    END_OBJECTS_SCAN_NO_ABORT

    if (begin_free != 0 && found_free >= sd->need_free) {
        /* Fish found pieces out of various freelists */
        remove_range_from_freelist(sd->mem, (char*)begin_free,
                                   (char*)begin_free + found_free);

        /* Prepare found object */
        sd->found_pre = begin_free;
        sd->found_pre->o_type = &st_free;  /* don't confuse GC if gets lost */
        sd->found_pre->o_size = found_free - sizeof(obj_header_t);

        /* Chop off excess tail piece & toss it back into free pool */
        trim_obj(sd->mem, sd->found_pre + 1, sd->request_size, cp);
        return SPLAY_APP_STOP;
    }

    return SPLAY_APP_CONTINUE;
}
2164
2165
/* try to free-up given amount of space from freespace below clump base */
2166
static obj_header_t * /* returns uninitialized object hdr, NULL if none found */
2167
scavenge_low_free(gs_ref_memory_t *mem, obj_size_t request_size)
2168
0
{
2169
    /* find 1st range of memory that can be glued back together to fill request */
2170
0
    scavenge_data sd;
2171
0
    obj_size_t request_size_rounded = obj_size_round(request_size);
2172
2173
0
    sd.found_pre = 0;
2174
0
    sd.need_free = request_size_rounded + sizeof(obj_header_t);    /* room for GC's dummy hdr */
2175
0
    sd.mem = mem;
2176
0
    sd.request_size = request_size;
2177
2178
0
    clump_splay_app(mem->root, mem, scavenge, &sd);
2179
0
    return sd.found_pre;
2180
0
}
2181
2182
/* Remove range of memory from a mem's freelists.
 *
 * [bottom, top) must contain only free objects.  First pass counts how
 * many objects of each freelist index lie in the range; second pass
 * unlinks exactly that many in-range entries from each affected
 * freelist.  lost.objects is adjusted by the bytes in the range that
 * were NOT on any freelist (those were previously counted as lost). */
static void
remove_range_from_freelist(gs_ref_memory_t *mem, void* bottom, void* top)
{
    int num_free[num_freelists];
    /* [smallest, largest] brackets the freelist indices seen so far;
     * only that window of num_free[] is ever initialized. */
    int smallest = num_freelists, largest = -1;
    const obj_header_t *cur;
    obj_size_t size;
    int i;
    obj_size_t removed = 0;

    /*
     * Scan from bottom to top, a range containing only free objects,
     * counting the number of objects of each size.
     */

    for (cur = bottom; cur != top;
         cur = (const obj_header_t *)
             ((const byte *)cur + obj_size_round(size))
        ) {
        size = cur->o_size;
        i = (size > max_freelist_size ? LARGE_FREELIST_INDEX :
             (size + obj_align_mask) >> log2_obj_align_mod);
        if (i < smallest) {
            /*
             * 0-length free blocks aren't kept on any list, because
             * they don't have room for a pointer.
             */
            if (i == 0)
                continue;
            if (smallest < num_freelists)
                memset(&num_free[i], 0, (smallest - i) * sizeof(int));
            else
                num_free[i] = 0;
            smallest = i;
        }
        if (i > largest) {
            if (largest >= 0)
                memset(&num_free[largest + 1], 0, (i - largest) * sizeof(int));
            largest = i;
        }
        num_free[i]++;
    }

    /*
     * Remove free objects from the freelists, adjusting lost.objects by
     * subtracting the size of the region being processed minus the amount
     * of space reclaimed.
     */

    for (i = smallest; i <= largest; i++) {
        int count = num_free[i];
        obj_header_t *pfree;
        obj_header_t **ppfprev;

        if (!count)
            continue;
        ppfprev = &mem->freelists[i];
        /* Walk the list, unlinking every entry that falls inside
         * [bottom, top); stop once 'count' entries have been removed. */
        for (;;) {
            pfree = *ppfprev;
            if (PTR_GE(pfree, bottom) && PTR_LT(pfree, top)) {
                /* We're removing an object. */
                *ppfprev = *(obj_header_t **) pfree;
                removed += obj_align_round(pfree[-1].o_size);
                if (!--count)
                    break;
            } else
                ppfprev = (obj_header_t **) pfree;
        }
    }
    mem->lost.objects -= (char*)top - (char*)bottom - removed;
}
2254
2255
/* Trim a memory object down to a given size */
static void
trim_obj(gs_ref_memory_t *mem, obj_header_t *obj, obj_size_t size, clump_t *cp)
/* Obj must have rounded size == req'd size, or have enough room for */
/* trailing dummy obj_header */
{
    obj_size_t rounded_size = obj_align_round(size);
    obj_header_t *pre_obj = obj - 1;
    /* Header for the excess piece, placed right after the trimmed object. */
    obj_header_t *excess_pre = (obj_header_t*)((char*)obj + rounded_size);
    obj_size_t old_rounded_size = obj_align_round(pre_obj->o_size);
    obj_size_t excess_size = old_rounded_size - rounded_size - sizeof(obj_header_t);

    /* trim object's size to desired */
    pre_obj->o_size = size;
    if (old_rounded_size == rounded_size)
        return;  /* nothing more to do here */
    /*
     * If the object is alone in its clump, move cbot to point to the end
     * of the object.
     */
    if (pre_obj->o_alone) {
        if (!cp) {
            /* Caller didn't supply the clump; look it up. */
            mem->cfreed.memory = mem;
            if (clump_locate(obj, &mem->cfreed)) {
                cp = mem->cfreed.cp;
            }
        }
        if (cp) {
#ifdef DEBUG
            if (cp->cbot != (byte *)obj + old_rounded_size) {
                lprintf3("resizing "PRI_INTPTR", old size %u, new size %u, cbot wrong!\n",
                         (intptr_t)obj, old_rounded_size, size);
                /* gs_abort */
            } else
#endif
                {
                    cp->cbot = (byte *)excess_pre;
                    return;
                }
        }
        /*
         * Something very weird is going on.  This probably shouldn't
         * ever happen, but if it does....
         */
        pre_obj->o_pad = 0;
        pre_obj->o_alone = 0;
    }
    /* make excess into free obj */
    excess_pre->o_type = &st_free;  /* don't confuse GC */
    excess_pre->o_size = excess_size;
    excess_pre->o_pad = 0;
    excess_pre->o_alone = 0;
    if (excess_size >= obj_align_mod) {
        /* Put excess object on a freelist */
        obj_header_t **pfl;

        if (mem->cc && (byte *)excess_pre >= mem->cc->int_freed_top)
            mem->cc->int_freed_top = (byte *)excess_pre + excess_size;
        if (excess_size <= max_freelist_size)
            pfl = &mem->freelists[(excess_size + obj_align_mask) >>
                                 log2_obj_align_mod];
        else {
            /* NOTE: this inner rounded_size deliberately shadows the
             * outer one; it is the rounded size of the excess piece. */
            uint rounded_size = obj_align_round(excess_size);

            pfl = &mem->freelists[LARGE_FREELIST_INDEX];
            if (rounded_size > mem->largest_free_size)
                mem->largest_free_size = rounded_size;
        }
        *(obj_header_t **) (excess_pre + 1) = *pfl;
        *pfl = excess_pre + 1;
        mem->cfreed.memory = mem;
    } else {
        /* excess piece will be "lost" memory */
        mem->lost.objects += excess_size + sizeof(obj_header_t);
    }
}
2331
2332
/* ================ Roots ================ */
2333
2334
/* Register a root. */
2335
static int
2336
i_register_root(gs_memory_t * mem, gs_gc_root_t ** rpp, gs_ptr_type_t ptype,
2337
                void **up, client_name_t cname)
2338
5.86M
{
2339
5.86M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
2340
5.86M
    gs_gc_root_t *rp;
2341
2342
5.86M
    if (rpp == NULL || *rpp == NULL) {
2343
0
        rp = gs_raw_alloc_struct_immovable(imem->non_gc_memory, &st_gc_root_t,
2344
0
                                           "i_register_root");
2345
0
        if (rp == 0)
2346
0
            return_error(gs_error_VMerror);
2347
0
        rp->free_on_unregister = true;
2348
0
        if (rpp && *rpp == NULL)
2349
0
            *rpp = rp;
2350
5.86M
    } else {
2351
5.86M
        rp = *rpp;
2352
5.86M
        rp->free_on_unregister = false;
2353
5.86M
    }
2354
5.86M
    if_debug3m('8', mem, "[8]register root(%s) "PRI_INTPTR" -> "PRI_INTPTR"\n",
2355
5.86M
               client_name_string(cname), (intptr_t)rp, (intptr_t)up);
2356
5.86M
    rp->ptype = ptype;
2357
5.86M
    rp->p = up;
2358
5.86M
    rp->next = imem->roots;
2359
5.86M
    imem->roots = rp;
2360
5.86M
    return 0;
2361
5.86M
}
2362
2363
/* Unregister a root. */
2364
static void
2365
i_unregister_root(gs_memory_t * mem, gs_gc_root_t * rp, client_name_t cname)
2366
5.38M
{
2367
5.38M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
2368
5.38M
    gs_gc_root_t **rpp = &imem->roots;
2369
2370
5.38M
    if_debug2m('8', mem, "[8]unregister root(%s) "PRI_INTPTR"\n",
2371
5.38M
               client_name_string(cname), (intptr_t)rp);
2372
5.38M
    while (*rpp != rp)
2373
0
        rpp = &(*rpp)->next;
2374
5.38M
    *rpp = (*rpp)->next;
2375
5.38M
    if (rp->free_on_unregister)
2376
0
        gs_free_object(imem->non_gc_memory, rp, "i_unregister_root");
2377
5.38M
}
2378
2379
/* ================ clumps ================ */
2380
2381
public_st_clump();
2382
2383
/* Insert a clump in the chain.  This is exported for the GC and for */
2384
/* the forget_save operation. */
2385
void
alloc_link_clump(clump_t * cp, gs_ref_memory_t * imem)
{
    /* Insert cp into the allocator's splay tree of clumps, then verify
     * clump invariants (SANITY_CHECK is a no-op in release builds). */
    splay_insert(cp, imem);
    SANITY_CHECK(cp);
}
2391
2392
/* Add a clump for ordinary allocation. */
2393
static clump_t *
2394
alloc_add_clump(gs_ref_memory_t * mem, size_t csize, client_name_t cname)
2395
15.5M
{
2396
15.5M
    clump_t *cp = alloc_acquire_clump(mem, csize, true, cname);
2397
2398
15.5M
    if (cp) {
2399
15.5M
        alloc_close_clump(mem);
2400
15.5M
        mem->cc = cp;
2401
15.5M
        gs_alloc_fill(mem->cc->cbase, gs_alloc_fill_free,
2402
15.5M
                      mem->cc->climit - mem->cc->cbase);
2403
15.5M
    }
2404
15.5M
    return cp;
2405
15.5M
}
2406
2407
/* Acquire a clump.  If we would exceed MaxLocalVM (if relevant), */
2408
/* or if we would exceed the VMThreshold and psignal is NULL, */
2409
/* return 0; if we would exceed the VMThreshold but psignal is valid, */
2410
/* just set the signal and return successfully. */
2411
static clump_t *
alloc_acquire_clump(gs_ref_memory_t * mem, size_t csize, bool has_strings,
                    client_name_t cname)
{
    /* Allocate a clump header and csize bytes of data from the parent
     * (non-GC) allocator, initialize and link the clump, and account for
     * it in mem->allocated.  May raise the GC signal if the VM limit has
     * been reached; returns NULL on failure or when MaxLocalVM would be
     * exceeded (see comment above). */
    gs_memory_t *parent = mem->non_gc_memory;
    clump_t *cp;
    byte *cdata;

#if ARCH_SIZEOF_LONG > ARCH_SIZEOF_INT
    /* If csize is larger than max_uint, punt. */
    if (csize != (uint) csize)
        return 0;
#endif
    cp = gs_raw_alloc_struct_immovable(parent, &st_clump, cname);

    /* gc_status.signal_value is initialised to zero when the
     * allocator is created, only the Postscript interpreter
     * (which implement garbage collection) takes the action to set
     * it to anything other than zero
     */
    if( mem->gc_status.signal_value != 0) {
        /* we have a garbage collector */
        if (mem->allocated >= mem->limit) {
            mem->gc_status.requested += csize;
            if (mem->limit >= mem->gc_status.max_vm) {
                /* Hard VM limit: give back the header and fail. */
                gs_free_object(parent, cp, cname);
                return 0;
            }
            if_debug4m('0', (const gs_memory_t *)mem,
                       "[0]signaling space=%d, allocated=%"PRIdSIZE", limit=%"PRIdSIZE", requested=%"PRIdSIZE"\n",
                       mem->space, mem->allocated,
                       mem->limit, mem->gc_status.requested);
            /* Soft limit: request a GC but still satisfy the allocation. */
            mem->gs_lib_ctx->gcsignal = mem->gc_status.signal_value;
        }
    }
    cdata = gs_alloc_bytes_immovable(parent, csize, cname);
    if (cp == 0 || cdata == 0) {
        /* Either piece may have succeeded; free both (freeing NULL is safe). */
        gs_free_object(parent, cdata, cname);
        gs_free_object(parent, cp, cname);
        mem->gc_status.requested = csize;
        return 0;
    }
    alloc_init_clump(cp, cdata, cdata + csize, has_strings, (clump_t *) 0);
    alloc_link_clump(cp, mem);
    /* Account for both the clump struct and its data area. */
    mem->allocated += st_clump.ssize + csize;
    SANITY_CHECK(cp);
    return cp;
}
2459
2460
/* Initialize the pointers in a clump.  This is exported for save/restore. */
2461
/* The bottom pointer must be aligned, but the top pointer need not */
2462
/* be aligned. */
2463
void
alloc_init_clump(clump_t * cp, byte * bot, byte * top, bool has_strings,
                 clump_t * outer)
{
    /* Lay out the region [bot, top) as a clump: clump head at the bottom,
     * objects growing up from cbase, strings growing down from climit.
     * If has_strings, reserve space at the top for the string GC mark and
     * relocation tables.  'outer' is non-NULL for inner clumps carved out
     * of another clump's space. */
    byte *cdata = bot;

    if (outer != 0)
        outer->inner_count++;
    cp->chead = (clump_head_t *) cdata;
    cdata += sizeof(clump_head_t);
    /* Object area starts empty, just above the clump head. */
    cp->cbot = cp->cbase = cp->int_freed_top = cdata;
    cp->cend = top;
    cp->rcur = 0;
    cp->rtop = 0;
    cp->outer = outer;
    cp->inner_count = 0;
    cp->has_refs = false;
    cp->sbase = cdata;
    cp->c_alone = false; /* should be set correctly by caller */
    if (has_strings && top - cdata >= string_space_quantum + sizeof(long) - 1) {
        /*
         * We allocate a large enough string marking and reloc table
         * to cover the entire clump.
         */
        uint nquanta = string_space_quanta(top - cdata);

        cp->climit = cdata + nquanta * string_data_quantum;
        cp->smark = cp->climit;
        cp->smark_size = string_quanta_mark_size(nquanta);
        /* Relocation table sits immediately after the mark bits. */
        cp->sreloc =
            (string_reloc_offset *) (cp->smark + cp->smark_size);
        cp->sfree1 = (uint *) cp->sreloc;
    } else {
        /* No strings, don't need the string GC tables. */
        cp->climit = cp->cend;
        cp->sfree1 = 0;
        cp->smark = 0;
        cp->smark_size = 0;
        cp->sreloc = 0;
    }
    /* Strings are allocated downward from ctop. */
    cp->ctop = cp->climit;
    alloc_init_free_strings(cp);
}
2506
2507
/* Initialize the string freelists in a clump. */
2508
void
alloc_init_free_strings(clump_t * cp)
{
    /* Reset the clump's string freelists: clear the per-quantum freelist
     * table (present only when the clump has string space) and the head
     * of the free-string chain. */
    if (cp->sfree1)
        memset(cp->sfree1, 0, STRING_FREELIST_SPACE(cp));
    cp->sfree = 0;
}
2515
2516
/* Close up the current clump. */
2517
/* This is exported for save/restore and the GC. */
2518
void
alloc_close_clump(gs_ref_memory_t * mem)
{
    /* In release builds this is a no-op; under -ZA it traces the clump
     * being closed.  Kept as a function so save/restore and the GC have
     * a single hook point. */
#ifdef DEBUG
    if (gs_debug_c('A')) {
        dmlprintf1((const gs_memory_t *)mem, "[a%d]", alloc_trace_space(mem));
        dmprintf_clump((const gs_memory_t *)mem, "closing clump", mem->cc);
    }
#endif
}
2528
2529
/* Reopen the current clump after a GC or restore. */
2530
void
alloc_open_clump(gs_ref_memory_t * mem)
{
    /* Counterpart of alloc_close_clump; no-op except for -ZA tracing. */
#ifdef DEBUG
    if (gs_debug_c('A')) {
        dmlprintf1((const gs_memory_t *)mem, "[a%d]", alloc_trace_space(mem));
        dmprintf_clump((const gs_memory_t *)mem, "opening clump", mem->cc);
    }
#endif
}
2540
2541
#ifdef DEBUG
2542
static splay_app_result_t
2543
check_in_clump(clump_t *cp, void *arg)
2544
{
2545
    clump_t **cpp = (clump_t **)arg;
2546
2547
    if (*cpp != cp)
2548
        return SPLAY_APP_CONTINUE;
2549
    *cpp = NULL;
2550
2551
    return SPLAY_APP_STOP;
2552
}
2553
#endif
2554
2555
/* Remove a clump from the chain.  This is exported for the GC. */
2556
void
alloc_unlink_clump(clump_t * cp, gs_ref_memory_t * mem)
{
    /* Remove cp from mem's splay tree of clumps.  If it was the current
     * clump, clear mem->cc.  Under gs_alloc_debug, first verify that cp
     * really belongs to this allocator. */
    SANITY_CHECK_MID(cp);
#ifdef DEBUG
    if (gs_alloc_debug) { /* Check to make sure this clump belongs to this allocator. */
        clump_t *found = cp;
        clump_splay_app(mem->root, mem, check_in_clump, &found);

        /* check_in_clump nulls 'found' when it sees cp in the tree. */
        if (found != NULL) {
            mlprintf2((const gs_memory_t *)mem, "unlink_clump "PRI_INTPTR" not owned by memory "PRI_INTPTR"!\n",
                      (intptr_t)cp, (intptr_t)mem);
            return;   /*gs_abort(); */
        }
    }
#endif
    (void)clump_splay_remove(cp, mem);
    if (mem->cc == cp) {
        mem->cc = NULL;
    }
}
2577
2578
/*
2579
 * Free a clump.  This is exported for the GC.  Since we eventually use
2580
 * this to free the clump containing the allocator itself, we must be
2581
 * careful not to reference anything in the allocator after freeing the
2582
 * clump data.
2583
 */
2584
void
alloc_free_clump(clump_t * cp, gs_ref_memory_t * mem)
{
    /* Unlink and free a clump.  Per the comment above, this may free the
     * clump containing 'mem' itself, so everything needed from mem is
     * read before the data is released. */
    gs_memory_t *parent = mem->non_gc_memory;
    byte *cdata = (byte *)cp->chead;
    ulong csize = (byte *)cp->cend - cdata;

    alloc_unlink_clump(cp, mem);
    mem->allocated -= st_clump.ssize;
    /* Invalidate the cached clump locator if it points at cp. */
    if (mem->cfreed.cp == cp)
        mem->cfreed.cp = 0;
    if (cp->outer == 0) {
        /* Top-level clump: its data was a separate parent allocation. */
        mem->allocated -= csize;
        gs_free_object(parent, cdata, "alloc_free_clump(data)");
    } else {
        /* Inner clump: data belongs to the outer clump, just scrub it. */
        cp->outer->inner_count--;
        gs_alloc_fill(cdata, gs_alloc_fill_free, csize);
    }
    gs_free_object(parent, cp, "alloc_free_clump(clump struct)");
}
2604
2605
/* Find the clump for a pointer. */
2606
/* Note that this only searches the current save level. */
2607
/* Since a given save level can't contain both a clump and an inner clump */
2608
/* of that clump, we can stop when is_within_clump succeeds, and just test */
2609
/* is_in_inner_clump then. */
2610
bool
2611
clump_locate_ptr(const void *ptr, clump_locator_t * clp)
2612
1.44G
{
2613
1.44G
    clump_t *cp = clp->memory->root;
2614
2615
3.90G
    while (cp)
2616
3.81G
    {
2617
3.81G
        if (PTR_LT(ptr, cp->cbase))
2618
1.35G
        {
2619
1.35G
            cp = cp->left;
2620
1.35G
            continue;
2621
1.35G
        }
2622
2.45G
        if (PTR_GE(ptr, cp->cend))
2623
1.10G
        {
2624
1.10G
            cp = cp->right;
2625
1.10G
            continue;
2626
1.10G
        }
2627
        /* Found it! */
2628
1.35G
        splay_move_to_root(cp, clp->memory);
2629
1.35G
        clp->cp = cp;
2630
1.35G
        return !ptr_is_in_inner_clump(ptr, cp);
2631
2.45G
    }
2632
88.0M
    return false;
2633
1.44G
}
2634
2635
/*
 * Return true iff ptr falls inside any clump owned by mem.  Same splay
 * tree descent as clump_locate_ptr, but only answers the membership
 * question (still splaying the hit to the root as a cache).
 */
bool ptr_is_within_mem_clumps(const void *ptr, gs_ref_memory_t *mem)
{
    clump_t *node = mem->root;

    while (node != NULL) {
        if (PTR_LT(ptr, node->cbase)) {
            node = node->left;
        } else if (PTR_GE(ptr, node->cend)) {
            node = node->right;
        } else {
            /* ptr is inside this clump's address range. */
            splay_move_to_root(node, mem);
            return true;
        }
    }
    return false;
}
2657
2658
/* ------ Debugging ------ */
2659
2660
#ifdef DEBUG
2661
2662
#include "string_.h"
2663
2664
static inline bool
2665
obj_in_control_region(const void *obot, const void *otop,
2666
                      const dump_control_t *pdc)
2667
{
2668
    return
2669
        ((pdc->bottom == NULL || PTR_GT(otop, pdc->bottom)) &&
2670
         (pdc->top == NULL || PTR_LT(obot, pdc->top)));
2671
}
2672
2673
/* Preset dump controls; bottom/top NULL = no address filtering. */
const dump_control_t dump_control_default =
{
    dump_do_default, NULL, NULL
};
/* Everything, including raw object contents. */
const dump_control_t dump_control_all =
{
    dump_do_strings | dump_do_type_addresses | dump_do_pointers |
    dump_do_pointed_strings | dump_do_contents, NULL, NULL
};

/* Like dump_control_all but without raw object contents. */
const dump_control_t dump_control_no_contents =
{
    dump_do_strings | dump_do_type_addresses | dump_do_pointers |
    dump_do_pointed_strings, NULL, NULL
};
2688
2689
/*
2690
 * Internal procedure to dump a block of memory, in hex and optionally
2691
 * also as characters.
2692
 */
2693
static void
2694
debug_indent(const gs_memory_t *mem, int indent)
2695
{
2696
    int i;
2697
2698
    for (i = indent; i > 0; --i)
2699
        dmputc(mem, ' ');
2700
}
2701
static void
debug_dump_contents(const gs_memory_t *mem, const byte * bot,
                    const byte * top, int indent, bool as_chars)
{
    /* Hex-dump [bot, top) in 16-byte rows aligned to 16-byte boundaries,
     * optionally followed by a printable-character row.  Interior runs of
     * identical rows are collapsed to a single "..." line. */
    const byte *block;

#define block_size 16

    if (bot >= top)
        return;
    /* Start at the 16-byte boundary at or below bot. */
    for (block = bot - ((bot - (byte *) 0) & (block_size - 1));
         block < top; block += block_size
        ) {
        int i;
        char label[12];

        /* Check for repeated blocks. */
        if (block >= bot + block_size &&
            block <= top - (block_size * 2) &&
            !memcmp(block, block - block_size, block_size) &&
            !memcmp(block, block + block_size, block_size)
            ) {
            /* Print "..." only for the first row of a repeated run. */
            if (block < bot + block_size * 2 ||
                memcmp(block, block - block_size * 2, block_size)
                ) {
                debug_indent(mem, indent);
                dmputs(mem, "  ...\n");
            }
            continue;
        }
        gs_snprintf(label, sizeof(label), PRI_INTPTR":", (intptr_t)block);
        debug_indent(mem, indent);
        dmputs(mem, label);
        for (i = 0; i < block_size; ++i) {
            /* Double space every 4 bytes for readability. */
            const char *sepr = ((i & 3) == 0 && i != 0 ? "  " : " ");

            dmputs(mem, sepr);
            if (block + i >= bot && block + i < top)
                dmprintf1(mem, "%02x", block[i]);
            else
                dmputs(mem, "  ");      /* padding outside [bot, top) */
        }
        dmputc(mem, '\n');
        if (as_chars) {
            /* Second row: printable ASCII under the hex digits. */
            debug_indent(mem, indent + strlen(label));
            for (i = 0; i < block_size; ++i) {
                byte ch;

                if ((i & 3) == 0 && i != 0)
                    dmputc(mem, ' ');
                if (block + i >= bot && block + i < top &&
                    (ch = block[i]) >= 32 && ch <= 126
                    )
                    dmprintf1(mem, "  %c", ch);
                else
                    dmputs(mem, "   ");
            }
            dmputc(mem, '\n');
        }
    }
#undef block_size
}
2763
2764
/* Print one object with the given options. */
2765
/* Relevant options: type_addresses, no_types, pointers, pointed_strings, */
2766
/* contents. */
2767
void
2768
debug_print_object(const gs_memory_t *mem, const void *obj, const dump_control_t * control)
2769
{
2770
    const obj_header_t *pre = ((const obj_header_t *)obj) - 1;
2771
    ulong size = pre_obj_contents_size(pre);
2772
    const gs_memory_struct_type_t *type = pre->o_type;
2773
    dump_options_t options = control->options;
2774
2775
    dmprintf3(mem, "  pre="PRI_INTPTR"(obj="PRI_INTPTR") size=%lu",
2776
              (intptr_t) pre, (intptr_t) obj, size);
2777
    switch (options & (dump_do_type_addresses | dump_do_no_types)) {
2778
    case dump_do_type_addresses + dump_do_no_types: /* addresses only */
2779
        dmprintf1(mem, " type="PRI_INTPTR"", (intptr_t) type);
2780
        break;
2781
    case dump_do_type_addresses:  /* addresses & names */
2782
        dmprintf2(mem, " type=%s("PRI_INTPTR")", struct_type_name_string(type),
2783
                 (intptr_t)type);
2784
        break;
2785
    case 0:   /* names only */
2786
        dmprintf1(mem, " type=%s", struct_type_name_string(type));
2787
    case dump_do_no_types:  /* nothing */
2788
        ;
2789
    }
2790
    if (options & dump_do_marks) {
2791
        dmprintf2(mem, " smark/back=%u (0x%x)", pre->o_smark, pre->o_smark);
2792
    }
2793
    dmputc(mem, '\n');
2794
    if (type == &st_free)
2795
        return;
2796
    if (options & dump_do_pointers) {
2797
        struct_proc_enum_ptrs((*proc)) = type->enum_ptrs;
2798
        uint index = 0;
2799
        enum_ptr_t eptr;
2800
        gs_ptr_type_t ptype;
2801
2802
        if (proc != gs_no_struct_enum_ptrs) {
2803
            if (proc != 0) {
2804
                for (; (ptype = (*proc)(mem, pre + 1, size, index, &eptr, type, NULL)) != 0;
2805
                     ++index
2806
                     ) {
2807
                    const void *ptr = eptr.ptr;
2808
2809
                    dmprintf1(mem, "    ptr %u: ", index);
2810
                    if (ptype == ptr_string_type || ptype == ptr_const_string_type) {
2811
                        const gs_const_string *str = (const gs_const_string *)&eptr;
2812
                        if (!str)
2813
                            dmprintf(mem, "0x0");
2814
                        else
2815
                            dmprintf2(mem, PRI_INTPTR "(%u)", (intptr_t)str->data, str->size);
2816
                        if (options & dump_do_pointed_strings) {
2817
                            dmputs(mem, " =>\n");
2818
                            if (!str)
2819
                                dmprintf(mem, "(null)\n");
2820
                            else
2821
                                debug_dump_contents(mem, str->data, str->data + str->size, 6,
2822
                                                    true);
2823
                        } else {
2824
                            dmputc(mem, '\n');
2825
                        }
2826
                    } else {
2827
                        dmprintf1(mem, (PTR_BETWEEN(ptr, obj, (const byte *)obj + size) ?
2828
                                  "("PRI_INTPTR")\n" : PRI_INTPTR "\n"), (intptr_t) ptr);
2829
                    }
2830
                }
2831
            } else { /* proc == 0 */
2832
                dmprintf(mem, "previous line should be a ref\n");
2833
            }
2834
        } /* proc != gs_no_struct_enum_ptrs */
2835
    }
2836
    if (options & dump_do_contents) {
2837
        debug_dump_contents(mem, (const byte *)obj, (const byte *)obj + size,
2838
                            0, false);
2839
    }
2840
}
2841
2842
/* Print the contents of a clump with the given options. */
2843
/* Relevant options: all. */
2844
void
debug_dump_clump(const gs_memory_t *mem, const clump_t * cp, const dump_control_t * control)
{
    /* Print a clump's bookkeeping fields, then (optionally) its string
     * area, then every object in it that intersects the control window. */
    dmprintf1(mem, "clump at "PRI_INTPTR":\n", (intptr_t) cp);
    dmprintf3(mem, "   chead="PRI_INTPTR"  cbase="PRI_INTPTR" sbase="PRI_INTPTR"\n",
              (intptr_t)cp->chead, (intptr_t)cp->cbase, (intptr_t)cp->sbase);
    dmprintf3(mem, "    rcur="PRI_INTPTR"   rtop="PRI_INTPTR"  cbot="PRI_INTPTR"\n",
              (intptr_t)cp->rcur, (intptr_t)cp->rtop, (intptr_t)cp->cbot);
    dmprintf4(mem, "    ctop="PRI_INTPTR" climit="PRI_INTPTR" smark="PRI_INTPTR", size=%u\n",
              (intptr_t)cp->ctop, (intptr_t)cp->climit, (intptr_t)cp->smark,
              cp->smark_size);
    dmprintf2(mem, "  sreloc="PRI_INTPTR"   cend="PRI_INTPTR"\n",
              (intptr_t)cp->sreloc, (intptr_t)cp->cend);
    dmprintf6(mem, "left="PRI_INTPTR" right="PRI_INTPTR" parent="PRI_INTPTR" outer="PRI_INTPTR" inner_count=%u has_refs=%s\n",
              (intptr_t)cp->left, (intptr_t)cp->right, (intptr_t)cp->parent, (intptr_t)cp->outer,
              cp->inner_count, (cp->has_refs ? "true" : "false"));

    dmprintf2(mem, "  sfree1="PRI_INTPTR"   sfree="PRI_INTPTR"\n",
              (intptr_t)cp->sfree1, (intptr_t)cp->sfree);
    if (control->options & dump_do_strings) {
        /* Strings live between ctop and climit; clip to the window. */
        debug_dump_contents(mem, (control->bottom == 0 ? cp->ctop :
                             max(control->bottom, cp->ctop)),
                            (control->top == 0 ? cp->climit :
                             min(control->top, cp->climit)),
                            0, true);
    }
    /* SCAN_CLUMP_OBJECTS/DO_ALL iterate 'pre'/'size' over every object. */
    SCAN_CLUMP_OBJECTS(cp)
        DO_ALL
        if (obj_in_control_region(pre + 1,
                                  (const byte *)(pre + 1) + size,
                                  control)
        )
        debug_print_object(mem, pre + 1, control);
    END_OBJECTS_SCAN_NO_ABORT
}
2879
void
2880
debug_print_clump(const gs_memory_t *mem, const clump_t * cp)
2881
{
2882
    dump_control_t control;
2883
2884
    control = dump_control_default;
2885
    debug_dump_clump(mem, cp, &control);
2886
}
2887
2888
/* Print the contents of all clumps managed by an allocator. */
2889
/* Relevant options: all. */
2890
void
2891
debug_dump_memory(const gs_ref_memory_t * mem, const dump_control_t * control)
2892
{
2893
    const clump_t *cp;
2894
    clump_splay_walker sw;
2895
2896
    for (cp = clump_splay_walk_init(&sw, mem); cp != NULL; cp = clump_splay_walk_fwd(&sw))
2897
    {
2898
        if (obj_in_control_region(cp->cbase, cp->cend, control))
2899
            debug_dump_clump((const gs_memory_t *)mem, cp, control);
2900
    }
2901
}
2902
2903
void
debug_dump_allocator(const gs_ref_memory_t *mem)
{
    /* Convenience wrapper: dump every clump, omitting raw contents. */
    debug_dump_memory(mem, &dump_control_no_contents);
}
2908
2909
/* Find all the objects that contain a given pointer. */
2910
void
debug_find_pointers(const gs_ref_memory_t *mem, const void *target)
{
    /* Walk every object in every clump, enumerate its pointers, and
     * print each object holding a pointer equal to 'target'. */
    clump_splay_walker sw;
    dump_control_t control;
    const clump_t *cp;

    /* options = 0: debug_print_object prints just the header line. */
    control.options = 0;
    for (cp = clump_splay_walk_init(&sw, mem); cp; cp = clump_splay_walk_fwd(&sw))
    {
        SCAN_CLUMP_OBJECTS(cp);
        DO_ALL
            struct_proc_enum_ptrs((*proc)) = pre->o_type->enum_ptrs;
            uint index = 0;
            enum_ptr_t eptr;

            if (proc)   /* doesn't trace refs NB fix me. */
                for (; (*proc)((const gs_memory_t *)mem, pre + 1, size, index,
                               &eptr, pre->o_type, NULL);
                     ++index)
                    if (eptr.ptr == target) {
                        dmprintf1((const gs_memory_t *)mem, "Index %d in", index);
                        debug_print_object((const gs_memory_t *)mem, pre + 1, &control);
                    }
        END_OBJECTS_SCAN_NO_ABORT
    }
}
2937
/*
 * Recursively dump the clump splay tree, one clump per line, indented by
 * depth, verifying each node's parent back-pointer along the way.
 */
static void ddct(const gs_memory_t *mem, clump_t *cp, clump_t *parent, int depth)
{
    int level;

    if (cp == NULL)
        return;
    for (level = depth; level > 0; level--)
        dmlprintf(mem, " ");

    dmlprintf7(mem, "Clump "PRI_INTPTR":"PRI_INTPTR" parent="PRI_INTPTR" left="PRI_INTPTR":"PRI_INTPTR" right="PRI_INTPTR":"PRI_INTPTR"\n",
        (intptr_t)cp, (intptr_t)cp->cbase, (intptr_t)cp->parent,
        (intptr_t)cp->left, (intptr_t)(cp->left ? cp->left->cbase : NULL),
        (intptr_t)cp->right, (intptr_t)(cp->right ? cp->right->cbase : NULL));
    if (cp->parent != parent)
        dmlprintf(mem, "Parent pointer mismatch!\n");
    ddct(mem, cp->left, cp, depth+1);
    ddct(mem, cp->right, cp, depth+1);
}
2955
void
debug_dump_clump_tree(const gs_ref_memory_t *mem)
{
    /* Dump the whole splay tree starting from the root (no parent). */
    ddct((const gs_memory_t *)mem, mem->root, NULL, 0);
}
2960
2961
#endif /* DEBUG */