Coverage Report

Created: 2022-04-16 11:23

/src/ghostpdl/base/gsalloc.c
Line
Count
Source (jump to first uncovered line)
1
/* Copyright (C) 2001-2022 Artifex Software, Inc.
2
   All Rights Reserved.
3
4
   This software is provided AS-IS with no warranty, either express or
5
   implied.
6
7
   This software is distributed under license and may not be copied,
8
   modified or distributed except as expressly authorized under the terms
9
   of the license contained in the file LICENSE in this distribution.
10
11
   Refer to licensing information at http://www.artifex.com or contact
12
   Artifex Software, Inc.,  1305 Grant Avenue - Suite 200, Novato,
13
   CA 94945, U.S.A., +1(415)492-9861, for further information.
14
*/
15
16
17
/* Standard memory allocator */
18
#include "gx.h"
19
#include "memory_.h"
20
#include "gserrors.h"
21
#include "gsexit.h"
22
#include "gsmdebug.h"
23
#include "gsstruct.h"
24
#include "gxalloc.h"
25
#include "stream.h"   /* for clearing stream list */
26
#include "malloc_.h" /* For MEMENTO */
27
28
/*
29
 * Define whether to try consolidating space before adding a new clump.
30
 * The default is not to do this, because it is computationally
31
 * expensive and doesn't seem to help much.  However, this is done for
32
 * "controlled" spaces whether or not the #define is in effect.
33
 */
34
/*#define CONSOLIDATE_BEFORE_ADDING_CLUMP */
35
36
/*
37
 * This allocator produces tracing messages of the form
38
 *      [aNMOTS]...
39
 * where
40
 *   N is the VM space number, +1 if we are allocating from stable memory.
41
 *   M is : for movable objects, | for immovable,
42
 *   O is {alloc = +, free = -, grow = >, shrink = <},
43
 *   T is {bytes = b, object = <, ref = $, string = >}, and
44
 *   S is {small freelist = f, large freelist = F, LIFO = space,
45
 *      own clump = L, lost = #, lost own clump = ~, other = .}.
46
 */
47
#ifdef DEBUG
48
static int
49
alloc_trace_space(const gs_ref_memory_t *imem)
50
{
51
    return (int)(imem->space + (imem->stable_memory == (const gs_memory_t *)imem));
52
}
53
static void
54
alloc_trace(const char *chars, gs_ref_memory_t * imem, client_name_t cname,
55
            gs_memory_type_ptr_t stype, uint size, const void *ptr)
56
{
57
    if_debug7m('A', (const gs_memory_t *)imem, "[a%d%s]%s %s(%u) %s"PRI_INTPTR"\n",
58
               alloc_trace_space(imem), chars, client_name_string(cname),
59
               (ptr == 0 || stype == 0 ? "" :
60
                struct_type_name_string(stype)),
61
               size, (chars[1] == '+' ? "= " : ""), (intptr_t)ptr);
62
}
63
static bool
64
alloc_size_is_ok(gs_memory_type_ptr_t stype)
65
{
66
    return (stype->ssize > 0 && stype->ssize < 0x200000);
67
}
68
#  define ALLOC_CHECK_SIZE(mem,stype)\
69
    BEGIN\
70
      if (!alloc_size_is_ok(stype)) {\
71
        mlprintf2(mem,"size of struct type "PRI_INTPTR" is 0x%lx!\n",\
72
                  (intptr_t)(stype), (ulong)((stype)->ssize));\
73
        return 0;\
74
      }\
75
    END
76
#else
77
145M
#  define alloc_trace(chars, imem, cname, stype, size, ptr) DO_NOTHING
78
50.2M
#  define ALLOC_CHECK_SIZE(mem,stype) DO_NOTHING
79
#endif
80
81
/*
82
 * The structure descriptor for allocators.  Even though allocators
83
 * are allocated outside GC space, they reference objects within it.
84
 */
85
public_st_ref_memory();
86
static
87
1.19M
ENUM_PTRS_BEGIN(ref_memory_enum_ptrs) return 0;
88
199k
ENUM_PTR3(0, gs_ref_memory_t, streams, names_array, changes);
89
199k
ENUM_PTR(3, gs_ref_memory_t, saved);
90
1.19M
ENUM_PTR(4, gs_ref_memory_t, scan_limit);
91
1.19M
ENUM_PTRS_END
92
199k
static RELOC_PTRS_WITH(ref_memory_reloc_ptrs, gs_ref_memory_t *mptr)
93
199k
{
94
199k
    RELOC_PTR(gs_ref_memory_t, streams);
95
199k
    RELOC_PTR(gs_ref_memory_t, names_array);
96
199k
    RELOC_PTR(gs_ref_memory_t, changes);
97
199k
    RELOC_PTR(gs_ref_memory_t, scan_limit);
98
    /* Don't relocate the saved pointer now -- see igc.c for details. */
99
199k
    mptr->reloc_saved = RELOC_OBJ(mptr->saved);
100
199k
}
101
199k
RELOC_PTRS_END
102
103
/*
104
 * Define the flags for alloc_obj, which implements all but the fastest
105
 * case of allocation.
106
 */
107
typedef enum {
108
    ALLOC_IMMOVABLE = 1,
109
    ALLOC_DIRECT = 2    /* called directly, without fast-case checks */
110
} alloc_flags_t;
111
112
/* Forward references */
113
static void remove_range_from_freelist(gs_ref_memory_t *mem, void* bottom, void* top);
114
static obj_header_t *large_freelist_alloc(gs_ref_memory_t *mem, obj_size_t size);
115
static obj_header_t *scavenge_low_free(gs_ref_memory_t *mem, unsigned request_size);
116
static size_t compute_free_objects(gs_ref_memory_t *);
117
static obj_header_t *alloc_obj(gs_ref_memory_t *, obj_size_t, gs_memory_type_ptr_t, alloc_flags_t, client_name_t);
118
static void consolidate_clump_free(clump_t *cp, gs_ref_memory_t *mem);
119
static void trim_obj(gs_ref_memory_t *mem, obj_header_t *obj, obj_size_t size, clump_t *cp);
120
static clump_t *alloc_acquire_clump(gs_ref_memory_t *, size_t, bool, client_name_t);
121
static clump_t *alloc_add_clump(gs_ref_memory_t *, size_t, client_name_t);
122
void alloc_close_clump(gs_ref_memory_t *);
123
124
/*
125
 * Define the standard implementation (with garbage collection)
126
 * of Ghostscript's memory manager interface.
127
 */
128
/* Raw memory procedures */
129
static gs_memory_proc_alloc_bytes(i_alloc_bytes_immovable);
130
static gs_memory_proc_resize_object(i_resize_object);
131
static gs_memory_proc_free_object(i_free_object);
132
static gs_memory_proc_stable(i_stable);
133
static gs_memory_proc_status(i_status);
134
static gs_memory_proc_free_all(i_free_all);
135
static gs_memory_proc_consolidate_free(i_consolidate_free);
136
137
/* Object memory procedures */
138
static gs_memory_proc_alloc_bytes(i_alloc_bytes);
139
static gs_memory_proc_alloc_struct(i_alloc_struct);
140
static gs_memory_proc_alloc_struct(i_alloc_struct_immovable);
141
static gs_memory_proc_alloc_byte_array(i_alloc_byte_array);
142
static gs_memory_proc_alloc_byte_array(i_alloc_byte_array_immovable);
143
static gs_memory_proc_alloc_struct_array(i_alloc_struct_array);
144
static gs_memory_proc_alloc_struct_array(i_alloc_struct_array_immovable);
145
static gs_memory_proc_object_size(i_object_size);
146
static gs_memory_proc_object_type(i_object_type);
147
static gs_memory_proc_alloc_string(i_alloc_string);
148
static gs_memory_proc_alloc_string(i_alloc_string_immovable);
149
static gs_memory_proc_resize_string(i_resize_string);
150
static gs_memory_proc_free_string(i_free_string);
151
static gs_memory_proc_register_root(i_register_root);
152
static gs_memory_proc_unregister_root(i_unregister_root);
153
static gs_memory_proc_enable_free(i_enable_free);
154
static gs_memory_proc_set_object_type(i_set_object_type);
155
static gs_memory_proc_defer_frees(i_defer_frees);
156
157
/* We export the procedures for subclasses. */
158
const gs_memory_procs_t gs_ref_memory_procs =
159
{
160
    /* Raw memory procedures */
161
    i_alloc_bytes_immovable,
162
    i_resize_object,
163
    i_free_object,
164
    i_stable,
165
    i_status,
166
    i_free_all,
167
    i_consolidate_free,
168
    /* Object memory procedures */
169
    i_alloc_bytes,
170
    i_alloc_struct,
171
    i_alloc_struct_immovable,
172
    i_alloc_byte_array,
173
    i_alloc_byte_array_immovable,
174
    i_alloc_struct_array,
175
    i_alloc_struct_array_immovable,
176
    i_object_size,
177
    i_object_type,
178
    i_alloc_string,
179
    i_alloc_string_immovable,
180
    i_resize_string,
181
    i_free_string,
182
    i_register_root,
183
    i_unregister_root,
184
    i_enable_free,
185
    i_set_object_type,
186
    i_defer_frees
187
};
188
189
/*
190
 * Previous versions of this code used a simple linked list of
191
 * clumps. We change here to use a splay tree of clumps.
192
 * Splay Trees can be found documented in "Algorithms and Data
193
 * Structures" by Jeffrey H Kingston.
194
 *
195
 * Essentially they are binary trees, ordered by address of the
196
 * 'cbase' pointer. The 'cunning' feature with them is that
197
 * when a node in the tree is accessed, we do a 'move to root'
198
 * operation. This involves performing various 'rotations' as
199
 * we move up the tree, the net effect of which tends to
200
 * lead to more balanced trees (see Kingston for analysis).
201
 * It also leads to better locality of reference in that
202
 * recently accessed nodes stay near the root.
203
 */
204
205
/* #define DEBUG_CLUMPS */
206
#ifdef DEBUG_CLUMPS
207
#define SANITY_CHECK(cp) sanity_check(cp)
208
#define SANITY_CHECK_MID(cp) sanity_check_mid(cp)
209
210
static void
211
broken_splay()
212
{
213
    dlprintf("Broken splay tree!\n");
214
}
215
216
void sanity_check_rec(clump_t *cp)
217
{
218
    splay_dir_t from = SPLAY_FROM_ABOVE;
219
220
    while (cp)
221
    {
222
        if (from == SPLAY_FROM_ABOVE)
223
        {
224
            /* We have arrived from above. Step left. */
225
            if (cp->left)
226
            {
227
                if (cp->left->cbase > cp->cbase || cp->left->parent != cp)
228
                    broken_splay();
229
                cp = cp->left;
230
                from = SPLAY_FROM_ABOVE;
231
                continue;
232
            }
233
            /* No left to step to, so imagine we have just arrived from there */
234
            from = SPLAY_FROM_LEFT;
235
        }
236
        if (from == SPLAY_FROM_LEFT)
237
        {
238
            /* We have arrived from the left. Step right. */
239
            if (cp->right)
240
            {
241
                if (cp->right->cbase < cp->cbase || cp->right->parent != cp)
242
                    broken_splay();
243
                cp = cp->right;
244
                from = SPLAY_FROM_ABOVE;
245
                continue;
246
            }
247
            /* No right to step to, so imagine we have just arrived from there. */
248
            from = SPLAY_FROM_RIGHT;
249
        }
250
        if (from == SPLAY_FROM_RIGHT)
251
        {
252
            /* We have arrived from the right. Step up. */
253
            if (cp->parent == NULL)
254
                break;
255
            if (cp->parent->left != cp && cp->parent->right != cp)
256
                broken_splay();
257
            from = (cp->parent->left == cp ? SPLAY_FROM_LEFT : SPLAY_FROM_RIGHT);
258
            cp = cp->parent;
259
        }
260
    }
261
}
262
263
void sanity_check(clump_t *cp)
264
{
265
    sanity_check_rec(cp);
266
}
267
268
void sanity_check_mid(clump_t *cp)
269
{
270
    clump_t *parent;
271
272
    while ((parent = cp->parent) != NULL)
273
    {
274
        if (parent->left == cp)
275
        {
276
            if (parent->right == cp)
277
                broken_splay();
278
        }
279
        else if (parent->right != cp)
280
            broken_splay();
281
        cp = parent;
282
    }
283
284
    sanity_check_rec(cp);
285
}
286
287
#else
288
3.52M
#define SANITY_CHECK(cp) while (0) {}
289
11.4M
#define SANITY_CHECK_MID(cp) while (0) {}
290
#endif
291
292
/* When initing with the root, we want to pass the smallest inorder one
293
 * back immediately, and set it up so that we step right for the next
294
 * one. */
295
clump_t *
296
clump_splay_walk_init(clump_splay_walker *sw, const gs_ref_memory_t *mem)
297
1.26M
{
298
1.26M
    clump_t *cp = mem->root;
299
300
1.26M
    if (cp)
301
1.26M
    {
302
1.26M
        SANITY_CHECK(cp);
303
304
1.26M
        sw->from = SPLAY_FROM_LEFT;
305
3.26M
        while (cp->left)
306
2.00M
        {
307
2.00M
            cp = cp->left;
308
2.00M
        }
309
1.26M
    }
310
1.26M
    sw->cp = cp;
311
1.26M
    sw->end = NULL;
312
1.26M
    return cp;
313
1.26M
}
314
315
clump_t *
316
clump_splay_walk_bwd_init(clump_splay_walker *sw, const gs_ref_memory_t *mem)
317
19.2k
{
318
19.2k
    clump_t *cp = mem->root;
319
320
19.2k
    if (cp)
321
19.2k
    {
322
19.2k
        SANITY_CHECK(cp);
323
324
19.2k
        sw->from = SPLAY_FROM_RIGHT;
325
42.0k
        while (cp->right)
326
22.7k
        {
327
22.7k
            cp = cp->right;
328
22.7k
        }
329
19.2k
    }
330
19.2k
    sw->cp = cp;
331
19.2k
    sw->end = NULL;
332
19.2k
    return cp;
333
19.2k
}
334
335
/* When initing 'mid walk' (i.e. with a non-root node), we want to
336
 * return the node we are given as the first one, and continue
337
 * onwards in an in order fashion.
338
 */
339
clump_t *
340
clump_splay_walk_init_mid(clump_splay_walker *sw, clump_t *cp)
341
10.4M
{
342
10.4M
    sw->from = SPLAY_FROM_LEFT;
343
10.4M
    sw->cp = cp;
344
10.4M
    sw->end = cp;
345
10.4M
    if (cp)
346
10.4M
    {
347
10.4M
        SANITY_CHECK_MID(cp);
348
10.4M
    }
349
10.4M
    return cp;
350
10.4M
}
351
352
clump_t *
353
clump_splay_walk_fwd(clump_splay_walker *sw)
354
13.8M
{
355
13.8M
    clump_t *cp = sw->cp;
356
13.8M
    int from = sw->from;
357
358
13.8M
    if (cp == NULL)
359
423
        return NULL;
360
361
    /* We step through the tree, and stop when we arrive
362
     * at sw->end in an in order manner (i.e. by moving from
363
     * the left). */
364
29.0M
    while (1)
365
29.0M
    {
366
29.0M
        if (from == SPLAY_FROM_ABOVE)
367
10.7M
        {
368
            /* We have arrived from above. Step left. */
369
10.7M
            if (cp->left)
370
6.09M
            {
371
6.09M
                cp = cp->left;
372
6.09M
                from = SPLAY_FROM_ABOVE;
373
6.09M
                continue;
374
6.09M
            }
375
            /* No left to step to, so imagine we have just arrived from there */
376
4.67M
            from = SPLAY_FROM_LEFT;
377
            /* Have we reached the stopping point? */
378
4.67M
            if (cp == sw->end)
379
25.8k
                cp = NULL;
380
            /* We want to stop here, for inorder operation. So break out of the loop. */
381
4.67M
            break;
382
10.7M
        }
383
18.2M
        if (from == SPLAY_FROM_LEFT)
384
13.8M
        {
385
            /* We have arrived from the left. Step right. */
386
13.8M
            if (cp->right)
387
4.53M
            {
388
4.53M
                cp = cp->right;
389
4.53M
                from = SPLAY_FROM_ABOVE;
390
4.53M
                continue;
391
4.53M
            }
392
            /* No right to step to, so imagine we have just arrived from there. */
393
9.29M
            from = SPLAY_FROM_RIGHT;
394
9.29M
        }
395
13.7M
        if (from == SPLAY_FROM_RIGHT)
396
13.7M
        {
397
            /* We have arrived from the right. Step up. */
398
13.7M
            clump_t *old = cp;
399
13.7M
            cp = cp->parent;
400
13.7M
            if (cp == NULL)
401
1.40M
            {
402
                /* We've reached the root of the tree. Is this our stopping point? */
403
1.40M
                if (sw->end == NULL)
404
1.26M
                    break;
405
                /* If not, step on. */
406
144k
                cp = old;
407
144k
                from = SPLAY_FROM_ABOVE;
408
144k
            }
409
12.3M
            else
410
12.3M
            {
411
12.3M
                from = (cp->left == old ? SPLAY_FROM_LEFT : SPLAY_FROM_RIGHT);
412
12.3M
                if (from == SPLAY_FROM_LEFT)
413
7.89M
                {
414
                    /* Have we reached the stopping point? */
415
7.89M
                    if (cp == sw->end)
416
87.3k
                        cp = NULL;
417
7.89M
                    break;
418
7.89M
                }
419
12.3M
            }
420
13.7M
        }
421
13.7M
    }
422
13.8M
    sw->cp = cp;
423
13.8M
    sw->from = from;
424
13.8M
    return cp;
425
13.8M
}
426
427
clump_t *
428
clump_splay_walk_bwd(clump_splay_walker *sw)
429
196k
{
430
196k
    clump_t *cp = sw->cp;
431
196k
    int from = sw->from;
432
433
196k
    if (cp == NULL)
434
0
        return NULL;
435
436
    /* We step backwards through the tree, and stop when we arrive
437
     * at sw->end in a reverse in order manner (i.e. by moving from
438
     * the right). */
439
444k
    while (1)
440
444k
    {
441
444k
        if (from == SPLAY_FROM_ABOVE)
442
154k
        {
443
            /* We have arrived from above. Step right. */
444
154k
            if (cp->right)
445
60.0k
            {
446
60.0k
                cp = cp->right;
447
60.0k
                from = SPLAY_FROM_ABOVE;
448
60.0k
                continue;
449
60.0k
            }
450
            /* No right to step to, so imagine we have just arrived from there. */
451
94.0k
            from = SPLAY_FROM_RIGHT;
452
            /* Have we reached our end? */
453
94.0k
            if (cp == sw->end)
454
0
                cp = NULL;
455
            /* Stop to run inorder operation */
456
94.0k
            break;
457
154k
        }
458
290k
        if (from == SPLAY_FROM_RIGHT)
459
196k
        {
460
            /* We have arrived from the right. Step left. */
461
196k
            if (cp->left)
462
94.0k
            {
463
94.0k
                cp = cp->left;
464
94.0k
                from = SPLAY_FROM_ABOVE;
465
94.0k
                continue;
466
94.0k
            }
467
            /* No left to step to, so imagine we have just arrived from there. */
468
102k
            from = SPLAY_FROM_LEFT;
469
102k
        }
470
196k
        if (from == SPLAY_FROM_LEFT)
471
196k
        {
472
            /* We have arrived from the left. Step up. */
473
196k
            clump_t *old = cp;
474
196k
            cp = cp->parent;
475
196k
            from = (cp == NULL || cp->left != old ? SPLAY_FROM_RIGHT : SPLAY_FROM_LEFT);
476
196k
            if (from == SPLAY_FROM_RIGHT)
477
102k
            {
478
102k
                if (cp == sw->end)
479
19.2k
                    cp = NULL;
480
102k
                break;
481
102k
            }
482
196k
        }
483
196k
    }
484
196k
    sw->cp = cp;
485
196k
    sw->from = from;
486
196k
    return cp;
487
196k
}
488
489
static clump_t *
490
clump_splay_remove(clump_t *cp, gs_ref_memory_t *imem)
491
1.67M
{
492
1.67M
    clump_t *replacement;
493
494
1.67M
    if (cp->left == NULL)
495
370k
    {
496
        /* At most one child - easy */
497
370k
        replacement = cp->right;
498
370k
    }
499
1.30M
    else if (cp->right == NULL)
500
672k
    {
501
        /* Strictly one child - easy */
502
672k
        replacement = cp->left;
503
672k
    }
504
628k
    else
505
628k
    {
506
        /* 2 Children - tricky */
507
        /* Find in-order predecessor to f */
508
628k
        replacement = cp->left;
509
1.25M
        while (replacement->right)
510
624k
            replacement = replacement->right;
511
        /* Remove replacement - easy as just one child */
512
628k
        (void)clump_splay_remove(replacement, imem);
513
        /* Replace cp with replacement */
514
628k
        if (cp->left)
515
597k
            cp->left->parent = replacement;
516
628k
        cp->right->parent = replacement;
517
628k
        replacement->left = cp->left;
518
628k
        replacement->right = cp->right;
519
628k
    }
520
1.67M
    if (cp->parent)
521
823k
    {
522
823k
        if (cp->parent->left == cp)
523
396k
            cp->parent->left = replacement;
524
426k
        else
525
426k
            cp->parent->right = replacement;
526
823k
    }
527
847k
    else
528
847k
        imem->root = replacement;
529
1.67M
    if (replacement)
530
1.32M
        replacement->parent = cp->parent;
531
1.67M
    return replacement;
532
1.67M
}
533
534
/* Here we apply a function to all the nodes in a tree in
535
 * depth first order. This means that the given function
536
 * can safely alter: 1) the clump, 2) it's children,
537
 * 3) it's parents child pointer that points to it
538
 * without fear of corruption. Specifically this means
539
 * that the function can free (and unlink) the node
540
 * if it wants.
541
 */
542
clump_t *
543
clump_splay_app(clump_t *root, gs_ref_memory_t *imem, splay_app_result_t (*fn)(clump_t *, void *), void *arg)
544
225k
{
545
225k
    clump_t *step_to;
546
225k
    clump_t *cp = root;
547
225k
    int from = SPLAY_FROM_ABOVE;
548
225k
    splay_app_result_t res;
549
550
225k
    SANITY_CHECK(cp);
551
552
1.89M
    while (cp)
553
1.67M
    {
554
1.67M
        if (from == SPLAY_FROM_ABOVE)
555
951k
        {
556
            /* We have arrived from above. Step left. */
557
951k
            step_to = cp->left;
558
951k
            if (step_to)
559
511k
            {
560
511k
                from = SPLAY_FROM_ABOVE;
561
511k
                cp = step_to;
562
511k
            }
563
440k
            else
564
440k
            {
565
                /* No left to step to, so imagine we have just arrived from the left */
566
440k
                from = SPLAY_FROM_LEFT;
567
440k
            }
568
951k
        }
569
1.67M
        if (from == SPLAY_FROM_LEFT)
570
951k
        {
571
            /* We have arrived from the left. Step right. */
572
951k
            step_to = cp->right;
573
951k
            if (step_to)
574
215k
            {
575
215k
                from = SPLAY_FROM_ABOVE;
576
215k
                cp = step_to;
577
215k
            }
578
736k
            else
579
736k
            {
580
                /* No right to step to, so imagine we have just arrived from the right. */
581
736k
                from = SPLAY_FROM_RIGHT;
582
736k
            }
583
951k
        }
584
1.67M
        if (from == SPLAY_FROM_RIGHT)
585
951k
        {
586
            /* We have arrived from the right. Step up. */
587
951k
            step_to = cp->parent;
588
951k
            if (step_to)
589
726k
            {
590
726k
                from = (step_to->left == cp ? SPLAY_FROM_LEFT : SPLAY_FROM_RIGHT);
591
726k
            }
592
951k
            res = fn(cp, arg);
593
951k
            if (res & SPLAY_APP_STOP)
594
3.41k
                return cp;
595
947k
            cp = step_to;
596
947k
        }
597
1.67M
    }
598
221k
    return cp;
599
225k
}
600
601
/* Move the given node to the root of the tree, by
602
 * performing a series of the following rotations.
603
 * The key observation here is that all these
604
 * rotations preserve the ordering of the tree, and
605
 * result in 'x' getting higher.
606
 *
607
 * Case 1:   z          x           Case 1b:   z                   x
608
 *          # #        # #                    # #                 # #
609
 *         y   D      A   y                  A   y               y   D
610
 *        # #     =>     # #                    # #     =>      # #
611
 *       x   C          B   z                  B   x           z   C
612
 *      # #                # #                    # #         # #
613
 *     A   B              C   D                  C   D       A   B
614
 *
615
 * Case 2:   z             x        Case 2b:   z                  x
616
 *          # #          ## ##                # #               ## ##
617
 *         y   D        y     z              A   y             z     y
618
 *        # #     =>   # #   # #                # #     =>    # #   # #
619
 *       A   x        A   B C   D              x   D         A   B C   D
620
 *          # #                               # #
621
 *         B   C                             B   C
622
 *
623
 * Case 3:   y          x           Case 3b:  y                  x
624
 *          # #        # #                   # #                # #
625
 *         x   C  =>  A   y                 A   x       =>     y   C
626
 *        # #            # #                   # #            # #
627
 *       A   B          B   C                 B   C          A   B
628
 */
629
static void
630
splay_move_to_root(clump_t *x, gs_ref_memory_t *mem)
631
13.1M
{
632
13.1M
    clump_t *y, *z;
633
634
13.1M
    if (x == NULL)
635
0
        return;
636
637
30.4M
    while ((y = x->parent) != NULL) {
638
17.2M
        if ((z = y->parent) != NULL) {
639
9.17M
            x->parent = z->parent;
640
9.17M
            if (x->parent) {
641
4.71M
                if (x->parent->left == z)
642
2.34M
                    x->parent->left = x;
643
2.36M
                else
644
2.36M
                    x->parent->right  = x;
645
4.71M
            }
646
9.17M
            y->parent = x;
647
            /* Case 1, 1b, 2 or 2b */
648
9.17M
            if (y->left == x) {
649
                /* Case 1 or 2b */
650
4.76M
                if (z->left == y) {
651
                    /* Case 1 */
652
3.07M
                    y->left = x->right;
653
3.07M
                    if (y->left)
654
2.19M
                        y->left->parent = y;
655
3.07M
                    z->left = y->right;
656
3.07M
                    if (z->left)
657
2.06M
                        z->left->parent = z;
658
3.07M
                    y->right = z;
659
3.07M
                    z->parent = y;
660
3.07M
                } else {
661
                    /* Case 2b */
662
1.69M
                    z->right = x->left;
663
1.69M
                    if (z->right)
664
835k
                        z->right->parent = z;
665
1.69M
                    y->left = x->right;
666
1.69M
                    if (y->left)
667
949k
                        y->left->parent = y;
668
1.69M
                    x->left = z;
669
1.69M
                    z->parent = x;
670
1.69M
                }
671
4.76M
                x->right      = y;
672
4.76M
            } else {
673
                /* Case 2 or 1b */
674
4.40M
                if (z->left == y) {
675
                    /* Case 2 */
676
1.42M
                    y->right = x->left;
677
1.42M
                    if (y->right)
678
952k
                        y->right->parent = y;
679
1.42M
                    z->left = x->right;
680
1.42M
                    if (z->left)
681
905k
                        z->left->parent = z;
682
1.42M
                    x->right = z;
683
1.42M
                    z->parent = x;
684
2.97M
                } else {
685
                    /* Case 1b */
686
2.97M
                    z->right = y->left;
687
2.97M
                    if (z->right)
688
2.02M
                        z->right->parent = z;
689
2.97M
                    y->right = x->left;
690
2.97M
                    if (y->right)
691
2.12M
                        y->right->parent = y;
692
2.97M
                    y->left = z;
693
2.97M
                    z->parent = y;
694
2.97M
                }
695
4.40M
                x->left = y;
696
4.40M
            }
697
9.17M
        } else {
698
            /* Case 3 or 3b */
699
8.12M
            x->parent = NULL;
700
8.12M
            y->parent = x;
701
8.12M
            if (y->left == x) {
702
                /* Case 3 */
703
3.91M
                y->left = x->right;
704
3.91M
                if (y->left)
705
2.79M
                    y->left->parent = y;
706
3.91M
                x->right = y;
707
4.20M
            } else {
708
                /* Case 3b */
709
4.20M
                y->right = x->left;
710
4.20M
                if (y->right)
711
2.87M
                    y->right->parent = y;
712
4.20M
                x->left = y;
713
4.20M
            }
714
8.12M
        }
715
17.2M
    }
716
13.1M
    mem->root = x;
717
13.1M
}
718
719
static void
720
splay_insert(clump_t *cp, gs_ref_memory_t *mem)
721
1.03M
{
722
1.03M
    clump_t *node = NULL;
723
1.03M
    clump_t **root = &mem->root;
724
725
4.37M
    while (*root) {
726
3.33M
        node = *root;
727
3.33M
        if (PTR_LT(cp->cbase, node->cbase)) {
728
1.49M
            root = &node->left;
729
1.83M
        } else {
730
1.83M
            root = &node->right;
731
1.83M
        }
732
3.33M
    }
733
1.03M
    *root = cp;
734
1.03M
    cp->left = NULL;
735
1.03M
    cp->right = NULL;
736
1.03M
    cp->parent = node;
737
1.03M
    splay_move_to_root(cp, mem);
738
1.03M
}
739
740
/*
741
 * Allocate and mostly initialize the state of an allocator (system, global,
742
 * or local).  Does not initialize global or space.
743
 */
744
static void *ialloc_solo(gs_memory_t *, gs_memory_type_ptr_t,
745
                          clump_t **);
746
gs_ref_memory_t *
747
ialloc_alloc_state(gs_memory_t * parent, uint clump_size)
748
3.41k
{
749
3.41k
    clump_t *cp;
750
3.41k
    gs_ref_memory_t *iimem = ialloc_solo(parent, &st_ref_memory, &cp);
751
752
3.41k
    if (iimem == 0)
753
0
        return 0;
754
3.41k
    iimem->stable_memory = (gs_memory_t *)iimem;
755
3.41k
    iimem->procs = gs_ref_memory_procs;
756
3.41k
    iimem->gs_lib_ctx = parent->gs_lib_ctx;
757
3.41k
    iimem->non_gc_memory = parent;
758
3.41k
    iimem->thread_safe_memory = parent->thread_safe_memory;
759
3.41k
    iimem->clump_size = clump_size;
760
#if defined(MEMENTO) || defined(SINGLE_OBJECT_MEMORY_BLOCKS_ONLY)
761
    iimem->large_size = 1;
762
#else
763
3.41k
    iimem->large_size = ((clump_size / 4) & -obj_align_mod) + 1;
764
3.41k
#endif
765
3.41k
    iimem->is_controlled = false;
766
3.41k
    iimem->gc_status.vm_threshold = clump_size * 3L;
767
3.41k
    iimem->gc_status.max_vm = MAX_MAX_VM;
768
3.41k
    iimem->gc_status.signal_value = 0;
769
3.41k
    iimem->gc_status.enabled = false;
770
3.41k
    iimem->gc_status.requested = 0;
771
3.41k
    iimem->gc_allocated = 0;
772
3.41k
    iimem->previous_status.allocated = 0;
773
3.41k
    iimem->previous_status.used = 0;
774
3.41k
    ialloc_reset(iimem);
775
3.41k
    iimem->root = cp;
776
3.41k
    ialloc_set_limit(iimem);
777
3.41k
    iimem->cc = NULL;
778
3.41k
    iimem->save_level = 0;
779
3.41k
    iimem->new_mask = 0;
780
3.41k
    iimem->test_mask = ~0;
781
3.41k
    iimem->streams = 0;
782
3.41k
    iimem->names_array = 0;
783
3.41k
    iimem->roots = 0;
784
3.41k
    iimem->num_contexts = 0;
785
3.41k
    iimem->saved = 0;
786
3.41k
    return iimem;
787
3.41k
}
788
789
/* Allocate a 'solo' object with its own clump. */
790
static void *
791
ialloc_solo(gs_memory_t * parent, gs_memory_type_ptr_t pstype,
792
            clump_t ** pcp)
793
3.41k
{ /*
794
         * We can't assume that the parent uses the same object header
795
         * that we do, but the GC requires that allocators have
796
         * such a header.  Therefore, we prepend one explicitly.
797
         */
798
3.41k
    clump_t *cp =
799
3.41k
        gs_raw_alloc_struct_immovable(parent, &st_clump,
800
3.41k
                                      "ialloc_solo(clump)");
801
3.41k
    uint csize =
802
3.41k
        ROUND_UP(sizeof(clump_head_t) + sizeof(obj_header_t) +
803
3.41k
                 pstype->ssize,
804
3.41k
                 obj_align_mod);
805
3.41k
    byte *cdata = gs_alloc_bytes_immovable(parent, csize, "ialloc_solo");
806
3.41k
    obj_header_t *obj = (obj_header_t *) (cdata + sizeof(clump_head_t));
807
808
3.41k
    if (cp == 0 || cdata == 0) {
809
0
        gs_free_object(parent, cp, "ialloc_solo(allocation failure)");
810
0
        gs_free_object(parent, cdata, "ialloc_solo(allocation failure)");
811
0
        return 0;
812
0
    }
813
3.41k
    alloc_init_clump(cp, cdata, cdata + csize, false, (clump_t *) NULL);
814
3.41k
    cp->cbot = cp->ctop;
815
3.41k
    cp->parent = cp->left = cp->right = 0;
816
3.41k
    cp->c_alone = true;
817
    /* Construct the object header "by hand". */
818
3.41k
    obj->o_pad = 0;
819
3.41k
    obj->o_alone = 1;
820
3.41k
    obj->o_size = pstype->ssize;
821
3.41k
    obj->o_type = pstype;
822
3.41k
    *pcp = cp;
823
3.41k
    return (void *)(obj + 1);
824
3.41k
}
825
826
void
827
ialloc_free_state(gs_ref_memory_t *iimem)
828
0
{
829
0
    clump_t *cp;
830
0
    gs_memory_t *mem;
831
0
    if (iimem == NULL)
832
0
        return;
833
0
    cp = iimem->root;
834
0
    mem = iimem->non_gc_memory;
835
0
    if (cp == NULL)
836
0
        return;
837
0
    gs_free_object(mem, cp->chead, "ialloc_solo(allocation failure)");
838
0
    gs_free_object(mem, cp, "ialloc_solo(allocation failure)");
839
0
}
840
841
/*
842
 * Add a clump to an externally controlled allocator.  Such allocators
843
 * allocate all objects as immovable, are not garbage-collected, and
844
 * don't attempt to acquire additional memory on their own.
845
 */
846
int
847
ialloc_add_clump(gs_ref_memory_t *imem, ulong space, client_name_t cname)
848
0
{
849
0
    clump_t *cp;
850
851
    /* Allow acquisition of this clump. */
852
0
    imem->is_controlled = false;
853
0
    imem->large_size = imem->clump_size;
854
0
    imem->limit = imem->gc_status.max_vm = MAX_MAX_VM;
855
856
    /* Acquire the clump. */
857
0
    cp = alloc_add_clump(imem, space, cname);
858
859
    /*
860
     * Make all allocations immovable.  Since the "movable" allocators
861
     * allocate within existing clumps, whereas the "immovable" ones
862
     * allocate in new clumps, we equate the latter to the former, even
863
     * though this seems backwards.
864
     */
865
0
    imem->procs.alloc_bytes_immovable = imem->procs.alloc_bytes;
866
0
    imem->procs.alloc_struct_immovable = imem->procs.alloc_struct;
867
0
    imem->procs.alloc_byte_array_immovable = imem->procs.alloc_byte_array;
868
0
    imem->procs.alloc_struct_array_immovable = imem->procs.alloc_struct_array;
869
0
    imem->procs.alloc_string_immovable = imem->procs.alloc_string;
870
871
    /* Disable acquisition of additional clumps. */
872
0
    imem->is_controlled = true;
873
0
    imem->limit = 0;
874
875
0
    return (cp ? 0 : gs_note_error(gs_error_VMerror));
876
0
}
877
878
/* Prepare for a GC by clearing the stream list. */
879
/* This probably belongs somewhere else.... */
880
void
881
ialloc_gc_prepare(gs_ref_memory_t * mem)
882
198k
{ /*
883
         * We have to unlink every stream from its neighbors,
884
         * so that referenced streams don't keep all streams around.
885
         */
886
198k
    while (mem->streams != 0) {
887
0
        stream *s = mem->streams;
888
889
0
        mem->streams = s->next;
890
0
        s->prev = s->next = 0;
891
0
    }
892
198k
}
893
894
/* Initialize after a save. */
895
void
896
ialloc_reset(gs_ref_memory_t * mem)
897
19.2k
{
898
19.2k
    mem->root = 0;
899
19.2k
    mem->cc = NULL;
900
19.2k
    mem->allocated = 0;
901
19.2k
    mem->changes = 0;
902
19.2k
    mem->scan_limit = 0;
903
19.2k
    mem->total_scanned = 0;
904
19.2k
    mem->total_scanned_after_compacting = 0;
905
19.2k
    ialloc_reset_free(mem);
906
19.2k
}
907
908
/* Initialize after a save or GC. */
909
void
910
ialloc_reset_free(gs_ref_memory_t * mem)
911
218k
{
912
218k
    int i;
913
218k
    obj_header_t **p;
914
915
218k
    mem->lost.objects = 0;
916
218k
    mem->lost.refs = 0;
917
218k
    mem->lost.strings = 0;
918
218k
    mem->cfreed.cp = 0;
919
22.4M
    for (i = 0, p = &mem->freelists[0]; i < num_freelists; i++, p++)
920
22.2M
        *p = 0;
921
218k
    mem->largest_free_size = 0;
922
218k
}
923
924
/*
925
 * Set an arbitrary limit so that the amount of allocated VM does not grow
926
 * indefinitely even when GC is disabled.  Benchmarks have shown that
927
 * the resulting GC's are infrequent enough not to degrade performance
928
 * significantly.
929
 */
930
#define FORCE_GC_LIMIT 8000000
931
932
/* Set the allocation limit after a change in one or more of */
933
/* vm_threshold, max_vm, or enabled, or after a GC. */
934
void
935
ialloc_set_limit(register gs_ref_memory_t * mem)
936
4.08M
{ /*
937
         * The following code is intended to set the limit so that
938
         * we stop allocating when allocated + previous_status.allocated
939
         * exceeds the lesser of max_vm or (if GC is enabled)
940
         * gc_allocated + vm_threshold.
941
         */
942
4.08M
    size_t max_allocated =
943
4.08M
    (mem->gc_status.max_vm > mem->previous_status.allocated ?
944
4.08M
     mem->gc_status.max_vm - mem->previous_status.allocated :
945
4.08M
     0);
946
947
4.08M
    if (mem->gc_status.enabled) {
948
220k
        size_t limit = mem->gc_allocated + mem->gc_status.vm_threshold;
949
950
220k
        if (limit < mem->previous_status.allocated)
951
50
            mem->limit = 0;
952
220k
        else {
953
220k
            limit -= mem->previous_status.allocated;
954
220k
            mem->limit = min(limit, max_allocated);
955
220k
        }
956
220k
    } else
957
3.86M
        mem->limit = min(max_allocated, mem->gc_allocated + FORCE_GC_LIMIT);
958
4.08M
    if_debug7m('0', (const gs_memory_t *)mem,
959
4.08M
               "[0]space=%d, max_vm=%"PRIdSIZE", prev.alloc=%"PRIdSIZE", enabled=%d, "
960
4.08M
               "gc_alloc=%"PRIdSIZE", threshold=%"PRIdSIZE" => limit=%"PRIdSIZE"\n",
961
4.08M
               mem->space, mem->gc_status.max_vm,
962
4.08M
               mem->previous_status.allocated,
963
4.08M
               mem->gc_status.enabled, mem->gc_allocated,
964
4.08M
               mem->gc_status.vm_threshold, mem->limit);
965
4.08M
}
966
967
struct free_data
968
{
969
    gs_ref_memory_t *imem;
970
    clump_t         *allocator;
971
};
972
973
static splay_app_result_t
974
free_all_not_allocator(clump_t *cp, void *arg)
975
199k
{
976
199k
    struct free_data *fd = (struct free_data *)arg;
977
978
199k
    if (cp->cbase + sizeof(obj_header_t) != (byte *)fd->imem)
979
192k
        alloc_free_clump(cp, fd->imem);
980
6.83k
    else
981
6.83k
        fd->allocator = cp;
982
983
199k
    return SPLAY_APP_CONTINUE;
984
199k
}
985
986
static splay_app_result_t
987
free_all_allocator(clump_t *cp, void *arg)
988
3.41k
{
989
3.41k
    struct free_data *fd = (struct free_data *)arg;
990
991
3.41k
    if (cp->cbase + sizeof(obj_header_t) != (byte *)fd->imem)
992
0
        return SPLAY_APP_CONTINUE;
993
994
3.41k
    fd->allocator = cp;
995
3.41k
    alloc_free_clump(cp, fd->imem);
996
3.41k
    return SPLAY_APP_STOP;
997
3.41k
}
998
999
/*
1000
 * Free all the memory owned by the allocator, except the allocator itself.
1001
 * Note that this only frees memory at the current save level: the client
1002
 * is responsible for restoring to the outermost level if desired.
1003
 */
1004
static void
1005
i_free_all(gs_memory_t * mem, uint free_mask, client_name_t cname)
1006
22.6k
{
1007
22.6k
    gs_ref_memory_t * imem = (gs_ref_memory_t *)mem;
1008
22.6k
    struct free_data fd;
1009
1010
22.6k
    fd.imem = imem;
1011
22.6k
    fd.allocator = NULL;
1012
1013
22.6k
    if (free_mask & FREE_ALL_DATA && imem->root != NULL) {
1014
        /* Free every clump except the allocator */
1015
22.6k
        clump_splay_app(imem->root, imem, free_all_not_allocator, &fd);
1016
1017
        /* Reinstate the allocator as the sole clump */
1018
22.6k
        imem->root = fd.allocator;
1019
22.6k
        if (fd.allocator)
1020
6.83k
            fd.allocator->parent = fd.allocator->left = fd.allocator->right = NULL;
1021
22.6k
    }
1022
22.6k
    if (free_mask & FREE_ALL_ALLOCATOR) {
1023
        /* Walk the tree to find the allocator. */
1024
3.41k
        clump_splay_app(imem->root, imem, free_all_allocator, &fd);
1025
3.41k
    }
1026
22.6k
}
1027
1028
/* ================ Accessors ================ */
1029
1030
/* Get the size of an object from the header. */
1031
static size_t
1032
i_object_size(gs_memory_t * mem, const void /*obj_header_t */ *obj)
1033
779k
{
1034
779k
    return pre_obj_contents_size((const obj_header_t *)obj - 1);
1035
779k
}
1036
1037
/* Get the type of a structure from the header. */
1038
static gs_memory_type_ptr_t
1039
i_object_type(const gs_memory_t * mem, const void /*obj_header_t */ *obj)
1040
10.3k
{
1041
10.3k
    return ((const obj_header_t *)obj - 1)->o_type;
1042
10.3k
}
1043
1044
/* Get the GC status of a memory. */
1045
void
1046
gs_memory_gc_status(const gs_ref_memory_t * mem, gs_memory_gc_status_t * pstat)
1047
4.09M
{
1048
4.09M
    *pstat = mem->gc_status;
1049
4.09M
}
1050
1051
/* Set the GC status of a memory. */
1052
void
1053
gs_memory_set_gc_status(gs_ref_memory_t * mem, const gs_memory_gc_status_t * pstat)
1054
4.07M
{
1055
4.07M
    mem->gc_status = *pstat;
1056
4.07M
    ialloc_set_limit(mem);
1057
4.07M
}
1058
1059
/* Set VM threshold. Value passed as int64_t since it is signed */
1060
void
1061
gs_memory_set_vm_threshold(gs_ref_memory_t * mem, int64_t val)
1062
10.3k
{
1063
10.3k
    gs_memory_gc_status_t stat;
1064
10.3k
    gs_ref_memory_t * stable = (gs_ref_memory_t *)mem->stable_memory;
1065
1066
10.3k
    if (val < MIN_VM_THRESHOLD)
1067
0
        val = MIN_VM_THRESHOLD;
1068
10.3k
    else if (val > MAX_VM_THRESHOLD)
1069
0
        val = MAX_VM_THRESHOLD;
1070
10.3k
    gs_memory_gc_status(mem, &stat);
1071
10.3k
    stat.vm_threshold = val;
1072
10.3k
    gs_memory_set_gc_status(mem, &stat);
1073
10.3k
    gs_memory_gc_status(stable, &stat);
1074
10.3k
    stat.vm_threshold = val;
1075
10.3k
    gs_memory_set_gc_status(stable, &stat);
1076
10.3k
}
1077
1078
/* Set VM reclaim. */
1079
void
1080
gs_memory_set_vm_reclaim(gs_ref_memory_t * mem, bool enabled)
1081
10.3k
{
1082
10.3k
    gs_memory_gc_status_t stat;
1083
10.3k
    gs_ref_memory_t * stable = (gs_ref_memory_t *)mem->stable_memory;
1084
1085
10.3k
    gs_memory_gc_status(mem, &stat);
1086
10.3k
    stat.enabled = enabled;
1087
10.3k
    gs_memory_set_gc_status(mem, &stat);
1088
10.3k
    gs_memory_gc_status(stable, &stat);
1089
10.3k
    stat.enabled = enabled;
1090
10.3k
    gs_memory_set_gc_status(stable, &stat);
1091
10.3k
}
1092
1093
/* ================ Objects ================ */
1094
1095
/* Allocate a small object quickly if possible. */
1096
/* The size must be substantially less than max_uint. */
1097
/* ptr must be declared as obj_header_t *. */
1098
/* pfl must be declared as obj_header_t **. */
1099
#define IF_FREELIST_ALLOC(ptr, imem, size, pstype, pfl)\
1100
52.1M
        if ( size <= max_freelist_size &&\
1101
52.1M
             *(pfl = &imem->freelists[(size + obj_align_mask) >> log2_obj_align_mod]) != 0\
1102
52.1M
           )\
1103
52.1M
        { ptr = *pfl;\
1104
30.1M
                *pfl = *(obj_header_t **)ptr;\
1105
30.1M
                ptr[-1].o_size = (obj_size_t)size;\
1106
30.1M
                ptr[-1].o_type = pstype;\
1107
30.1M
                /* If debugging, clear the block in an attempt to */\
1108
30.1M
                /* track down uninitialized data errors. */\
1109
52.1M
                gs_alloc_fill(ptr, gs_alloc_fill_alloc, size);
1110
#define ELSEIF_BIG_FREELIST_ALLOC(ptr, imem, size, pstype)\
1111
30.1M
        }\
1112
52.1M
        else if (size > max_freelist_size &&\
1113
21.9M
                 (ptr = large_freelist_alloc(imem, size)) != 0)\
1114
21.9M
        { ptr[-1].o_type = pstype;\
1115
7.49M
                /* If debugging, clear the block in an attempt to */\
1116
7.49M
                /* track down uninitialized data errors. */\
1117
52.1M
                gs_alloc_fill(ptr, gs_alloc_fill_alloc, size);
1118
#define ELSEIF_LIFO_ALLOC(ptr, imem, size, pstype)\
1119
7.49M
        }\
1120
21.9M
        else if ( imem->cc && !imem->cc->c_alone && \
1121
14.4M
                (imem->cc->ctop - (byte *)(ptr = (obj_header_t *)imem->cc->cbot))\
1122
14.4M
                >= size + (obj_align_mod + sizeof(obj_header_t) * 2) &&\
1123
14.4M
             size < imem->large_size\
1124
14.4M
           )\
1125
14.4M
        { imem->cc->cbot = (byte *)ptr + obj_size_round(size);\
1126
13.8M
                ptr->o_pad = 0;\
1127
13.8M
                ptr->o_alone = 0;\
1128
13.8M
                ptr->o_size = (obj_size_t)size;\
1129
13.8M
                ptr->o_type = pstype;\
1130
13.8M
                ptr++;\
1131
13.8M
                /* If debugging, clear the block in an attempt to */\
1132
13.8M
                /* track down uninitialized data errors. */\
1133
21.9M
                gs_alloc_fill(ptr, gs_alloc_fill_alloc, size);
1134
#define ELSE_ALLOC\
1135
13.8M
        }\
1136
        else
1137
1138
static byte *
1139
i_alloc_bytes(gs_memory_t * mem, size_t ssize, client_name_t cname)
1140
3.84M
{
1141
3.84M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1142
3.84M
    obj_header_t *obj;
1143
3.84M
    obj_header_t **pfl;
1144
3.84M
    obj_size_t size = (obj_size_t)ssize;
1145
1146
#ifdef MEMENTO
1147
    if (Memento_failThisEvent())
1148
        return NULL;
1149
#endif
1150
1151
3.84M
    if ((size_t)size != ssize)
1152
0
        return NULL;
1153
1154
3.84M
    IF_FREELIST_ALLOC(obj, imem, size, &st_bytes, pfl)
1155
1.01M
        alloc_trace(":+bf", imem, cname, NULL, size, obj);
1156
1.01M
    ELSEIF_BIG_FREELIST_ALLOC(obj, imem, size, &st_bytes)
1157
259k
        alloc_trace(":+bF", imem, cname, NULL, size, obj);
1158
2.08M
    ELSEIF_LIFO_ALLOC(obj, imem, (uint)size, &st_bytes)
1159
2.08M
        alloc_trace(":+b ", imem, cname, NULL, size, obj);
1160
2.08M
    ELSE_ALLOC
1161
478k
    {
1162
478k
        obj = alloc_obj(imem, size, &st_bytes, 0, cname);
1163
478k
        if (obj == 0)
1164
2
            return 0;
1165
478k
        alloc_trace(":+b.", imem, cname, NULL, size, obj);
1166
478k
    }
1167
#if IGC_PTR_STABILITY_CHECK
1168
        obj[-1].d.o.space_id = imem->space_id;
1169
#endif
1170
3.84M
    return (byte *)Memento_label(obj, cname);
1171
3.84M
}
1172
static byte *
1173
i_alloc_bytes_immovable(gs_memory_t * mem, size_t ssize, client_name_t cname)
1174
116k
{
1175
116k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1176
116k
    obj_header_t *obj;
1177
116k
    obj_size_t size = (obj_size_t)ssize;
1178
1179
#ifdef MEMENTO
1180
    if (Memento_failThisEvent())
1181
        return NULL;
1182
#endif
1183
1184
116k
    if ((size_t)size != ssize)
1185
0
        return NULL;
1186
1187
116k
    obj = alloc_obj(imem, size, &st_bytes,
1188
116k
                    ALLOC_IMMOVABLE | ALLOC_DIRECT, cname);
1189
116k
    if (obj == 0)
1190
0
        return 0;
1191
116k
    alloc_trace("|+b.", imem, cname, NULL, size, obj);
1192
116k
    return (byte *)Memento_label(obj, cname);
1193
116k
}
1194
static void *
1195
i_alloc_struct(gs_memory_t * mem, gs_memory_type_ptr_t pstype,
1196
               client_name_t cname)
1197
48.2M
{
1198
48.2M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1199
48.2M
    obj_size_t size = pstype->ssize;
1200
48.2M
    obj_header_t *obj;
1201
48.2M
    obj_header_t **pfl;
1202
1203
#ifdef MEMENTO
1204
    if (Memento_failThisEvent())
1205
        return NULL;
1206
#endif
1207
1208
48.2M
    ALLOC_CHECK_SIZE(mem,pstype);
1209
48.2M
    IF_FREELIST_ALLOC(obj, imem, size, pstype, pfl)
1210
29.1M
        alloc_trace(":+<f", imem, cname, pstype, size, obj);
1211
29.1M
    ELSEIF_BIG_FREELIST_ALLOC(obj, imem, size, pstype)
1212
7.23M
        alloc_trace(":+<F", imem, cname, pstype, size, obj);
1213
11.7M
    ELSEIF_LIFO_ALLOC(obj, imem, size, pstype)
1214
11.7M
        alloc_trace(":+< ", imem, cname, pstype, size, obj);
1215
11.7M
    ELSE_ALLOC
1216
104k
    {
1217
104k
        obj = alloc_obj(imem, size, pstype, 0, cname);
1218
104k
        if (obj == 0)
1219
0
            return 0;
1220
104k
        alloc_trace(":+<.", imem, cname, pstype, size, obj);
1221
104k
    }
1222
#if IGC_PTR_STABILITY_CHECK
1223
        obj[-1].d.o.space_id = imem->space_id;
1224
#endif
1225
48.2M
    return Memento_label(obj, cname);
1226
48.2M
}
1227
static void *
1228
i_alloc_struct_immovable(gs_memory_t * mem, gs_memory_type_ptr_t pstype,
1229
                         client_name_t cname)
1230
58.2k
{
1231
58.2k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1232
58.2k
    obj_size_t size = pstype->ssize;
1233
58.2k
    obj_header_t *obj;
1234
1235
#ifdef MEMENTO
1236
    if (Memento_failThisEvent())
1237
        return NULL;
1238
#endif
1239
1240
58.2k
    ALLOC_CHECK_SIZE(mem,pstype);
1241
58.2k
    obj = alloc_obj(imem, size, pstype, ALLOC_IMMOVABLE | ALLOC_DIRECT, cname);
1242
58.2k
    alloc_trace("|+<.", imem, cname, pstype, size, obj);
1243
58.2k
    return Memento_label(obj, cname);
1244
58.2k
}
1245
1246
static inline bool
1247
alloc_array_check_size(size_t num_elements, size_t elt_size, size_t *lsize)
1248
2.83M
{
1249
2.83M
    int shift0, shift1;
1250
2.83M
    size_t m, n;
1251
1252
    /* Avoid the loops in the overwhelming number of cases. */
1253
2.83M
    if ((num_elements | elt_size) >= 65536) {
1254
        /* Slightly conservative, but it'll work for our purposes. */
1255
        /* m is the maximum unsigned value representable in shift0 bits */
1256
18
        for (m=0, shift0 = 0; m < num_elements; m = (m<<1)+1, shift0++);
1257
        /* n is the maximum unsigned value representable in shift1 bits */
1258
6
        for (n=0, shift1 = 0; n < elt_size; n = (n<<1)+1, shift1++);
1259
        /* An shift0 bit unsigned number multiplied by an shift1 bit
1260
         * unsigned number is guaranteed to fit in n+m-1 bits. */
1261
1
        if (shift0+shift1-1 > 8*sizeof(size_t))
1262
0
            return false; /* Overflow */
1263
1
    }
1264
1265
2.83M
    *lsize = num_elements * elt_size;
1266
2.83M
    return true;
1267
2.83M
}
1268
1269
static byte *
1270
i_alloc_byte_array(gs_memory_t * mem, size_t num_elements, size_t elt_size,
1271
                   client_name_t cname)
1272
871k
{
1273
871k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1274
871k
    obj_header_t *obj;
1275
871k
    size_t slsize;
1276
871k
    obj_size_t lsize;
1277
#ifdef MEMENTO
1278
    if (Memento_failThisEvent())
1279
        return NULL;
1280
#endif
1281
871k
    if (alloc_array_check_size(num_elements, elt_size, &slsize) == false)
1282
0
        return NULL;
1283
871k
    lsize = (obj_size_t)slsize;
1284
871k
    if ((size_t)lsize != slsize)
1285
0
        return NULL;
1286
871k
    obj = alloc_obj(imem, lsize,
1287
871k
                    &st_bytes, ALLOC_DIRECT, cname);
1288
1289
871k
    if_debug6m('A', mem, "[a%d:+b.]%s -bytes-*(%"PRIuSIZE"=%"PRIuSIZE"*%"PRIuSIZE") = "PRI_INTPTR"\n",
1290
871k
               alloc_trace_space(imem), client_name_string(cname),
1291
871k
               num_elements * elt_size,
1292
871k
               num_elements, elt_size, (intptr_t)obj);
1293
871k
    return (byte *)Memento_label(obj, cname);
1294
871k
}
1295
static byte *
1296
i_alloc_byte_array_immovable(gs_memory_t * mem, size_t num_elements,
1297
                             size_t elt_size, client_name_t cname)
1298
0
{
1299
0
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1300
0
    obj_header_t *obj;
1301
0
    size_t slsize;
1302
0
    obj_size_t lsize;
1303
#ifdef MEMENTO
1304
    if (Memento_failThisEvent())
1305
        return NULL;
1306
#endif
1307
0
    if (alloc_array_check_size(num_elements, elt_size, &slsize) == false)
1308
0
        return NULL;
1309
0
    lsize = (obj_size_t)slsize;
1310
0
    if ((size_t)lsize != slsize)
1311
0
        return NULL;
1312
0
    obj = alloc_obj(imem, lsize,
1313
0
                    &st_bytes, ALLOC_IMMOVABLE | ALLOC_DIRECT,
1314
0
                    cname);
1315
1316
0
    if_debug6m('A', mem, "[a%d|+b.]%s -bytes-*(%"PRIuSIZE"=%"PRIuSIZE"*%"PRIuSIZE") = "PRI_INTPTR"\n",
1317
0
               alloc_trace_space(imem), client_name_string(cname),
1318
0
               num_elements * elt_size,
1319
0
               num_elements, elt_size, (intptr_t)obj);
1320
0
    return (byte *)Memento_label(obj, cname);
1321
0
}
1322
static void *
1323
i_alloc_struct_array(gs_memory_t * mem, size_t num_elements,
1324
                     gs_memory_type_ptr_t pstype, client_name_t cname)
1325
1.95M
{
1326
1.95M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1327
1.95M
    obj_header_t *obj;
1328
1.95M
    size_t slsize;
1329
1.95M
    obj_size_t lsize;
1330
#ifdef MEMENTO
1331
    if (Memento_failThisEvent())
1332
        return NULL;
1333
#endif
1334
1335
1.95M
    ALLOC_CHECK_SIZE(mem,pstype);
1336
#ifdef DEBUG
1337
    if (pstype->enum_ptrs == basic_enum_ptrs) {
1338
        dmprintf2(mem, "  i_alloc_struct_array: called with incorrect structure type (not element), struct='%s', client='%s'\n",
1339
                pstype->sname, cname);
1340
        return NULL;    /* fail */
1341
    }
1342
#endif
1343
1.95M
    if (alloc_array_check_size(num_elements, pstype->ssize, &slsize) == false)
1344
0
        return NULL;
1345
1.95M
    lsize = (obj_size_t)slsize;
1346
1.95M
    if ((size_t)lsize != slsize)
1347
0
        return NULL;
1348
1.95M
    obj = alloc_obj(imem, lsize, pstype, ALLOC_DIRECT, cname);
1349
1.95M
    if_debug7m('A', mem, "[a%d:+<.]%s %s*(%"PRIuSIZE"=%"PRIuSIZE"*%u) = "PRI_INTPTR"\n",
1350
1.95M
               alloc_trace_space(imem), client_name_string(cname),
1351
1.95M
               struct_type_name_string(pstype),
1352
1.95M
               num_elements * pstype->ssize,
1353
1.95M
               num_elements, pstype->ssize, (intptr_t)obj);
1354
1.95M
    return (char *)Memento_label(obj, cname);
1355
1.95M
}
1356
static void *
1357
i_alloc_struct_array_immovable(gs_memory_t * mem, size_t num_elements,
1358
                               gs_memory_type_ptr_t pstype, client_name_t cname)
1359
683
{
1360
683
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1361
683
    obj_header_t *obj;
1362
683
    size_t slsize;
1363
683
    obj_size_t lsize;
1364
#ifdef MEMENTO
1365
    if (Memento_failThisEvent())
1366
        return NULL;
1367
#endif
1368
1369
683
    ALLOC_CHECK_SIZE(mem,pstype);
1370
683
    if (alloc_array_check_size(num_elements, pstype->ssize, &slsize) == false)
1371
0
        return NULL;
1372
683
    lsize = (obj_size_t)slsize;
1373
683
    if ((size_t)lsize != slsize)
1374
0
        return NULL;
1375
683
    obj = alloc_obj(imem, lsize, pstype, ALLOC_IMMOVABLE | ALLOC_DIRECT, cname);
1376
683
    if_debug7m('A', mem, "[a%d|+<.]%s %s*(%"PRIuSIZE"=%"PRIuSIZE"*%u) = "PRI_INTPTR"\n",
1377
683
               alloc_trace_space(imem), client_name_string(cname),
1378
683
               struct_type_name_string(pstype),
1379
683
               num_elements * pstype->ssize,
1380
683
               num_elements, pstype->ssize, (intptr_t)obj);
1381
683
    return (char *)Memento_label(obj, cname);
1382
683
}
1383
static void *
1384
i_resize_object(gs_memory_t * mem, void *obj, size_t new_num_elements,
1385
                client_name_t cname)
1386
0
{
1387
0
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1388
0
    obj_header_t *pp = (obj_header_t *) obj - 1;
1389
0
    gs_memory_type_ptr_t pstype = pp->o_type;
1390
0
    size_t old_size = pre_obj_contents_size(pp);
1391
0
    size_t new_size = pstype->ssize * new_num_elements;
1392
0
    size_t old_size_rounded = obj_align_round(old_size);
1393
0
    size_t new_size_rounded = obj_align_round(new_size);
1394
0
    void *new_obj = NULL;
1395
1396
#ifdef MEMENTO
1397
    if (Memento_failThisEvent())
1398
        return NULL;
1399
#endif
1400
1401
0
    if (new_size_rounded != (obj_size_t)new_size_rounded)
1402
0
        return NULL;
1403
1404
0
    if (old_size_rounded == new_size_rounded) {
1405
0
        pp->o_size = (obj_size_t)new_size;
1406
0
        new_obj = obj;
1407
0
    } else
1408
0
        if (imem->cc && (byte *)obj + old_size_rounded == imem->cc->cbot &&
1409
0
            imem->cc->ctop - (byte *)obj >= new_size_rounded ) {
1410
0
            imem->cc->cbot = (byte *)obj + new_size_rounded;
1411
0
            pp->o_size = (obj_size_t)new_size;
1412
0
            new_obj = obj;
1413
0
        } else /* try and trim the object -- but only if room for a dummy header */
1414
0
            if (new_size_rounded + sizeof(obj_header_t) <= old_size_rounded) {
1415
0
                trim_obj(imem, obj, (obj_size_t)new_size, (clump_t *)0);
1416
0
                new_obj = obj;
1417
0
            }
1418
0
    if (new_obj) {
1419
0
        if_debug8m('A', mem, "[a%d:%c%c ]%s %s(%"PRIuSIZE"=>%"PRIuSIZE") "PRI_INTPTR"\n",
1420
0
                   alloc_trace_space(imem),
1421
0
                   (new_size > old_size ? '>' : '<'),
1422
0
                   (pstype == &st_bytes ? 'b' : '<'),
1423
0
                   client_name_string(cname),
1424
0
                   struct_type_name_string(pstype),
1425
0
                   old_size, new_size, (intptr_t)obj);
1426
0
        return Memento_label(new_obj, cname);
1427
0
    }
1428
    /* Punt. */
1429
0
    new_obj = gs_alloc_struct_array(mem, new_num_elements, void,
1430
0
                                    pstype, cname);
1431
0
    if (new_obj == 0)
1432
0
        return 0;
1433
0
    memcpy(new_obj, obj, min(old_size, new_size));
1434
0
    gs_free_object(mem, obj, cname);
1435
0
    return Memento_label(new_obj, cname);
1436
0
}
1437
static void
1438
i_free_object(gs_memory_t * mem, void *ptr, client_name_t cname)
1439
58.7M
{
1440
58.7M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1441
58.7M
    obj_header_t *pp;
1442
58.7M
    gs_memory_type_ptr_t pstype;
1443
58.7M
    gs_memory_struct_type_t saved_stype;
1444
1445
58.7M
    struct_proc_finalize((*finalize));
1446
58.7M
    size_t size, rounded_size;
1447
1448
58.7M
    if (ptr == 0)
1449
5.73M
        return;
1450
53.0M
    pp = (obj_header_t *) ptr - 1;
1451
53.0M
    pstype = pp->o_type;
1452
#ifdef DEBUG
1453
    if (gs_debug_c('?')) {
1454
        clump_locator_t cld;
1455
1456
        if (pstype == &st_free) {
1457
            mlprintf2(mem, "%s: object "PRI_INTPTR" already free!\n",
1458
                      client_name_string(cname), (intptr_t)ptr);
1459
            return;   /*gs_abort(); */
1460
        }
1461
        /* Check that this allocator owns the object being freed. */
1462
        cld.memory = imem;
1463
        while ((cld.cp = cld.memory->root),
1464
               !clump_locate_ptr(ptr, &cld)
1465
            ) {
1466
            if (!cld.memory->saved) {
1467
                mlprintf3(mem, "%s: freeing "PRI_INTPTR", not owned by memory "PRI_INTPTR"!\n",
1468
                          client_name_string(cname), (intptr_t)ptr,
1469
                          (intptr_t)mem);
1470
                return;   /*gs_abort(); */
1471
            }
1472
                  /****** HACK: we know the saved state is the first ******
1473
                   ****** member of an alloc_save_t. ******/
1474
            cld.memory = (gs_ref_memory_t *) cld.memory->saved;
1475
        }
1476
        /* Check that the object is in the allocated region. */
1477
        if (!(PTR_BETWEEN((const byte *)pp, cld.cp->cbase,
1478
                          cld.cp->cbot))
1479
            ) {
1480
            mlprintf5(mem, "%s: freeing "PRI_INTPTR",\n\toutside clump "PRI_INTPTR" cbase="PRI_INTPTR", cbot="PRI_INTPTR"!\n",
1481
                      client_name_string(cname), (intptr_t) ptr,
1482
                      (intptr_t) cld.cp, (intptr_t) cld.cp->cbase,
1483
                      (intptr_t) cld.cp->cbot);
1484
            return;   /*gs_abort(); */
1485
        }
1486
    }
1487
#endif
1488
53.0M
    size = pre_obj_contents_size(pp);
1489
53.0M
    rounded_size = obj_align_round(size);
1490
53.0M
    finalize = pstype->finalize;
1491
53.0M
    if (finalize != 0) {
1492
1493
        /* unfortunately device finalize procedures will clobber the
1494
           stype which is used for later debugging with "A" debug
1495
           tracing, so we save stype it in a local. */
1496
4.81M
        if (gs_debug['a'] || gs_debug['A'])
1497
0
            saved_stype = *pstype;
1498
1499
4.81M
        if_debug3m('u', mem, "[u]finalizing %s "PRI_INTPTR" (%s)\n",
1500
4.81M
                   struct_type_name_string(pstype),
1501
4.81M
                   (intptr_t)ptr, client_name_string(cname));
1502
4.81M
        (*finalize) (mem, ptr);
1503
1504
4.81M
        if (gs_debug['a'] || gs_debug['A'])
1505
0
            pstype = &saved_stype;
1506
4.81M
    }
1507
53.0M
    if (imem->cc && (byte *) ptr + rounded_size == imem->cc->cbot) {
1508
11.7M
        alloc_trace(":-o ", imem, cname, pstype, size, ptr);
1509
11.7M
        gs_alloc_fill(ptr, gs_alloc_fill_free, size);
1510
11.7M
        imem->cc->cbot = (byte *) pp;
1511
        /* IFF this object is adjacent to (or below) the byte after the
1512
         * highest free object, do the consolidation within this clump. */
1513
11.7M
        if ((byte *)pp <= imem->cc->int_freed_top) {
1514
1.37M
            consolidate_clump_free(imem->cc, imem);
1515
1.37M
        }
1516
11.7M
        return;
1517
11.7M
    }
1518
41.2M
    if (pp->o_alone) {
1519
                /*
1520
                 * We gave this object its own clump.  Free the entire clump,
1521
                 * unless it belongs to an older save level, in which case
1522
                 * we mustn't overwrite it.
1523
                 */
1524
804k
        clump_locator_t cl;
1525
1526
#ifdef DEBUG
1527
        {
1528
            clump_locator_t cld;
1529
1530
            cld.memory = imem;
1531
            cld.cp = 0;
1532
            if (gs_debug_c('a'))
1533
                alloc_trace(
1534
                            (clump_locate_ptr(ptr, &cld) ? ":-oL" : ":-o~"),
1535
                               imem, cname, pstype, size, ptr);
1536
        }
1537
#endif
1538
804k
        cl.memory = imem;
1539
804k
        cl.cp = 0;
1540
804k
        if (clump_locate_ptr(ptr, &cl)) {
1541
804k
            if (!imem->is_controlled)
1542
804k
                alloc_free_clump(cl.cp, imem);
1543
804k
            return;
1544
804k
        }
1545
        /* Don't overwrite even if gs_alloc_debug is set. */
1546
804k
    }
1547
40.4M
    if (rounded_size >= sizeof(obj_header_t *)) {
1548
        /*
1549
         * Put the object on a freelist, unless it belongs to
1550
         * an older save level, in which case we mustn't
1551
         * overwrite it.
1552
         */
1553
40.4M
        imem->cfreed.memory = imem;
1554
40.4M
        if (clump_locate(ptr, &imem->cfreed)) {
1555
40.4M
            obj_header_t **pfl;
1556
1557
40.4M
            if (size > max_freelist_size) {
1558
7.84M
                pfl = &imem->freelists[LARGE_FREELIST_INDEX];
1559
7.84M
                if (rounded_size > imem->largest_free_size)
1560
208k
                    imem->largest_free_size = rounded_size;
1561
32.6M
            } else {
1562
32.6M
                pfl = &imem->freelists[(size + obj_align_mask) >>
1563
32.6M
                                      log2_obj_align_mod];
1564
32.6M
            }
1565
            /* keep track of highest object on a freelist */
1566
            /* If we're dealing with a block in the currently open clump
1567
               (in imem->cc) update that, otherwise, update the clump in
1568
               the clump list (in imem->cfreed.cp)
1569
             */
1570
40.4M
            if (imem->cc && imem->cfreed.cp->chead == imem->cc->chead) {
1571
3.62M
                if ((byte *)pp >= imem->cc->int_freed_top) {
1572
1.05M
                    imem->cc->int_freed_top = (byte *)ptr + rounded_size;
1573
1.05M
                }
1574
3.62M
            }
1575
36.8M
            else {
1576
36.8M
                if ((byte *)pp >= imem->cfreed.cp->int_freed_top) {
1577
68.0k
                    imem->cfreed.cp->int_freed_top = (byte *)ptr + rounded_size;
1578
68.0k
                }
1579
36.8M
            }
1580
40.4M
            pp->o_type = &st_free;  /* don't confuse GC */
1581
40.4M
            o_set_unmarked(pp);
1582
40.4M
            gs_alloc_fill(ptr, gs_alloc_fill_free, size);
1583
40.4M
            *(obj_header_t **) ptr = *pfl;
1584
40.4M
            *pfl = (obj_header_t *) ptr;
1585
40.4M
            alloc_trace((size > max_freelist_size ? ":-oF" : ":-of"),
1586
40.4M
                        imem, cname, pstype, size, ptr);
1587
40.4M
            return;
1588
40.4M
        }
1589
        /* Don't overwrite even if gs_alloc_debug is set. */
1590
40.4M
    } else {
1591
19
        pp->o_type = &st_free;  /* don't confuse GC */
1592
19
        gs_alloc_fill(ptr, gs_alloc_fill_free, size);
1593
19
    }
1594
40.4M
    alloc_trace(":-o#", imem, cname, pstype, size, ptr);
1595
19
    imem->lost.objects += obj_size_round(size);
1596
19
}
1597
static byte *
1598
i_alloc_string(gs_memory_t * mem, size_t nbytes, client_name_t cname)
1599
7.68M
{
1600
7.68M
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1601
7.68M
    byte *str;
1602
7.68M
    clump_splay_walker sw;
1603
1604
    /*
1605
     * Cycle through the clumps at the current save level, starting
1606
     * with the currently open one.
1607
     */
1608
7.68M
    clump_t *cp = clump_splay_walk_init_mid(&sw, imem->cc);
1609
1610
#ifdef MEMENTO
1611
    if (Memento_failThisEvent())
1612
        return NULL;
1613
#endif
1614
7.68M
    if (cp == 0) {
1615
        /* Open an arbitrary clump. */
1616
0
        imem->cc = clump_splay_walk_init(&sw, imem);
1617
0
        alloc_open_clump(imem);
1618
0
    }
1619
8.57M
top:
1620
8.57M
    if (imem->cc && !imem->cc->c_alone && imem->cc->ctop - imem->cc->cbot > nbytes) {
1621
7.68M
        if_debug4m('A', mem, "[a%d:+> ]%s(%"PRIuSIZE") = "PRI_INTPTR"\n",
1622
7.68M
                   alloc_trace_space(imem), client_name_string(cname), nbytes,
1623
7.68M
                   (intptr_t)(imem->cc->ctop - nbytes));
1624
7.68M
        str = imem->cc->ctop -= nbytes;
1625
7.68M
        gs_alloc_fill(str, gs_alloc_fill_alloc, nbytes);
1626
7.68M
        return str;
1627
7.68M
    }
1628
    /* Try the next clump. */
1629
894k
    cp = clump_splay_walk_fwd(&sw);
1630
1631
894k
    if (cp != NULL)
1632
886k
    {
1633
886k
        alloc_close_clump(imem);
1634
886k
        imem->cc = cp;
1635
886k
        alloc_open_clump(imem);
1636
886k
        goto top;
1637
886k
    }
1638
8.39k
    if (nbytes > string_space_quanta(SIZE_MAX - sizeof(clump_head_t)) *
1639
8.39k
        string_data_quantum
1640
8.39k
        ) {     /* Can't represent the size in a uint! */
1641
0
        return 0;
1642
0
    }
1643
8.39k
    if (nbytes >= imem->large_size) { /* Give it a clump all its own. */
1644
2.74k
        return i_alloc_string_immovable(mem, nbytes, cname);
1645
5.64k
    } else {     /* Add another clump. */
1646
5.64k
        cp = alloc_acquire_clump(imem, (ulong) imem->clump_size, true, "clump");
1647
1648
5.64k
        if (cp == 0)
1649
0
            return 0;
1650
5.64k
        alloc_close_clump(imem);
1651
5.64k
        imem->cc = clump_splay_walk_init_mid(&sw, cp);
1652
5.64k
        gs_alloc_fill(imem->cc->cbase, gs_alloc_fill_free,
1653
5.64k
                      imem->cc->climit - imem->cc->cbase);
1654
5.64k
        goto top;
1655
5.64k
    }
1656
8.39k
}
1657
static byte *
1658
i_alloc_string_immovable(gs_memory_t * mem, size_t nbytes, client_name_t cname)
1659
2.74k
{
1660
2.74k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1661
2.74k
    byte *str;
1662
2.74k
    size_t asize;
1663
2.74k
    clump_t *cp;
1664
1665
#ifdef MEMENTO
1666
    if (Memento_failThisEvent())
1667
        return NULL;
1668
#endif
1669
    /* Give it a clump all its own. */
1670
2.74k
    asize = string_clump_space(nbytes) + sizeof(clump_head_t);
1671
2.74k
    cp = alloc_acquire_clump(imem, asize, true, "large string clump");
1672
1673
2.74k
    if (cp == 0)
1674
0
        return 0;
1675
2.74k
    cp->c_alone = true;
1676
1677
2.74k
    str = cp->ctop = cp->climit - nbytes;
1678
2.74k
    if_debug4m('a', mem, "[a%d|+>L]%s(%"PRIuSIZE") = "PRI_INTPTR"\n",
1679
2.74k
               alloc_trace_space(imem), client_name_string(cname), nbytes,
1680
2.74k
               (intptr_t)str);
1681
2.74k
    gs_alloc_fill(str, gs_alloc_fill_alloc, nbytes);
1682
1683
2.74k
    return Memento_label(str, cname);
1684
2.74k
}
1685
1686
static byte *
1687
i_resize_string(gs_memory_t * mem, byte * data, size_t old_num, size_t new_num,
1688
                client_name_t cname)
1689
116k
{
1690
116k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1691
116k
    byte *ptr;
1692
1693
116k
    if (old_num == new_num)  /* same size returns the same string */
1694
76.9k
        return data;
1695
1696
39.7k
    if ( imem->cc && data == imem->cc->ctop && /* bottom-most string */
1697
39.7k
        (new_num < old_num ||
1698
39.6k
         imem->cc->ctop - imem->cc->cbot > new_num - old_num)
1699
39.7k
        ) {     /* Resize in place. */
1700
39.6k
        ptr = data + old_num - new_num;
1701
39.6k
        if_debug6m('A', mem, "[a%d:%c> ]%s(%"PRIuSIZE"->%"PRIuSIZE") "PRI_INTPTR"\n",
1702
39.6k
                   alloc_trace_space(imem),
1703
39.6k
                   (new_num > old_num ? '>' : '<'),
1704
39.6k
                   client_name_string(cname), old_num, new_num,
1705
39.6k
                   (intptr_t)ptr);
1706
39.6k
        imem->cc->ctop = ptr;
1707
39.6k
        memmove(ptr, data, min(old_num, new_num));
1708
#ifdef DEBUG
1709
        if (new_num > old_num)
1710
            gs_alloc_fill(ptr + old_num, gs_alloc_fill_alloc,
1711
                          new_num - old_num);
1712
        else
1713
            gs_alloc_fill(data, gs_alloc_fill_free, old_num - new_num);
1714
#endif
1715
39.6k
    } else
1716
27
        if (new_num < old_num) {
1717
            /* trim the string and create a free space hole */
1718
0
            ptr = data;
1719
0
            imem->lost.strings += old_num - new_num;
1720
0
            gs_alloc_fill(data + new_num, gs_alloc_fill_free,
1721
0
                          old_num - new_num);
1722
0
            if_debug5m('A', mem, "[a%d:<> ]%s(%"PRIuSIZE"->%"PRIuSIZE") "PRI_INTPTR"\n",
1723
0
                       alloc_trace_space(imem), client_name_string(cname),
1724
0
                       old_num, new_num, (intptr_t)ptr);
1725
27
        } else {     /* Punt. */
1726
27
            ptr = gs_alloc_string(mem, new_num, cname);
1727
27
            if (ptr == 0)
1728
0
                return 0;
1729
27
            memcpy(ptr, data, min(old_num, new_num));
1730
27
            gs_free_string(mem, data, old_num, cname);
1731
27
        }
1732
1733
39.7k
    return ptr;
1734
39.7k
}
1735
1736
static void
1737
i_free_string(gs_memory_t * mem, byte * data, size_t nbytes,
1738
              client_name_t cname)
1739
286k
{
1740
286k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1741
1742
286k
    if (data) {
1743
286k
        if (imem->cc && data == imem->cc->ctop) {
1744
281k
            if_debug4m('A', mem, "[a%d:-> ]%s(%"PRIuSIZE") "PRI_INTPTR"\n",
1745
281k
                       alloc_trace_space(imem), client_name_string(cname), nbytes,
1746
281k
                       (intptr_t)data);
1747
281k
            imem->cc->ctop += nbytes;
1748
281k
        } else {
1749
4.84k
            if_debug4m('A', mem, "[a%d:->#]%s(%"PRIuSIZE") "PRI_INTPTR"\n",
1750
4.84k
                       alloc_trace_space(imem), client_name_string(cname), nbytes,
1751
4.84k
                       (intptr_t)data);
1752
4.84k
            imem->lost.strings += nbytes;
1753
4.84k
        }
1754
286k
        gs_alloc_fill(data, gs_alloc_fill_free, nbytes);
1755
286k
    }
1756
286k
}
1757
1758
static gs_memory_t *
1759
i_stable(gs_memory_t *mem)
1760
70.5M
{
1761
70.5M
    return mem->stable_memory;
1762
70.5M
}
1763
1764
static void
1765
i_status(gs_memory_t * mem, gs_memory_status_t * pstat)
1766
218k
{
1767
218k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
1768
218k
    size_t unused = imem->lost.refs + imem->lost.strings;
1769
218k
    size_t inner = 0;
1770
218k
    clump_splay_walker sw;
1771
218k
    clump_t *cp;
1772
1773
218k
    alloc_close_clump(imem);
1774
    /* Add up unallocated space within each clump. */
1775
    /* Also keep track of space allocated to inner clumps, */
1776
    /* which are included in previous_status.allocated. */
1777
1.16M
    for (cp = clump_splay_walk_init(&sw, imem); cp != NULL; cp = clump_splay_walk_fwd(&sw))
1778
949k
    {
1779
949k
        unused += cp->ctop - cp->cbot;
1780
949k
        if (cp->outer)
1781
328k
            inner += cp->cend - (byte *) cp->chead;
1782
949k
    }
1783
218k
    unused += compute_free_objects(imem);
1784
218k
    pstat->used = imem->allocated + inner - unused +
1785
218k
        imem->previous_status.used;
1786
218k
    pstat->allocated = imem->allocated +
1787
218k
        imem->previous_status.allocated;
1788
218k
    pstat->max_used = 0;    /* unknown for this allocator */
1789
218k
    pstat->is_thread_safe = false; /* this allocator is not thread safe */
1790
218k
}
1791
1792
static void
1793
i_enable_free(gs_memory_t * mem, bool enable)
1794
52.2k
{
1795
52.2k
    if (enable)
1796
26.1k
        mem->procs.free_object = i_free_object,
1797
26.1k
            mem->procs.free_string = i_free_string;
1798
26.1k
    else
1799
26.1k
        mem->procs.free_object = gs_ignore_free_object,
1800
26.1k
            mem->procs.free_string = gs_ignore_free_string;
1801
52.2k
}
1802
1803
static void i_set_object_type(gs_memory_t *mem, void *ptr, gs_memory_type_ptr_t type)
1804
0
{
1805
0
    obj_header_t *pp;
1806
1807
0
    if (ptr == 0)
1808
0
        return;
1809
0
    pp = (obj_header_t *) ptr - 1;
1810
0
    pp->o_type = type;
1811
0
}
1812
1813
static void i_defer_frees(gs_memory_t *mem, int defer)
1814
0
{
1815
0
}
1816
1817
/* ------ Internal procedures ------ */
1818
1819
/* Compute the amount of free object space by scanning free lists. */
1820
static size_t
1821
compute_free_objects(gs_ref_memory_t * mem)
1822
218k
{
1823
218k
    size_t unused = mem->lost.objects;
1824
218k
    int i;
1825
1826
    /* Add up space on free lists. */
1827
22.5M
    for (i = 0; i < num_freelists; i++) {
1828
22.3M
        const obj_header_t *pfree;
1829
1830
22.3M
        for (pfree = mem->freelists[i]; pfree != 0;
1831
22.3M
             pfree = *(const obj_header_t * const *)pfree
1832
22.3M
            )
1833
1.33k
            unused += obj_align_round(pfree[-1].o_size);
1834
22.3M
    }
1835
218k
    return unused;
1836
218k
}
1837
1838
/* Allocate an object from the large-block freelist. */
1839
static obj_header_t * /* rets obj if allocated, else 0 */
1840
large_freelist_alloc(gs_ref_memory_t *mem, obj_size_t size)
1841
9.38M
{
1842
    /* Scan large object freelist. We'll grab an object up to 1/8 bigger */
1843
    /* right away, else use best fit of entire scan. */
1844
9.38M
    obj_size_t aligned_size = obj_align_round(size);
1845
9.38M
    size_t aligned_min_size = aligned_size + sizeof(obj_header_t);
1846
9.38M
    size_t aligned_max_size =
1847
9.38M
        aligned_min_size + obj_align_round(aligned_min_size / 8);
1848
9.38M
    obj_header_t *best_fit = 0;
1849
9.38M
    obj_header_t **best_fit_prev = NULL; /* Initialize against indeterminism. */
1850
9.38M
    obj_size_t best_fit_size = (obj_size_t)SIZE_MAX;
1851
9.38M
    obj_header_t *pfree;
1852
9.38M
    obj_header_t **ppfprev = &mem->freelists[LARGE_FREELIST_INDEX];
1853
9.38M
    size_t largest_size = 0;
1854
1855
9.38M
    if (aligned_size > mem->largest_free_size)
1856
1.52M
        return 0;    /* definitely no block large enough */
1857
1858
22.2M
    while ((pfree = *ppfprev) != 0) {
1859
22.1M
        obj_size_t free_size = obj_align_round(pfree[-1].o_size);
1860
1861
22.1M
        if (free_size == aligned_size ||
1862
22.1M
            (free_size >= aligned_min_size && free_size < best_fit_size)
1863
22.1M
            ) {
1864
9.06M
            best_fit = pfree;
1865
9.06M
            best_fit_prev = ppfprev;
1866
9.06M
            best_fit_size = pfree[-1].o_size;
1867
9.06M
            if (best_fit_size <= aligned_max_size)
1868
7.69M
                break; /* good enough fit to spare scan of entire list */
1869
9.06M
        }
1870
14.4M
        ppfprev = (obj_header_t **) pfree;
1871
14.4M
        if (free_size > largest_size)
1872
3.06M
            largest_size = free_size;
1873
14.4M
    }
1874
7.85M
    if (best_fit == 0) {
1875
        /*
1876
         * No single free clump is large enough, but since we scanned the
1877
         * entire list, we now have an accurate updated value for
1878
         * largest_free_size.
1879
         */
1880
142k
        mem->largest_free_size = largest_size;
1881
142k
        return 0;
1882
142k
    }
1883
1884
    /* Remove from freelist & return excess memory to free */
1885
7.71M
    *best_fit_prev = *(obj_header_t **)best_fit;
1886
7.71M
    trim_obj(mem, best_fit, aligned_size, (clump_t *)0);
1887
1888
    /* Pre-init block header; o_alone & o_type are already init'd */
1889
7.71M
    best_fit[-1].o_size = size;
1890
1891
7.71M
    return best_fit;
1892
7.85M
}
1893
1894
/* Allocate an object.  This handles all but the fastest, simplest case. */
1895
static obj_header_t *
1896
alloc_obj(gs_ref_memory_t *mem, obj_size_t lsize, gs_memory_type_ptr_t pstype,
1897
          alloc_flags_t flags, client_name_t cname)
1898
3.58M
{
1899
3.58M
    obj_header_t *ptr;
1900
1901
3.58M
    if (lsize >= mem->large_size || (flags & ALLOC_IMMOVABLE)) {
1902
        /*
1903
         * Give the object a clump all its own.  Note that this case does
1904
         * not occur if is_controlled is true.
1905
         */
1906
865k
        obj_size_t asize =
1907
865k
            ((lsize + obj_align_mask) & -obj_align_mod) +
1908
865k
            sizeof(obj_header_t);
1909
865k
        clump_t *cp =
1910
865k
            alloc_acquire_clump(mem, asize + sizeof(clump_head_t), false,
1911
865k
                                "large object clump");
1912
1913
865k
        if (asize < lsize)
1914
0
            return 0;
1915
865k
        if (cp == 0)
1916
2
            return 0;
1917
865k
        cp->c_alone = true;
1918
865k
        ptr = (obj_header_t *) cp->cbot;
1919
865k
        cp->cbot += asize;
1920
865k
        ptr->o_pad = 0;
1921
865k
        ptr->o_alone = 1;
1922
865k
        ptr->o_size = (obj_size_t)lsize;
1923
2.72M
    } else {
1924
        /*
1925
         * Cycle through the clumps at the current save level, starting
1926
         * with the currently open one.
1927
         */
1928
2.72M
        clump_splay_walker sw;
1929
2.72M
        clump_t *cp = clump_splay_walk_init_mid(&sw, mem->cc);
1930
2.72M
        obj_size_t asize = obj_size_round(lsize);
1931
2.72M
        bool allocate_success = false;
1932
1933
2.72M
        if (lsize > max_freelist_size && (flags & ALLOC_DIRECT)) {
1934
            /* We haven't checked the large block freelist yet. */
1935
664k
            if ((ptr = large_freelist_alloc(mem, lsize)) != 0) {
1936
224k
                --ptr;      /* must point to header */
1937
224k
                goto done;
1938
224k
            }
1939
664k
        }
1940
1941
2.49M
        if (cp == 0) {
1942
            /* Open an arbitrary clump. */
1943
4.30k
            mem->cc = clump_splay_walk_init(&sw, mem);
1944
4.30k
            alloc_open_clump(mem);
1945
4.30k
        }
1946
1947
2.49M
#define CAN_ALLOC_AT_END(cp)\
1948
10.4M
  ((cp) && !((cp)->c_alone) && (cp)->ctop - (byte *) (ptr = (obj_header_t *) (cp)->cbot)\
1949
8.15M
   > asize + sizeof(obj_header_t))
1950
1951
10.4M
        do {
1952
10.4M
            if (CAN_ALLOC_AT_END(mem->cc)) {
1953
2.38M
                allocate_success = true;
1954
2.38M
                break;
1955
8.01M
            } else if (mem->is_controlled) {
1956
                /* Try consolidating free space. */
1957
0
                gs_consolidate_free((gs_memory_t *)mem);
1958
0
                if (CAN_ALLOC_AT_END(mem->cc)) {
1959
0
                    allocate_success = true;
1960
0
                    break;
1961
0
                }
1962
0
            }
1963
            /* No luck, go on to the next clump. */
1964
8.01M
            cp = clump_splay_walk_fwd(&sw);
1965
8.01M
            if (cp == NULL)
1966
108k
                break;
1967
1968
7.90M
            alloc_close_clump(mem);
1969
7.90M
            mem->cc = cp;
1970
7.90M
            alloc_open_clump(mem);
1971
7.90M
        }
1972
7.90M
        while (1);
1973
1974
#ifdef CONSOLIDATE_BEFORE_ADDING_CLUMP
1975
        if (!allocate_success) {
1976
            /*
1977
             * Try consolidating free space before giving up.
1978
             * It's not clear this is a good idea, since it requires quite
1979
             * a lot of computation and doesn't seem to improve things much.
1980
             */
1981
            if (!mem->is_controlled) { /* already did this if controlled */
1982
                clump_t *cp;
1983
1984
                alloc_close_clump(mem);
1985
                for (cp = clump_splay_walk_init_mid(&sw, cp_orig); cp != NULL; cp = clump_splay_walk_fwd(&sw))
1986
                {
1987
                    consolidate_clump_free(cp, mem);
1988
                    if (CAN_ALLOC_AT_END(cp)) {
1989
                        mem->cc = cp;
1990
                        alloc_open_clump(mem);
1991
                        allocate_success = true;
1992
                        break;
1993
                    }
1994
                }
1995
            }
1996
        }
1997
#endif
1998
1999
0
#undef CAN_ALLOC_AT_END
2000
2001
2.49M
        if (!allocate_success) {
2002
            /* Add another clump. */
2003
108k
            clump_t *cp =
2004
108k
                alloc_add_clump(mem, mem->clump_size, "clump");
2005
2006
108k
            if (cp) {
2007
                /* mem->cc == cp */
2008
108k
                ptr = (obj_header_t *)cp->cbot;
2009
108k
                allocate_success = true;
2010
108k
            }
2011
108k
        }
2012
2013
        /*
2014
         * If no success, try to scavenge from low free memory. This is
2015
         * only enabled for controlled memory (currently only async
2016
         * renderer) because it's too much work to prevent it from
2017
         * examining outer save levels in the general case.
2018
         */
2019
2.49M
        if (allocate_success)
2020
2.49M
            mem->cc->cbot = (byte *) ptr + asize;
2021
0
        else if (!mem->is_controlled ||
2022
0
                 (ptr = scavenge_low_free(mem, lsize)) == 0)
2023
0
            return 0; /* allocation failed */
2024
2.49M
        ptr->o_pad = 0;
2025
2.49M
        ptr->o_alone = 0;
2026
2.49M
        ptr->o_size = lsize;
2027
2.49M
    }
2028
3.58M
done:
2029
3.58M
    ptr->o_type = pstype;
2030
#   if IGC_PTR_STABILITY_CHECK
2031
        ptr->d.o.space_id = mem->space_id;
2032
#   endif
2033
3.58M
    ptr++;
2034
3.58M
    gs_alloc_fill(ptr, gs_alloc_fill_alloc, lsize);
2035
3.58M
    return Memento_label(ptr, cname);
2036
3.58M
}
2037
2038
/*
2039
 * Consolidate free objects contiguous to free space at cbot onto the cbot
2040
 * area. Also keep track of end of highest internal free object
2041
 * (int_freed_top).
2042
 */
2043
static void
2044
consolidate_clump_free(clump_t *cp, gs_ref_memory_t *mem)
2045
1.37M
{
2046
1.37M
    obj_header_t *begin_free = 0;
2047
2048
1.37M
    cp->int_freed_top = cp->cbase;  /* below all objects in clump */
2049
7.73M
    SCAN_CLUMP_OBJECTS(cp)
2050
7.73M
    DO_ALL
2051
7.73M
        if (pre->o_type == &st_free) {
2052
2.31M
            if (begin_free == 0)
2053
970k
                begin_free = pre;
2054
5.41M
        } else {
2055
5.41M
            if (begin_free)
2056
267k
                cp->int_freed_top = (byte *)pre; /* first byte following internal free */
2057
5.41M
            begin_free = 0;
2058
5.41M
        }
2059
7.73M
    END_OBJECTS_SCAN
2060
1.37M
    if (begin_free) {
2061
        /* We found free objects at the top of the object area. */
2062
        /* Remove the free objects from the freelists. */
2063
703k
        remove_range_from_freelist(mem, begin_free, cp->cbot);
2064
703k
        if_debug4m('a', (const gs_memory_t *)mem,
2065
703k
                   "[a]resetting clump "PRI_INTPTR" cbot from "PRI_INTPTR" to "PRI_INTPTR" (%lu free)\n",
2066
703k
                   (intptr_t)cp, (intptr_t)cp->cbot, (intptr_t)begin_free,
2067
703k
                   (intptr_t)((byte *)cp->cbot - (byte *)begin_free));
2068
703k
        cp->cbot = (byte *) begin_free;
2069
703k
    }
2070
1.37M
}
2071
2072
static splay_app_result_t
2073
consolidate(clump_t *cp, void *arg)
2074
0
{
2075
0
    gs_ref_memory_t *mem = (gs_ref_memory_t *)arg;
2076
2077
0
    consolidate_clump_free(cp, mem);
2078
0
    if (cp->cbot == cp->cbase && cp->ctop == cp->climit) {
2079
        /* The entire clump is free. */
2080
0
        if (!mem->is_controlled) {
2081
0
            alloc_free_clump(cp, mem);
2082
0
            if (mem->cc == cp)
2083
0
                mem->cc = NULL;
2084
0
        }
2085
0
    }
2086
2087
0
    return SPLAY_APP_CONTINUE;
2088
0
}
2089
2090
/* Consolidate free objects. */
2091
void
2092
ialloc_consolidate_free(gs_ref_memory_t *mem)
2093
0
{
2094
0
    alloc_close_clump(mem);
2095
2096
    /* We used to visit clumps in reverse order to encourage LIFO behavior,
2097
     * but with binary trees this is not possible (unless you want to
2098
     * either change the tree during the process, recurse, or otherwise
2099
     * hold the state). */
2100
0
    clump_splay_app(mem->root, mem, consolidate, mem);
2101
2102
    /* NOTE: Previously, if we freed the current clump, we'd move to whatever the
2103
     * bigger of it's children was. We now just move to the root. */
2104
0
    if (mem->cc == NULL)
2105
0
        mem->cc = mem->root;
2106
2107
0
    alloc_open_clump(mem);
2108
0
}
2109
static void
2110
i_consolidate_free(gs_memory_t *mem)
2111
0
{
2112
0
    ialloc_consolidate_free((gs_ref_memory_t *)mem);
2113
0
}
2114
2115
typedef struct
2116
{
2117
    uint need_free;
2118
    obj_header_t *found_pre;
2119
    gs_ref_memory_t *mem;
2120
    obj_size_t request_size;
2121
} scavenge_data;
2122
2123
static splay_app_result_t
2124
scavenge(clump_t *cp, void *arg)
2125
0
{
2126
0
    scavenge_data *sd = (scavenge_data *)arg;
2127
0
    obj_header_t *begin_free = NULL;
2128
0
    obj_size_t found_free = 0;
2129
2130
0
    sd->found_pre = NULL;
2131
2132
0
    SCAN_CLUMP_OBJECTS(cp)
2133
0
    DO_ALL
2134
0
        if (pre->o_type == &st_free) {
2135
0
            if (begin_free == 0) {
2136
0
                found_free = 0;
2137
0
                begin_free = pre;
2138
0
            }
2139
0
            found_free += pre_obj_rounded_size(pre);
2140
0
            if (begin_free != 0 && found_free >= sd->need_free)
2141
0
                break;
2142
0
        } else
2143
0
            begin_free = 0;
2144
0
    END_OBJECTS_SCAN_NO_ABORT
2145
2146
0
    if (begin_free != 0 && found_free >= sd->need_free) {
2147
        /* Fish found pieces out of various freelists */
2148
0
        remove_range_from_freelist(sd->mem, (char*)begin_free,
2149
0
                                   (char*)begin_free + found_free);
2150
2151
        /* Prepare found object */
2152
0
        sd->found_pre = begin_free;
2153
0
        sd->found_pre->o_type = &st_free;  /* don't confuse GC if gets lost */
2154
0
        sd->found_pre->o_size = found_free - sizeof(obj_header_t);
2155
2156
        /* Chop off excess tail piece & toss it back into free pool */
2157
0
        trim_obj(sd->mem, sd->found_pre + 1, sd->request_size, cp);
2158
0
        return SPLAY_APP_STOP;
2159
0
    }
2160
2161
0
    return SPLAY_APP_CONTINUE;
2162
0
}
2163
2164
/* try to free-up given amount of space from freespace below clump base */
2165
static obj_header_t * /* returns uninitialized object hdr, NULL if none found */
2166
scavenge_low_free(gs_ref_memory_t *mem, obj_size_t request_size)
2167
0
{
2168
    /* find 1st range of memory that can be glued back together to fill request */
2169
0
    scavenge_data sd;
2170
0
    obj_size_t request_size_rounded = obj_size_round(request_size);
2171
2172
0
    sd.found_pre = 0;
2173
0
    sd.need_free = request_size_rounded + sizeof(obj_header_t);    /* room for GC's dummy hdr */
2174
0
    sd.mem = mem;
2175
0
    sd.request_size = request_size;
2176
2177
0
    clump_splay_app(mem->root, mem, scavenge, &sd);
2178
0
    return sd.found_pre;
2179
0
}
2180
2181
/* Remove range of memory from a mem's freelists */
2182
static void
2183
remove_range_from_freelist(gs_ref_memory_t *mem, void* bottom, void* top)
2184
703k
{
2185
703k
    int num_free[num_freelists];
2186
703k
    int smallest = num_freelists, largest = -1;
2187
703k
    const obj_header_t *cur;
2188
703k
    obj_size_t size;
2189
703k
    int i;
2190
703k
    obj_size_t removed = 0;
2191
2192
    /*
2193
     * Scan from bottom to top, a range containing only free objects,
2194
     * counting the number of objects of each size.
2195
     */
2196
2197
2.37M
    for (cur = bottom; cur != top;
2198
1.67M
         cur = (const obj_header_t *)
2199
1.67M
             ((const byte *)cur + obj_size_round(size))
2200
1.67M
        ) {
2201
1.67M
        size = cur->o_size;
2202
1.67M
        i = (size > max_freelist_size ? LARGE_FREELIST_INDEX :
2203
1.67M
             (size + obj_align_mask) >> log2_obj_align_mod);
2204
1.67M
        if (i < smallest) {
2205
            /*
2206
             * 0-length free blocks aren't kept on any list, because
2207
             * they don't have room for a pointer.
2208
             */
2209
942k
            if (i == 0)
2210
59
                continue;
2211
942k
            if (smallest < num_freelists)
2212
239k
                memset(&num_free[i], 0, (smallest - i) * sizeof(int));
2213
703k
            else
2214
703k
                num_free[i] = 0;
2215
942k
            smallest = i;
2216
942k
        }
2217
1.67M
        if (i > largest) {
2218
892k
            if (largest >= 0)
2219
189k
                memset(&num_free[largest + 1], 0, (i - largest) * sizeof(int));
2220
892k
            largest = i;
2221
892k
        }
2222
1.67M
        num_free[i]++;
2223
1.67M
    }
2224
2225
    /*
2226
     * Remove free objects from the freelists, adjusting lost.objects by
2227
     * subtracting the size of the region being processed minus the amount
2228
     * of space reclaimed.
2229
     */
2230
2231
13.4M
    for (i = smallest; i <= largest; i++) {
2232
12.7M
        int count = num_free[i];
2233
12.7M
        obj_header_t *pfree;
2234
12.7M
        obj_header_t **ppfprev;
2235
2236
12.7M
        if (!count)
2237
11.5M
            continue;
2238
1.23M
        ppfprev = &mem->freelists[i];
2239
1.92M
        for (;;) {
2240
1.92M
            pfree = *ppfprev;
2241
1.92M
            if (PTR_GE(pfree, bottom) && PTR_LT(pfree, top)) {
2242
                /* We're removing an object. */
2243
1.67M
                *ppfprev = *(obj_header_t **) pfree;
2244
1.67M
                removed += obj_align_round(pfree[-1].o_size);
2245
1.67M
                if (!--count)
2246
1.23M
                    break;
2247
1.67M
            } else
2248
257k
                ppfprev = (obj_header_t **) pfree;
2249
1.92M
        }
2250
1.23M
    }
2251
703k
    mem->lost.objects -= (char*)top - (char*)bottom - removed;
2252
703k
}
2253
2254
/* Trim a memory object down to a given size */
2255
static void
2256
trim_obj(gs_ref_memory_t *mem, obj_header_t *obj, obj_size_t size, clump_t *cp)
2257
/* Obj must have rounded size == req'd size, or have enough room for */
2258
/* trailing dummy obj_header */
2259
7.71M
{
2260
7.71M
    obj_size_t rounded_size = obj_align_round(size);
2261
7.71M
    obj_header_t *pre_obj = obj - 1;
2262
7.71M
    obj_header_t *excess_pre = (obj_header_t*)((char*)obj + rounded_size);
2263
7.71M
    obj_size_t old_rounded_size = obj_align_round(pre_obj->o_size);
2264
7.71M
    obj_size_t excess_size = old_rounded_size - rounded_size - sizeof(obj_header_t);
2265
2266
    /* trim object's size to desired */
2267
7.71M
    pre_obj->o_size = size;
2268
7.71M
    if (old_rounded_size == rounded_size)
2269
7.67M
        return;  /* nothing more to do here */
2270
    /*
2271
     * If the object is alone in its clump, move cbot to point to the end
2272
     * of the object.
2273
     */
2274
46.0k
    if (pre_obj->o_alone) {
2275
0
        if (!cp) {
2276
0
            mem->cfreed.memory = mem;
2277
0
            if (clump_locate(obj, &mem->cfreed)) {
2278
0
                cp = mem->cfreed.cp;
2279
0
            }
2280
0
        }
2281
0
        if (cp) {
2282
#ifdef DEBUG
2283
            if (cp->cbot != (byte *)obj + old_rounded_size) {
2284
                lprintf3("resizing "PRI_INTPTR", old size %u, new size %u, cbot wrong!\n",
2285
                         (intptr_t)obj, old_rounded_size, size);
2286
                /* gs_abort */
2287
            } else
2288
#endif
2289
0
                {
2290
0
                    cp->cbot = (byte *)excess_pre;
2291
0
                    return;
2292
0
                }
2293
0
        }
2294
        /*
2295
         * Something very weird is going on.  This probably shouldn't
2296
         * ever happen, but if it does....
2297
         */
2298
0
        pre_obj->o_pad = 0;
2299
0
        pre_obj->o_alone = 0;
2300
0
    }
2301
    /* make excess into free obj */
2302
46.0k
    excess_pre->o_type = &st_free;  /* don't confuse GC */
2303
46.0k
    excess_pre->o_size = excess_size;
2304
46.0k
    excess_pre->o_pad = 0;
2305
46.0k
    excess_pre->o_alone = 0;
2306
46.0k
    if (excess_size >= obj_align_mod) {
2307
        /* Put excess object on a freelist */
2308
45.0k
        obj_header_t **pfl;
2309
2310
45.0k
        if (mem->cc && (byte *)excess_pre >= mem->cc->int_freed_top)
2311
4.67k
            mem->cc->int_freed_top = (byte *)excess_pre + excess_size;
2312
45.0k
        if (excess_size <= max_freelist_size)
2313
31.9k
            pfl = &mem->freelists[(excess_size + obj_align_mask) >>
2314
31.9k
                                 log2_obj_align_mod];
2315
13.1k
        else {
2316
13.1k
            uint rounded_size = obj_align_round(excess_size);
2317
2318
13.1k
            pfl = &mem->freelists[LARGE_FREELIST_INDEX];
2319
13.1k
            if (rounded_size > mem->largest_free_size)
2320
0
                mem->largest_free_size = rounded_size;
2321
13.1k
        }
2322
45.0k
        *(obj_header_t **) (excess_pre + 1) = *pfl;
2323
45.0k
        *pfl = excess_pre + 1;
2324
45.0k
        mem->cfreed.memory = mem;
2325
45.0k
    } else {
2326
        /* excess piece will be "lost" memory */
2327
1.01k
        mem->lost.objects += excess_size + sizeof(obj_header_t);
2328
1.01k
    }
2329
46.0k
}
2330
2331
/* ================ Roots ================ */
2332
2333
/* Register a root. */
2334
static int
2335
i_register_root(gs_memory_t * mem, gs_gc_root_t ** rpp, gs_ptr_type_t ptype,
2336
                void **up, client_name_t cname)
2337
22.7k
{
2338
22.7k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
2339
22.7k
    gs_gc_root_t *rp;
2340
2341
22.7k
    if (rpp == NULL || *rpp == NULL) {
2342
0
        rp = gs_raw_alloc_struct_immovable(imem->non_gc_memory, &st_gc_root_t,
2343
0
                                           "i_register_root");
2344
0
        if (rp == 0)
2345
0
            return_error(gs_error_VMerror);
2346
0
        rp->free_on_unregister = true;
2347
0
        if (rpp && *rpp == NULL)
2348
0
            *rpp = rp;
2349
22.7k
    } else {
2350
22.7k
        rp = *rpp;
2351
22.7k
        rp->free_on_unregister = false;
2352
22.7k
    }
2353
22.7k
    if_debug3m('8', mem, "[8]register root(%s) "PRI_INTPTR" -> "PRI_INTPTR"\n",
2354
22.7k
               client_name_string(cname), (intptr_t)rp, (intptr_t)up);
2355
22.7k
    rp->ptype = ptype;
2356
22.7k
    rp->p = up;
2357
22.7k
    rp->next = imem->roots;
2358
22.7k
    imem->roots = rp;
2359
22.7k
    return 0;
2360
22.7k
}
2361
2362
/* Unregister a root. */
2363
static void
2364
i_unregister_root(gs_memory_t * mem, gs_gc_root_t * rp, client_name_t cname)
2365
20.7k
{
2366
20.7k
    gs_ref_memory_t * const imem = (gs_ref_memory_t *)mem;
2367
20.7k
    gs_gc_root_t **rpp = &imem->roots;
2368
2369
20.7k
    if_debug2m('8', mem, "[8]unregister root(%s) "PRI_INTPTR"\n",
2370
20.7k
               client_name_string(cname), (intptr_t)rp);
2371
20.7k
    while (*rpp != rp)
2372
0
        rpp = &(*rpp)->next;
2373
20.7k
    *rpp = (*rpp)->next;
2374
20.7k
    if (rp->free_on_unregister)
2375
0
        gs_free_object(imem->non_gc_memory, rp, "i_unregister_root");
2376
20.7k
}
2377
2378
/* ================ clumps ================ */
2379
2380
public_st_clump();
2381
2382
/* Insert a clump in the chain.  This is exported for the GC and for */
2383
/* the forget_save operation. */
2384
void
2385
alloc_link_clump(clump_t * cp, gs_ref_memory_t * imem)
2386
1.03M
{
2387
1.03M
    splay_insert(cp, imem);
2388
1.03M
    SANITY_CHECK(cp);
2389
1.03M
}
2390
2391
/* Add a clump for ordinary allocation. */
2392
static clump_t *
2393
alloc_add_clump(gs_ref_memory_t * mem, size_t csize, client_name_t cname)
2394
108k
{
2395
108k
    clump_t *cp = alloc_acquire_clump(mem, csize, true, cname);
2396
2397
108k
    if (cp) {
2398
108k
        alloc_close_clump(mem);
2399
108k
        mem->cc = cp;
2400
108k
        gs_alloc_fill(mem->cc->cbase, gs_alloc_fill_free,
2401
108k
                      mem->cc->climit - mem->cc->cbase);
2402
108k
    }
2403
108k
    return cp;
2404
108k
}
2405
2406
/* Acquire a clump.  If we would exceed MaxLocalVM (if relevant), */
2407
/* or if we would exceed the VMThreshold and psignal is NULL, */
2408
/* return 0; if we would exceed the VMThreshold but psignal is valid, */
2409
/* just set the signal and return successfully. */
2410
static clump_t *
2411
alloc_acquire_clump(gs_ref_memory_t * mem, size_t csize, bool has_strings,
2412
                    client_name_t cname)
2413
982k
{
2414
982k
    gs_memory_t *parent = mem->non_gc_memory;
2415
982k
    clump_t *cp;
2416
982k
    byte *cdata;
2417
2418
982k
#if ARCH_SIZEOF_LONG > ARCH_SIZEOF_INT
2419
    /* If csize is larger than max_uint, punt. */
2420
982k
    if (csize != (uint) csize)
2421
0
        return 0;
2422
982k
#endif
2423
982k
    cp = gs_raw_alloc_struct_immovable(parent, &st_clump, cname);
2424
2425
    /* gc_status.signal_value is initialised to zero when the
2426
     * allocator is created, only the Postscript interpreter
2427
     * (which implement garbage collection) takes the action to set
2428
     * it to anything other than zero
2429
     */
2430
982k
    if( mem->gc_status.signal_value != 0) {
2431
        /* we have a garbage collector */
2432
963k
        if (mem->allocated >= mem->limit) {
2433
693
            mem->gc_status.requested += csize;
2434
693
            if (mem->limit >= mem->gc_status.max_vm) {
2435
0
                gs_free_object(parent, cp, cname);
2436
0
                return 0;
2437
0
            }
2438
693
            if_debug4m('0', (const gs_memory_t *)mem,
2439
693
                       "[0]signaling space=%d, allocated=%"PRIdSIZE", limit=%"PRIdSIZE", requested=%"PRIdSIZE"\n",
2440
693
                       mem->space, mem->allocated,
2441
693
                       mem->limit, mem->gc_status.requested);
2442
693
            mem->gs_lib_ctx->gcsignal = mem->gc_status.signal_value;
2443
693
        }
2444
963k
    }
2445
982k
    cdata = gs_alloc_bytes_immovable(parent, csize, cname);
2446
982k
    if (cp == 0 || cdata == 0) {
2447
2
        gs_free_object(parent, cdata, cname);
2448
2
        gs_free_object(parent, cp, cname);
2449
2
        mem->gc_status.requested = csize;
2450
2
        return 0;
2451
2
    }
2452
982k
    alloc_init_clump(cp, cdata, cdata + csize, has_strings, (clump_t *) 0);
2453
982k
    alloc_link_clump(cp, mem);
2454
982k
    mem->allocated += st_clump.ssize + csize;
2455
982k
    SANITY_CHECK(cp);
2456
982k
    return cp;
2457
982k
}
2458
2459
/* Initialize the pointers in a clump.  This is exported for save/restore. */
2460
/* The bottom pointer must be aligned, but the top pointer need not */
2461
/* be aligned. */
2462
void
2463
alloc_init_clump(clump_t * cp, byte * bot, byte * top, bool has_strings,
2464
                 clump_t * outer)
2465
1.04M
{
2466
1.04M
    byte *cdata = bot;
2467
2468
1.04M
    if (outer != 0)
2469
56.7k
        outer->inner_count++;
2470
1.04M
    cp->chead = (clump_head_t *) cdata;
2471
1.04M
    cdata += sizeof(clump_head_t);
2472
1.04M
    cp->cbot = cp->cbase = cp->int_freed_top = cdata;
2473
1.04M
    cp->cend = top;
2474
1.04M
    cp->rcur = 0;
2475
1.04M
    cp->rtop = 0;
2476
1.04M
    cp->outer = outer;
2477
1.04M
    cp->inner_count = 0;
2478
1.04M
    cp->has_refs = false;
2479
1.04M
    cp->sbase = cdata;
2480
1.04M
    cp->c_alone = false; /* should be set correctly by caller */
2481
1.04M
    if (has_strings && top - cdata >= string_space_quantum + sizeof(long) - 1) {
2482
        /*
2483
         * We allocate a large enough string marking and reloc table
2484
         * to cover the entire clump.
2485
         */
2486
173k
        uint nquanta = string_space_quanta(top - cdata);
2487
2488
173k
        cp->climit = cdata + nquanta * string_data_quantum;
2489
173k
        cp->smark = cp->climit;
2490
173k
        cp->smark_size = string_quanta_mark_size(nquanta);
2491
173k
        cp->sreloc =
2492
173k
            (string_reloc_offset *) (cp->smark + cp->smark_size);
2493
173k
        cp->sfree1 = (uint *) cp->sreloc;
2494
868k
    } else {
2495
        /* No strings, don't need the string GC tables. */
2496
868k
        cp->climit = cp->cend;
2497
868k
        cp->sfree1 = 0;
2498
868k
        cp->smark = 0;
2499
868k
        cp->smark_size = 0;
2500
868k
        cp->sreloc = 0;
2501
868k
    }
2502
1.04M
    cp->ctop = cp->climit;
2503
1.04M
    alloc_init_free_strings(cp);
2504
1.04M
}
2505
2506
/* Initialize the string freelists in a clump. */
2507
void
2508
alloc_init_free_strings(clump_t * cp)
2509
1.04M
{
2510
1.04M
    if (cp->sfree1)
2511
173k
        memset(cp->sfree1, 0, STRING_FREELIST_SPACE(cp));
2512
1.04M
    cp->sfree = 0;
2513
1.04M
}
2514
2515
/* Close up the current clump. */
2516
/* This is exported for save/restore and the GC. */
2517
void
2518
alloc_close_clump(gs_ref_memory_t * mem)
2519
9.16M
{
2520
#ifdef DEBUG
2521
    if (gs_debug_c('A')) {
2522
        dmlprintf1((const gs_memory_t *)mem, "[a%d]", alloc_trace_space(mem));
2523
        dmprintf_clump((const gs_memory_t *)mem, "closing clump", mem->cc);
2524
    }
2525
#endif
2526
9.16M
}
2527
2528
/* Reopen the current clump after a GC or restore. */
2529
void
2530
alloc_open_clump(gs_ref_memory_t * mem)
2531
8.83M
{
2532
#ifdef DEBUG
2533
    if (gs_debug_c('A')) {
2534
        dmlprintf1((const gs_memory_t *)mem, "[a%d]", alloc_trace_space(mem));
2535
        dmprintf_clump((const gs_memory_t *)mem, "opening clump", mem->cc);
2536
    }
2537
#endif
2538
8.83M
}
2539
2540
#ifdef DEBUG
2541
static splay_app_result_t
2542
check_in_clump(clump_t *cp, void *arg)
2543
{
2544
    clump_t **cpp = (clump_t **)arg;
2545
2546
    if (*cpp != cp)
2547
        return SPLAY_APP_CONTINUE;
2548
    *cpp = NULL;
2549
2550
    return SPLAY_APP_STOP;
2551
}
2552
#endif
2553
2554
/* Remove a clump from the chain.  This is exported for the GC. */
2555
void
2556
alloc_unlink_clump(clump_t * cp, gs_ref_memory_t * mem)
2557
1.04M
{
2558
1.04M
    SANITY_CHECK_MID(cp);
2559
#ifdef DEBUG
2560
    if (gs_alloc_debug) { /* Check to make sure this clump belongs to this allocator. */
2561
        clump_t *found = cp;
2562
        clump_splay_app(mem->root, mem, check_in_clump, &found);
2563
2564
        if (found != NULL) {
2565
            mlprintf2((const gs_memory_t *)mem, "unlink_clump "PRI_INTPTR" not owned by memory "PRI_INTPTR"!\n",
2566
                      (intptr_t)cp, (intptr_t)mem);
2567
            return;   /*gs_abort(); */
2568
        }
2569
    }
2570
#endif
2571
1.04M
    (void)clump_splay_remove(cp, mem);
2572
1.04M
    if (mem->cc == cp) {
2573
19.6k
        mem->cc = NULL;
2574
19.6k
    }
2575
1.04M
}
2576
2577
/*
2578
 * Free a clump.  This is exported for the GC.  Since we eventually use
2579
 * this to free the clump containing the allocator itself, we must be
2580
 * careful not to reference anything in the allocator after freeing the
2581
 * clump data.
2582
 */
2583
void
2584
alloc_free_clump(clump_t * cp, gs_ref_memory_t * mem)
2585
1.04M
{
2586
1.04M
    gs_memory_t *parent = mem->non_gc_memory;
2587
1.04M
    byte *cdata = (byte *)cp->chead;
2588
1.04M
    ulong csize = (byte *)cp->cend - cdata;
2589
2590
1.04M
    alloc_unlink_clump(cp, mem);
2591
1.04M
    mem->allocated -= st_clump.ssize;
2592
1.04M
    if (mem->cfreed.cp == cp)
2593
17.1k
        mem->cfreed.cp = 0;
2594
1.04M
    if (cp->outer == 0) {
2595
985k
        mem->allocated -= csize;
2596
985k
        gs_free_object(parent, cdata, "alloc_free_clump(data)");
2597
985k
    } else {
2598
56.7k
        cp->outer->inner_count--;
2599
56.7k
        gs_alloc_fill(cdata, gs_alloc_fill_free, csize);
2600
56.7k
    }
2601
1.04M
    gs_free_object(parent, cp, "alloc_free_clump(clump struct)");
2602
1.04M
}
2603
2604
/* Find the clump for a pointer. */
2605
/* Note that this only searches the current save level. */
2606
/* Since a given save level can't contain both a clump and an inner clump */
2607
/* of that clump, we can stop when is_within_clump succeeds, and just test */
2608
/* is_in_inner_clump then. */
2609
bool
2610
clump_locate_ptr(const void *ptr, clump_locator_t * clp)
2611
12.6M
{
2612
12.6M
    clump_t *cp = clp->memory->root;
2613
2614
37.3M
    while (cp)
2615
36.8M
    {
2616
36.8M
        if (PTR_LT(ptr, cp->cbase))
2617
13.0M
        {
2618
13.0M
            cp = cp->left;
2619
13.0M
            continue;
2620
13.0M
        }
2621
23.7M
        if (PTR_GE(ptr, cp->cend))
2622
11.6M
        {
2623
11.6M
            cp = cp->right;
2624
11.6M
            continue;
2625
11.6M
        }
2626
        /* Found it! */
2627
12.0M
        splay_move_to_root(cp, clp->memory);
2628
12.0M
        clp->cp = cp;
2629
12.0M
        return !ptr_is_in_inner_clump(ptr, cp);
2630
23.7M
    }
2631
559k
    return false;
2632
12.6M
}
2633
2634
bool ptr_is_within_mem_clumps(const void *ptr, gs_ref_memory_t *mem)
2635
25.8k
{
2636
25.8k
    clump_t *cp = mem->root;
2637
2638
110k
    while (cp)
2639
84.7k
    {
2640
84.7k
        if (PTR_LT(ptr, cp->cbase))
2641
57.8k
        {
2642
57.8k
            cp = cp->left;
2643
57.8k
            continue;
2644
57.8k
        }
2645
26.9k
        if (PTR_GE(ptr, cp->cend))
2646
26.9k
        {
2647
26.9k
            cp = cp->right;
2648
26.9k
            continue;
2649
26.9k
        }
2650
        /* Found it! */
2651
0
        splay_move_to_root(cp, mem);
2652
0
        return true;
2653
26.9k
    }
2654
25.8k
    return false;
2655
25.8k
}
2656
2657
/* ------ Debugging ------ */
2658
2659
#ifdef DEBUG
2660
2661
#include "string_.h"
2662
2663
static inline bool
2664
obj_in_control_region(const void *obot, const void *otop,
2665
                      const dump_control_t *pdc)
2666
{
2667
    return
2668
        ((pdc->bottom == NULL || PTR_GT(otop, pdc->bottom)) &&
2669
         (pdc->top == NULL || PTR_LT(obot, pdc->top)));
2670
}
2671
2672
const dump_control_t dump_control_default =
2673
{
2674
    dump_do_default, NULL, NULL
2675
};
2676
const dump_control_t dump_control_all =
2677
{
2678
    dump_do_strings | dump_do_type_addresses | dump_do_pointers |
2679
    dump_do_pointed_strings | dump_do_contents, NULL, NULL
2680
};
2681
2682
const dump_control_t dump_control_no_contents =
2683
{
2684
    dump_do_strings | dump_do_type_addresses | dump_do_pointers |
2685
    dump_do_pointed_strings, NULL, NULL
2686
};
2687
2688
/*
2689
 * Internal procedure to dump a block of memory, in hex and optionally
2690
 * also as characters.
2691
 */
2692
static void
2693
debug_indent(const gs_memory_t *mem, int indent)
2694
{
2695
    int i;
2696
2697
    for (i = indent; i > 0; --i)
2698
        dmputc(mem, ' ');
2699
}
2700
static void
2701
debug_dump_contents(const gs_memory_t *mem, const byte * bot,
2702
                    const byte * top, int indent, bool as_chars)
2703
{
2704
    const byte *block;
2705
2706
#define block_size 16
2707
2708
    if (bot >= top)
2709
        return;
2710
    for (block = bot - ((bot - (byte *) 0) & (block_size - 1));
2711
         block < top; block += block_size
2712
        ) {
2713
        int i;
2714
        char label[12];
2715
2716
        /* Check for repeated blocks. */
2717
        if (block >= bot + block_size &&
2718
            block <= top - (block_size * 2) &&
2719
            !memcmp(block, block - block_size, block_size) &&
2720
            !memcmp(block, block + block_size, block_size)
2721
            ) {
2722
            if (block < bot + block_size * 2 ||
2723
                memcmp(block, block - block_size * 2, block_size)
2724
                ) {
2725
                debug_indent(mem, indent);
2726
                dmputs(mem, "  ...\n");
2727
            }
2728
            continue;
2729
        }
2730
        gs_snprintf(label, sizeof(label), PRI_INTPTR":", (intptr_t)block);
2731
        debug_indent(mem, indent);
2732
        dmputs(mem, label);
2733
        for (i = 0; i < block_size; ++i) {
2734
            const char *sepr = ((i & 3) == 0 && i != 0 ? "  " : " ");
2735
2736
            dmputs(mem, sepr);
2737
            if (block + i >= bot && block + i < top)
2738
                dmprintf1(mem, "%02x", block[i]);
2739
            else
2740
                dmputs(mem, "  ");
2741
        }
2742
        dmputc(mem, '\n');
2743
        if (as_chars) {
2744
            debug_indent(mem, indent + strlen(label));
2745
            for (i = 0; i < block_size; ++i) {
2746
                byte ch;
2747
2748
                if ((i & 3) == 0 && i != 0)
2749
                    dmputc(mem, ' ');
2750
                if (block + i >= bot && block + i < top &&
2751
                    (ch = block[i]) >= 32 && ch <= 126
2752
                    )
2753
                    dmprintf1(mem, "  %c", ch);
2754
                else
2755
                    dmputs(mem, "   ");
2756
            }
2757
            dmputc(mem, '\n');
2758
        }
2759
    }
2760
#undef block_size
2761
}
2762
2763
/* Print one object with the given options. */
2764
/* Relevant options: type_addresses, no_types, pointers, pointed_strings, */
2765
/* contents. */
2766
void
2767
debug_print_object(const gs_memory_t *mem, const void *obj, const dump_control_t * control)
2768
{
2769
    const obj_header_t *pre = ((const obj_header_t *)obj) - 1;
2770
    ulong size = pre_obj_contents_size(pre);
2771
    const gs_memory_struct_type_t *type = pre->o_type;
2772
    dump_options_t options = control->options;
2773
2774
    dmprintf3(mem, "  pre="PRI_INTPTR"(obj="PRI_INTPTR") size=%lu",
2775
              (intptr_t) pre, (intptr_t) obj, size);
2776
    switch (options & (dump_do_type_addresses | dump_do_no_types)) {
2777
    case dump_do_type_addresses + dump_do_no_types: /* addresses only */
2778
        dmprintf1(mem, " type="PRI_INTPTR"", (intptr_t) type);
2779
        break;
2780
    case dump_do_type_addresses:  /* addresses & names */
2781
        dmprintf2(mem, " type=%s("PRI_INTPTR")", struct_type_name_string(type),
2782
                 (intptr_t)type);
2783
        break;
2784
    case 0:   /* names only */
2785
        dmprintf1(mem, " type=%s", struct_type_name_string(type));
2786
    case dump_do_no_types:  /* nothing */
2787
        ;
2788
    }
2789
    if (options & dump_do_marks) {
2790
        dmprintf2(mem, " smark/back=%u (0x%x)", pre->o_smark, pre->o_smark);
2791
    }
2792
    dmputc(mem, '\n');
2793
    if (type == &st_free)
2794
        return;
2795
    if (options & dump_do_pointers) {
2796
        struct_proc_enum_ptrs((*proc)) = type->enum_ptrs;
2797
        uint index = 0;
2798
        enum_ptr_t eptr;
2799
        gs_ptr_type_t ptype;
2800
2801
        if (proc != gs_no_struct_enum_ptrs) {
2802
            if (proc != 0) {
2803
                for (; (ptype = (*proc)(mem, pre + 1, size, index, &eptr, type, NULL)) != 0;
2804
                     ++index
2805
                     ) {
2806
                    const void *ptr = eptr.ptr;
2807
2808
                    dmprintf1(mem, "    ptr %u: ", index);
2809
                    if (ptype == ptr_string_type || ptype == ptr_const_string_type) {
2810
                        const gs_const_string *str = (const gs_const_string *)&eptr;
2811
                        if (!str)
2812
                            dmprintf(mem, "0x0");
2813
                        else
2814
                            dmprintf2(mem, PRI_INTPTR "(%u)", (intptr_t)str->data, str->size);
2815
                        if (options & dump_do_pointed_strings) {
2816
                            dmputs(mem, " =>\n");
2817
                            if (!str)
2818
                                dmprintf(mem, "(null)\n");
2819
                            else
2820
                                debug_dump_contents(mem, str->data, str->data + str->size, 6,
2821
                                                    true);
2822
                        } else {
2823
                            dmputc(mem, '\n');
2824
                        }
2825
                    } else {
2826
                        dmprintf1(mem, (PTR_BETWEEN(ptr, obj, (const byte *)obj + size) ?
2827
                                  "("PRI_INTPTR")\n" : PRI_INTPTR "\n"), (intptr_t) ptr);
2828
                    }
2829
                }
2830
            } else { /* proc == 0 */
2831
                dmprintf(mem, "previous line should be a ref\n");
2832
            }
2833
        } /* proc != gs_no_struct_enum_ptrs */
2834
    }
2835
    if (options & dump_do_contents) {
2836
        debug_dump_contents(mem, (const byte *)obj, (const byte *)obj + size,
2837
                            0, false);
2838
    }
2839
}
2840
2841
/* Print the contents of a clump with the given options. */
2842
/* Relevant options: all. */
2843
void
2844
debug_dump_clump(const gs_memory_t *mem, const clump_t * cp, const dump_control_t * control)
2845
{
2846
    dmprintf1(mem, "clump at "PRI_INTPTR":\n", (intptr_t) cp);
2847
    dmprintf3(mem, "   chead="PRI_INTPTR"  cbase="PRI_INTPTR" sbase="PRI_INTPTR"\n",
2848
              (intptr_t)cp->chead, (intptr_t)cp->cbase, (intptr_t)cp->sbase);
2849
    dmprintf3(mem, "    rcur="PRI_INTPTR"   rtop="PRI_INTPTR"  cbot="PRI_INTPTR"\n",
2850
              (intptr_t)cp->rcur, (intptr_t)cp->rtop, (intptr_t)cp->cbot);
2851
    dmprintf4(mem, "    ctop="PRI_INTPTR" climit="PRI_INTPTR" smark="PRI_INTPTR", size=%u\n",
2852
              (intptr_t)cp->ctop, (intptr_t)cp->climit, (intptr_t)cp->smark,
2853
              cp->smark_size);
2854
    dmprintf2(mem, "  sreloc="PRI_INTPTR"   cend="PRI_INTPTR"\n",
2855
              (intptr_t)cp->sreloc, (intptr_t)cp->cend);
2856
    dmprintf6(mem, "left="PRI_INTPTR" right="PRI_INTPTR" parent="PRI_INTPTR" outer="PRI_INTPTR" inner_count=%u has_refs=%s\n",
2857
              (intptr_t)cp->left, (intptr_t)cp->right, (intptr_t)cp->parent, (intptr_t)cp->outer,
2858
              cp->inner_count, (cp->has_refs ? "true" : "false"));
2859
2860
    dmprintf2(mem, "  sfree1="PRI_INTPTR"   sfree="PRI_INTPTR"\n",
2861
              (intptr_t)cp->sfree1, (intptr_t)cp->sfree);
2862
    if (control->options & dump_do_strings) {
2863
        debug_dump_contents(mem, (control->bottom == 0 ? cp->ctop :
2864
                             max(control->bottom, cp->ctop)),
2865
                            (control->top == 0 ? cp->climit :
2866
                             min(control->top, cp->climit)),
2867
                            0, true);
2868
    }
2869
    SCAN_CLUMP_OBJECTS(cp)
2870
        DO_ALL
2871
        if (obj_in_control_region(pre + 1,
2872
                                  (const byte *)(pre + 1) + size,
2873
                                  control)
2874
        )
2875
        debug_print_object(mem, pre + 1, control);
2876
    END_OBJECTS_SCAN_NO_ABORT
2877
}
2878
void
2879
debug_print_clump(const gs_memory_t *mem, const clump_t * cp)
2880
{
2881
    dump_control_t control;
2882
2883
    control = dump_control_default;
2884
    debug_dump_clump(mem, cp, &control);
2885
}
2886
2887
/* Print the contents of all clumps managed by an allocator. */
2888
/* Relevant options: all. */
2889
void
2890
debug_dump_memory(const gs_ref_memory_t * mem, const dump_control_t * control)
2891
{
2892
    const clump_t *cp;
2893
    clump_splay_walker sw;
2894
2895
    for (cp = clump_splay_walk_init(&sw, mem); cp != NULL; cp = clump_splay_walk_fwd(&sw))
2896
    {
2897
        if (obj_in_control_region(cp->cbase, cp->cend, control))
2898
            debug_dump_clump((const gs_memory_t *)mem, cp, control);
2899
    }
2900
}
2901
2902
void
2903
debug_dump_allocator(const gs_ref_memory_t *mem)
2904
{
2905
    debug_dump_memory(mem, &dump_control_no_contents);
2906
}
2907
2908
/* Find all the objects that contain a given pointer. */
2909
void
2910
debug_find_pointers(const gs_ref_memory_t *mem, const void *target)
2911
{
2912
    clump_splay_walker sw;
2913
    dump_control_t control;
2914
    const clump_t *cp;
2915
2916
    control.options = 0;
2917
    for (cp = clump_splay_walk_init(&sw, mem); cp; cp = clump_splay_walk_fwd(&sw))
2918
    {
2919
        SCAN_CLUMP_OBJECTS(cp);
2920
        DO_ALL
2921
            struct_proc_enum_ptrs((*proc)) = pre->o_type->enum_ptrs;
2922
            uint index = 0;
2923
            enum_ptr_t eptr;
2924
2925
            if (proc)   /* doesn't trace refs NB fix me. */
2926
                for (; (*proc)((const gs_memory_t *)mem, pre + 1, size, index,
2927
                               &eptr, pre->o_type, NULL);
2928
                     ++index)
2929
                    if (eptr.ptr == target) {
2930
                        dmprintf1((const gs_memory_t *)mem, "Index %d in", index);
2931
                        debug_print_object((const gs_memory_t *)mem, pre + 1, &control);
2932
                    }
2933
        END_OBJECTS_SCAN_NO_ABORT
2934
    }
2935
}
2936
static void ddct(const gs_memory_t *mem, clump_t *cp, clump_t *parent, int depth)
2937
{
2938
    int i;
2939
2940
    if (cp == NULL)
2941
        return;
2942
    for (i = 0; i < depth; i++)
2943
        dmlprintf(mem, " ");
2944
2945
    dmlprintf7(mem, "Clump "PRI_INTPTR":"PRI_INTPTR" parent="PRI_INTPTR" left="PRI_INTPTR":"PRI_INTPTR" right="PRI_INTPTR":"PRI_INTPTR"\n",
2946
        (intptr_t)cp, (intptr_t)cp->cbase, (intptr_t)cp->parent,
2947
        (intptr_t)cp->left, (intptr_t)(cp->left ? cp->left->cbase : NULL),
2948
        (intptr_t)cp->right, (intptr_t)(cp->right ? cp->right->cbase : NULL));
2949
    if (cp->parent != parent)
2950
        dmlprintf(mem, "Parent pointer mismatch!\n");
2951
    ddct(mem, cp->left, cp, depth+1);
2952
    ddct(mem, cp->right, cp, depth+1);
2953
}
2954
void
2955
debug_dump_clump_tree(const gs_ref_memory_t *mem)
2956
{
2957
    ddct((const gs_memory_t *)mem, mem->root, NULL, 0);
2958
}
2959
2960
#endif /* DEBUG */