/src/ghostpdl/psi/igcref.c
Line | Count | Source (jump to first uncovered line) |
1 | | /* Copyright (C) 2001-2023 Artifex Software, Inc. |
2 | | All Rights Reserved. |
3 | | |
4 | | This software is provided AS-IS with no warranty, either express or |
5 | | implied. |
6 | | |
7 | | This software is distributed under license and may not be copied, |
8 | | modified or distributed except as expressly authorized under the terms |
9 | | of the license contained in the file LICENSE in this distribution. |
10 | | |
11 | | Refer to licensing information at http://www.artifex.com or contact |
12 | | Artifex Software, Inc., 39 Mesa Street, Suite 108A, San Francisco, |
13 | | CA 94129, USA, for further information. |
14 | | */ |
15 | | |
16 | | |
17 | | /* ref garbage collector for Ghostscript */ |
18 | | #include "memory_.h" |
19 | | #include "ghost.h" |
20 | | #include "gsexit.h" |
21 | | #include "gsstruct.h" /* for gxalloc.h included by iastate.h */ |
22 | | #include "iname.h" |
23 | | #include "iastate.h" |
24 | | #include "idebug.h" |
25 | | #include "igc.h" |
26 | | #include "ipacked.h" |
27 | | #include "store.h" /* for ref_assign_inline */ |
28 | | |
29 | | /* Define whether to trace every step of relocating ref pointers. */ |
30 | | #if 0 |
31 | | # define rputc(m,c) dmputc(m,c) |
32 | | #else |
33 | 1.22G | # define rputc(m,c) DO_NOTHING |
34 | | #endif |
35 | | |
36 | | /* Forward references */ |
37 | | ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed); |
38 | | ptr_proc_reloc(igc_reloc_ref_ptr_nocheck, ref_packed); |
39 | | refs_proc_reloc(igc_reloc_refs); |
40 | | |
41 | | /* |
42 | | * Define the 'structure' type descriptor for refs. |
43 | | * This is special because it has different shared procs. |
44 | | */ |
 | | /* GC procedures specific to blocks of refs: clearing/setting */
 | | /* relocation info and compacting are per-ref-block operations, */
 | | /* so st_refs carries its own shared-procs vector instead of the */
 | | /* generic struct one. */
45 | | static gc_proc_clear_reloc(refs_clear_reloc);
46 | | static gc_proc_set_reloc(refs_set_reloc);
47 | | static gc_proc_compact(refs_compact);
48 | | static const struct_shared_procs_t refs_shared_procs =
49 | | {refs_clear_reloc, refs_set_reloc, refs_compact};
50 | | static struct_proc_clear_marks(refs_clear_marks);
51 | | static struct_proc_reloc_ptrs(refs_do_reloc);
 | | /* Type descriptor for objects that are arrays of refs. */
 | | /* Element size is sizeof(ref); enum_ptrs slot is 0 because refs */
 | | /* are traced specially by the collector, not via enum_ptrs. */
52 | | const gs_memory_struct_type_t st_refs =
53 | | {sizeof(ref), "refs", &refs_shared_procs, refs_clear_marks, 0, refs_do_reloc};
54 | | |
55 | | /* |
56 | | * Define the GC procedures for structs that actually contain refs. |
57 | | * These are special because the shared refs_* procedures |
58 | | * are never called. Instead, we unmark the individual refs in clear_marks, |
59 | | * disregard refs_*_reloc (because we will never relocate a ptr_ref_type |
60 | | * pointer pointing into the structure), disregard refs_compact (because |
61 | | * compaction is never required), and remove the marks in reloc_ptrs. |
62 | | * See also the comment about ptr_ref_type in imemory.h. |
63 | | */ |
64 | | CLEAR_MARKS_PROC(ref_struct_clear_marks)
 | | /* Clear the l_mark attribute on every (full-size) ref in a */
 | | /* struct that contains refs, from vptr to vptr + size. */
65 | 36.0k | {
66 | 36.0k | ref *pref = (ref *) vptr;
67 | 36.0k | ref *end = (ref *) ((char *)vptr + size);
68 | |
 | | /* NOTE(review): assumes the struct holds only full-size refs, */
 | | /* never packed refs — consistent with ref_struct_enum_ptrs below. */
69 | 908k | for (; pref < end; pref++)
70 | 872k | r_clear_attrs(pref, l_mark);
71 | 36.0k | }
72 | | ENUM_PTRS_BEGIN_PROC(ref_struct_enum_ptrs)
 | | /* Enumerate the refs of a ref-containing struct one at a time: */
 | | /* returns ptr_ref_type for element 'index', or 0 past the end. */
73 | 454k | {
74 | 454k | if (index >= size / sizeof(ref))
75 | 18.0k | return 0;
76 | 436k | pep->ptr = (const ref *)vptr + index;
77 | 436k | return ptr_ref_type;
78 | 454k | ENUM_PTRS_END_PROC
79 | 454k | }
80 | 18.0k | RELOC_PTRS_BEGIN(ref_struct_reloc_ptrs)
 | | /* Relocate all refs inside a ref-containing struct, then strip */
 | | /* the GC marks (see the block comment above: refs_compact is */
 | | /* never run for these structs, so marks must be removed here). */
81 | 18.0k | {
 | | /* 'spaces' is used implicitly by the space_system macro below */
 | | /* — presumably; verify against gxalloc.h. */
82 | 18.0k | vm_spaces spaces = gcst->spaces;
83 | 18.0k | const gs_memory_t *cmem = space_system->stable_memory;
84 | |
85 | 18.0k | ref *beg = vptr;
86 | 18.0k | ref *end = (ref *) ((char *)vptr + size);
87 | |
88 | 18.0k | igc_reloc_refs((ref_packed *) beg, (ref_packed *) end, gcst);
89 | 18.0k | ref_struct_clear_marks(cmem, vptr, size, pstype);
90 | 18.0k | } RELOC_PTRS_END
91 | | |
92 | | /* ------ Unmarking phase ------ */ |
93 | | |
94 | | /* Unmark a single ref. */ |
95 | | void
96 | | ptr_ref_unmark(enum_ptr_t *pep, gc_state_t * ignored)
 | | /* Unmark one ref pointed at by pep->ptr: clears the packed-ref */
 | | /* mark bit or the full-size l_mark attribute as appropriate. */
 | | /* 'ignored' is unused; the signature matches the GC proc type. */
97 | 1.79k | {
98 | 1.79k | ref_packed *rpp = (ref_packed *)pep->ptr;
99 | |
100 | 1.79k | if (r_is_packed(rpp))
101 | 0 | r_clear_pmark(rpp);
102 | 1.79k | else
103 | 1.79k | r_clear_attrs((ref *)rpp, l_mark);
104 | 1.79k | }
105 | | |
106 | | /* Unmarking routine for ref objects. */ |
107 | | static void
108 | | refs_clear_marks(const gs_memory_t *cmem,
109 | | void /*obj_header_t */ *vptr, uint size,
110 | | const gs_memory_struct_type_t * pstype)
 | | /* Unmark every ref (packed and full-size) in a refs block of */
 | | /* 'size' bytes starting at vptr. cmem/pstype are only used for */
 | | /* debug tracing / signature compatibility. */
111 | 3.93M | {
112 | 3.93M | ref_packed *rp = (ref_packed *) vptr;
113 | 3.93M | ref_packed *end = (ref_packed *) ((byte *) vptr + size);
114 | |
115 | | /* Since the last ref is full-size, we only need to check for */
116 | | /* the end of the block when we see one of those. */
117 | 445M | for (;;) {
118 | 445M | if (r_is_packed(rp)) {
119 | | #ifdef DEBUG
120 | | if (gs_debug_c('8')) {
121 | | dmlprintf1(cmem, " [8]unmark packed "PRI_INTPTR" ", (intptr_t) rp);
122 | | debug_print_ref(cmem, (const ref *)rp);
123 | | dmputs(cmem, "\n");
124 | | }
125 | | #endif
126 | 157M | r_clear_pmark(rp);
127 | 157M | rp++;
128 | 288M | } else { /* full-size ref */
129 | 288M | ref *const pref = (ref *)rp;
130 | |
131 | | #ifdef DEBUG
132 | | if (gs_debug_c('8')) {
133 | | dmlprintf1(cmem, " [8]unmark ref "PRI_INTPTR" ", (intptr_t)rp);
134 | | debug_print_ref(cmem, pref);
135 | | dmputs(cmem, "\n");
136 | | }
137 | | #endif
138 | 288M | r_clear_attrs(pref, l_mark);
139 | 288M | rp += packed_per_ref;
 | | /* Only full-size refs can end the block (see comment above), */
 | | /* so the termination test lives here, not at the loop top. */
140 | 288M | if (rp >= (ref_packed *) end)
141 | 3.93M | break;
142 | 288M | }
143 | 445M | }
144 | 3.93M | }
145 | | |
146 | | /* ------ Marking phase ------ */ |
147 | | |
148 | | /* Mark a ref. Return true if new mark. */ |
149 | | bool
150 | | ptr_ref_mark(enum_ptr_t *pep, gc_state_t * ignored)
 | | /* Mark one ref (packed or full-size). Returns true if this call */
 | | /* set the mark, false if it was already marked — the caller uses */
 | | /* that to decide whether to trace the referent further. */
151 | 0 | {
152 | 0 | ref_packed *rpp = (void *)pep->ptr;
153 | 

154 | 0 | if (r_is_packed(rpp)) {
155 | 0 | if (r_has_pmark(rpp))
156 | 0 | return false;
157 | 0 | r_set_pmark(rpp);
158 | 0 | } else {
159 | 0 | ref *const pref = (ref *)rpp;
160 | 

161 | 0 | if (r_has_attr(pref, l_mark))
162 | 0 | return false;
163 | 0 | r_set_attrs(pref, l_mark);
164 | 0 | }
165 | 0 | return true;
166 | 0 | }
167 | | |
168 | | /* ------ Relocation planning phase ------ */ |
169 | | |
170 | | /* |
171 | | * We store relocation in the size field of refs that don't use it, |
172 | | * so that we don't have to scan all the way to an unmarked object. |
173 | | * We must avoid nulls, which sometimes have useful information |
174 | | * in their size fields, and the types above t_next_index, which are |
175 | | * actually operators in disguise and also use the size field. |
176 | | */ |
177 | | |
178 | | /* Clear the relocation for a ref object. */ |
179 | | static void
180 | | refs_clear_reloc(obj_header_t *hdr, uint size)
 | | /* Reset stored relocation info for a refs object: zero the size */
 | | /* field of every full-size ref whose type does not itself use */
 | | /* the size field (see the "Relocation planning" comment above). */
181 | 9.26k | {
182 | 9.26k | ref_packed *rp = (ref_packed *) (hdr + 1);
183 | 9.26k | ref_packed *end = (ref_packed *) ((byte *) rp + size);
184 | |
185 | 989k | while (rp < end) {
 | | /* Packed refs carry no relocation here; just skip them. */
186 | 980k | if (r_is_packed(rp))
187 | 353k | rp++;
188 | 626k | else {
189 | | /* Full-size ref. Store the relocation here if possible. */
190 | 626k | ref *const pref = (ref *)rp;
191 | |
192 | 626k | if (!ref_type_uses_size_or_null(r_type(pref))) {
193 | 146k | if_debug1('8', " [8]clearing reloc at "PRI_INTPTR"\n", (intptr_t)rp);
194 | 146k | r_set_size(pref, 0);
195 | 146k | }
196 | 626k | rp += packed_per_ref;
197 | 626k | }
198 | 980k | }
199 | 9.26k | }
200 | | |
201 | | /* Set the relocation for a ref object. */ |
202 | | static bool
203 | | refs_set_reloc(obj_header_t * hdr, uint reloc, uint size)
 | | /* Compute and store relocation amounts for a refs object of */
 | | /* 'size' bytes whose own relocation (bytes freed before it in */
 | | /* the clump) is 'reloc'. Returns false iff every ref in the */
 | | /* object is unmarked (the whole object can be freed). */
204 | 3.92M | {
205 | 3.92M | ref_packed *rp = (ref_packed *) (hdr + 1);
206 | 3.92M | ref_packed *end = (ref_packed *) ((byte *) rp + size);
 | | /* 'freed' accumulates bytes of unmarked refs seen so far; */
 | | /* reloc + freed is the relocation for the next surviving ref. */
207 | 3.92M | uint freed = 0;
208 | |
209 | | /*
210 | | * We have to be careful to keep refs aligned properly.
211 | | * For the moment, we do this by either keeping or discarding
212 | | * an entire (aligned) block of align_packed_per_ref packed elements
213 | | * as a unit. We know that align_packed_per_ref <= packed_per_ref,
214 | | * and we also know that packed refs are always allocated in blocks
215 | | * of align_packed_per_ref, so this makes things relatively easy.
216 | | */
217 | 330M | while (rp < end) {
218 | 326M | if (r_is_packed(rp)) {
219 | | #if align_packed_per_ref == 1
220 | | if (r_has_pmark(rp)) {
221 | | if_debug1('8',
222 | | " [8]packed ref "PRI_INTPTR" is marked\n",
223 | | (intptr_t)rp);
224 | | rp++;
225 | | } else {
226 | | #else
227 | 39.2M | int i;
228 | |
229 | | /*
230 | | * Note: align_packed_per_ref is typically
231 | | * 2 or 4 for 32-bit processors.
232 | | */
233 | 39.2M | #define all_marked (align_packed_per_ref * lp_mark)
234 | | # if align_packed_per_ref == 2
235 | | # if ARCH_SIZEOF_INT == ARCH_SIZEOF_SHORT * 2
236 | | # undef all_marked
237 | | # define all_marked ( (lp_mark << (sizeof(short) * 8)) + lp_mark )
238 | | # define marked (*(int *)rp & all_marked)
239 | | # else
240 | | # define marked ((*rp & lp_mark) + (rp[1] & lp_mark))
241 | | # endif
242 | | # else
243 | 39.2M | # if align_packed_per_ref == 4
244 | 39.2M | # define marked ((*rp & lp_mark) + (rp[1] & lp_mark) +\
245 | 39.2M | (rp[2] & lp_mark) + (rp[3] & lp_mark))
246 | | # else
247 | | /*
248 | | * The value of marked is logically a uint, not an int:
249 | | * we declare it as int only to avoid a compiler warning
250 | | * message about using a non-int value in a switch statement.
251 | | */
252 | | int marked = *rp & lp_mark;
253 | |
254 | | for (i = 1; i < align_packed_per_ref; i++)
255 | | marked += rp[i] & lp_mark;
256 | | # endif
257 | 39.2M | # endif
258 | | /*
259 | | * Now marked is lp_mark * the number of marked
260 | | * packed refs in the aligned block, except for
261 | | * a couple of special cases above.
262 | | */
263 | 39.2M | switch (marked) {
264 | 13.7M | case all_marked:
265 | 13.7M | if_debug2('8',
266 | 13.7M | " [8]packed refs "PRI_INTPTR".."PRI_INTPTR" are marked\n",
267 | 13.7M | (intptr_t)rp,
268 | 13.7M | (intptr_t)(rp + (align_packed_per_ref - 1)));
269 | 13.7M | rp += align_packed_per_ref;
270 | 13.7M | break;
271 | 16.9M | default:
272 | | /* At least one packed ref in the block */
273 | | /* is marked: Keep the whole block. */
274 | 84.7M | for (i = align_packed_per_ref; i--; rp++) {
275 | 67.7M | r_set_pmark(rp);
276 | 67.7M | if_debug1('8',
277 | 67.7M | " [8]packed ref "PRI_INTPTR" is marked\n",
278 | 67.7M | (intptr_t)rp);
279 | 67.7M | }
280 | 16.9M | break;
 | | /* case 0 placed last intentionally: with the */
 | | /* align_packed_per_ref == 1 configuration above, it also */
 | | /* closes that #if branch's else arm. */
281 | 8.56M | case 0:
282 | 8.56M | #endif
283 | 8.56M | if_debug2('8', " [8]%d packed ref(s) at "PRI_INTPTR" are unmarked\n",
284 | 8.56M | align_packed_per_ref, (intptr_t)rp);
285 | 8.56M | {
286 | 8.56M | uint rel = reloc + freed;
287 | |
288 | | /* Change this to an integer so we can */
289 | | /* store the relocation here. */
290 | 8.56M | *rp = pt_tag(pt_integer) +
291 | 8.56M | min(rel, packed_max_value);
292 | 8.56M | }
293 | 8.56M | rp += align_packed_per_ref;
294 | 8.56M | freed += sizeof(ref_packed) * align_packed_per_ref;
295 | 39.2M | }
296 | 287M | } else { /* full-size ref */
297 | 287M | uint rel = reloc + freed;
298 | |
299 | | /* The following assignment is logically */
300 | | /* unnecessary; we do it only for convenience */
301 | | /* in debugging. */
302 | 287M | ref *pref = (ref *) rp;
303 | |
304 | 287M | if (!r_has_attr(pref, l_mark)) {
305 | 40.1M | if_debug1('8', " [8]ref "PRI_INTPTR" is unmarked\n",
306 | 40.1M | (intptr_t)pref);
307 | | /* Change this to a mark so we can */
308 | | /* store the relocation. */
309 | 40.1M | r_set_type(pref, t_mark);
310 | 40.1M | r_set_size(pref, rel);
311 | 40.1M | freed += sizeof(ref);
312 | 247M | } else {
313 | 247M | if_debug1('8', " [8]ref "PRI_INTPTR" is marked\n",
314 | 247M | (intptr_t)pref);
315 | | /* Store the relocation here if possible. */
316 | 247M | if (!ref_type_uses_size_or_null(r_type(pref))) {
317 | 50.5M | if_debug2('8', " [8]storing reloc %u at "PRI_INTPTR"\n",
318 | 50.5M | rel, (intptr_t)pref);
319 | 50.5M | r_set_size(pref, rel);
320 | 50.5M | }
321 | 247M | }
322 | 287M | rp += packed_per_ref;
323 | 287M | }
324 | 326M | }
325 | 3.92M | if_debug3('7', " [7]at end of refs "PRI_INTPTR", size = %u, freed = %u\n",
326 | 3.92M | (intptr_t)(hdr + 1), size, freed);
327 | 3.92M | if (freed == size)
328 | 525k | return false;
329 | 3.39M | #if ARCH_SIZEOF_INT > ARCH_SIZEOF_SHORT
330 | | /*
331 | | * If the final relocation can't fit in the r_size field
332 | | * (which can't happen if the object shares a clump with
333 | | * any other objects, so we know reloc = 0 in this case),
334 | | * we have to keep the entire object unless there are no
335 | | * references to any ref in it.
336 | | */
337 | 3.39M | if (freed <= max_ushort)
338 | 3.39M | return true;
339 | | /*
340 | | * We have to mark all surviving refs, but we also must
341 | | * overwrite any non-surviving refs with something that
342 | | * doesn't contain any pointers.
343 | | */
344 | 0 | rp = (ref_packed *) (hdr + 1);
345 | 0 | while (rp < end) {
346 | 0 | if (r_is_packed(rp)) {
347 | 0 | if (!r_has_pmark(rp))
348 | 0 | *rp = pt_tag(pt_integer) | lp_mark;
349 | 0 | ++rp;
350 | 0 | } else { /* The following assignment is logically */
351 | | /* unnecessary; we do it only for convenience */
352 | | /* in debugging. */
353 | 0 | ref *pref = (ref *) rp;
354 | 

355 | 0 | if (!r_has_attr(pref, l_mark)) {
356 | 0 | r_set_type_attrs(pref, t_mark, l_mark);
357 | 0 | r_set_size(pref, reloc);
358 | 0 | } else {
359 | 0 | if (!ref_type_uses_size_or_null(r_type(pref)))
360 | 0 | r_set_size(pref, reloc);
361 | 0 | }
362 | 0 | rp += packed_per_ref;
363 | 0 | }
364 | 0 | }
365 | | /* The last ref has to remain unmarked. */
366 | 0 | r_clear_attrs((ref *) rp - 1, l_mark);
367 | 0 | #endif
368 | 0 | return true;
369 | 3.39M | }
370 | | |
371 | | /* ------ Relocation phase ------ */ |
372 | | |
373 | | /* Relocate all the pointers in a block of refs. */ |
374 | | static void
375 | | refs_do_reloc(void /*obj_header_t */ *vptr, uint size,
376 | | const gs_memory_struct_type_t * pstype, gc_state_t * gcst)
 | | /* reloc_ptrs proc for st_refs: thin adapter that forwards the */
 | | /* whole block [vptr, vptr+size) to igc_reloc_refs. pstype unused. */
377 | 3.40M | {
378 | 3.40M | igc_reloc_refs((ref_packed *) vptr,
379 | 3.40M | (ref_packed *) ((char *)vptr + size),
380 | 3.40M | gcst);
381 | 3.40M | }
382 | | /* Relocate the contents of a block of refs. */ |
383 | | /* If gcst->relocating_untraced is true, we are relocating pointers from an */ |
384 | | /* untraced space, so relocate all refs, not just marked ones. */ |
385 | | void
386 | | igc_reloc_refs(ref_packed * from, ref_packed * to, gc_state_t * gcst)
 | | /* Relocate the pointer values held inside every (marked) full- */
 | | /* size ref in [from, to). Packed refs are skipped here: their */
 | | /* referents are relocated when the pointers TO them are fixed. */
387 | 3.48M | {
 | | /* Only refs in VM spaces >= min_trace are being collected. */
388 | 3.48M | int min_trace = gcst->min_collect;
389 | 3.48M | ref_packed *rp = from;
390 | 3.48M | bool do_all = gcst->relocating_untraced;
391 | |
 | | /* 'spaces' feeds the space_system macro used just below. */
392 | 3.48M | vm_spaces spaces = gcst->spaces;
393 | 3.48M | const gs_memory_t *cmem = space_system->stable_memory;
394 | |
395 | 412M | while (rp < to) {
396 | 409M | ref *pref;
397 | | #ifdef DEBUG
398 | | const void *before = 0;
399 | | const void *after = 0;
400 | | # define DO_RELOC(var, stat)\
401 | | BEGIN before = (var); stat; after = (var); END
402 | | # define SET_RELOC(var, expr)\
403 | | BEGIN before = (var); after = (var) = (expr); END
404 | | #else
405 | 409M | # define DO_RELOC(var, stat) stat
406 | 409M | # define SET_RELOC(var, expr) var = expr
407 | 409M | #endif
408 | |
409 | 409M | if (r_is_packed(rp)) {
410 | 151M | rp++;
411 | 151M | continue;
412 | 151M | }
413 | | /* The following assignment is logically unnecessary; */
414 | | /* we do it only for convenience in debugging. */
415 | 257M | pref = (ref *) rp;
416 | 257M | if_debug3m('8', gcst->heap, " [8]relocating %s %d ref at "PRI_INTPTR"\n",
417 | 257M | (r_has_attr(pref, l_mark) ? "marked" : "unmarked"),
418 | 257M | r_btype(pref), (intptr_t)pref);
 | | /* Unmarked refs are garbage unless we are relocating an */
 | | /* untraced space, in which case every ref is processed. */
419 | 257M | if ((r_has_attr(pref, l_mark) || do_all) &&
420 | 257M | r_space(pref) >= min_trace
421 | 257M | ) {
422 | 100M | switch (r_type(pref)) {
423 | | /* Struct cases */
424 | 19.8k | case t_file:
425 | 19.8k | DO_RELOC(pref->value.pfile, RELOC_VAR(pref->value.pfile));
426 | 19.8k | break;
427 | 78.4k | case t_device:
428 | 78.4k | DO_RELOC(pref->value.pdevice,
429 | 78.4k | RELOC_VAR(pref->value.pdevice));
430 | 78.4k | break;
431 | 3.59k | case t_fontID:
432 | 14.3k | case t_struct:
433 | 17.9k | case t_astruct:
434 | 17.9k | case t_pdfctx:
435 | 17.9k | DO_RELOC(pref->value.pstruct,
436 | 17.9k | RELOC_VAR(pref->value.pstruct));
437 | 17.9k | break;
438 | | /* Non-trivial non-struct cases */
439 | 1.92M | case t_dictionary:
440 | 1.92M | rputc(gcst->heap, 'd');
441 | 1.92M | SET_RELOC(pref->value.pdict,
442 | 1.92M | (dict *)igc_reloc_ref_ptr((ref_packed *)pref->value.pdict, gcst));
443 | 1.92M | break;
444 | 17.9M | case t_array:
445 | 17.9M | {
446 | 17.9M | uint size = r_size(pref);
447 | |
448 | 17.9M | if (size != 0) { /* value.refs might be NULL */
449 | |
450 | | /*
451 | | * If the array is large, we allocated it in its
452 | | * own object (at least originally -- this might
453 | | * be a pointer to a subarray.) In this case,
454 | | * we know it is the only object in its
455 | | * containing st_refs object, so we know that
456 | | * the mark containing the relocation appears
457 | | * just after it.
458 | | */
459 | 17.8M | if (size < max_size_st_refs / sizeof(ref)) {
460 | 17.5M | rputc(gcst->heap, 'a');
461 | 17.5M | SET_RELOC(pref->value.refs,
462 | 17.5M | (ref *) igc_reloc_ref_ptr(
463 | 17.5M | (ref_packed *) pref->value.refs, gcst));
464 | 17.5M | } else {
465 | 305k | rputc(gcst->heap, 'A');
466 | | /*
467 | | * See the t_shortarray case below for why we
468 | | * decrement size.
469 | | */
470 | 305k | --size;
471 | 305k | SET_RELOC(pref->value.refs,
472 | 305k | (ref *) igc_reloc_ref_ptr(
473 | 305k | (ref_packed *) (pref->value.refs + size),
474 | 305k | gcst) - size);
475 | 305k | }
476 | 17.8M | }
477 | 17.9M | }
478 | 17.9M | break;
479 | 13.1M | case t_mixedarray:
480 | 13.1M | if (r_size(pref) != 0) { /* value.refs might be NULL */
481 | 13.1M | rputc(gcst->heap, 'm');
482 | 13.1M | SET_RELOC(pref->value.packed,
483 | 13.1M | igc_reloc_ref_ptr(pref->value.packed, gcst));
484 | 13.1M | }
485 | 13.1M | break;
486 | 5.81M | case t_shortarray:
487 | 5.81M | {
488 | 5.81M | uint size = r_size(pref);
489 | |
490 | | /*
491 | | * Since we know that igc_reloc_ref_ptr works by
492 | | * scanning forward, and we know that all the
493 | | * elements of this array itself are marked, we can
494 | | * save some scanning time by relocating the pointer
495 | | * to the end of the array rather than the
496 | | * beginning.
497 | | */
498 | 5.81M | if (size != 0) { /* value.refs might be NULL */
499 | 5.41M | rputc(gcst->heap, 's');
500 | | /*
501 | | * igc_reloc_ref_ptr has to be able to determine
502 | | * whether the pointer points into a space that
503 | | * isn't being collected. It does this by
504 | | * checking whether the referent of the pointer
505 | | * is marked. For this reason, we have to pass
506 | | * a pointer to the last real element of the
507 | | * array, rather than just beyond it.
508 | | */
509 | 5.41M | --size;
510 | 5.41M | SET_RELOC(pref->value.packed,
511 | 5.41M | igc_reloc_ref_ptr(pref->value.packed + size,
512 | 5.41M | gcst) - size);
513 | 5.41M | }
514 | 5.81M | }
515 | 5.81M | break;
 | | /* Names live in the name sub-table; relocate by applying */
 | | /* the sub-table's displacement to the pname pointer. */
516 | 56.8M | case t_name:
517 | 56.8M | {
518 | 56.8M | void *psub = name_ref_sub_table(cmem, pref);
519 | 56.8M | void *rsub = RELOC_OBJ(psub); /* gcst implicit */
520 | |
521 | 56.8M | SET_RELOC(pref->value.pname,
522 | 56.8M | (name *)
523 | 56.8M | ((char *)rsub + ((char *)pref->value.pname -
524 | 56.8M | (char *)psub)));
525 | 56.8M | } break;
526 | 3.16M | case t_string:
527 | 3.16M | {
528 | 3.16M | gs_string str;
529 | |
530 | 3.16M | str.data = pref->value.bytes;
531 | 3.16M | str.size = r_size(pref);
532 | |
533 | 3.16M | DO_RELOC(str.data, RELOC_STRING_VAR(str));
534 | 3.16M | pref->value.bytes = str.data;
535 | 3.16M | }
536 | 3.16M | break;
537 | 1.49M | case t_oparray:
538 | 1.49M | rputc(gcst->heap, 'o');
539 | 1.49M | SET_RELOC(pref->value.const_refs,
540 | 1.49M | (const ref *)igc_reloc_ref_ptr((const ref_packed *)pref->value.const_refs, gcst));
541 | 1.49M | break;
542 | 0 | default:
543 | 0 | goto no_reloc; /* don't print trace message */
544 | 100M | }
545 | 100M | if_debug2m('8', gcst->heap, " [8]relocated "PRI_INTPTR" => "PRI_INTPTR"\n",
546 | 100M | (intptr_t)before, (intptr_t)after);
547 | 100M | }
548 | 257M | no_reloc:
549 | 257M | rp += packed_per_ref;
550 | 257M | }
551 | 3.48M | }
552 | | |
553 | | /* Relocate a pointer to a ref. */ |
554 | | /* See gsmemory.h for why the argument is const and the result is not. */ |
555 | | ref_packed *
556 | | igc_reloc_ref_ptr_nocheck(const ref_packed * prp, gc_state_t *gcst)
 | | /* Relocate a pointer to a ref by scanning forward from prp for */
 | | /* the nearest stored relocation value (in an unmarked packed */
 | | /* integer or a full-size ref's r_size). Caller guarantees the */
 | | /* referent is marked; use igc_reloc_ref_ptr for the checked form. */
557 | 39.8M | {
558 | | /*
559 | | * Search forward for relocation. This algorithm is intrinsically very
560 | | * inefficient; we hope eventually to replace it with a better one.
561 | | */
562 | 39.8M | const ref_packed *rp = prp;
 | | /* 'dec' compensates for overflowed packed relocation values */
 | | /* passed over during the scan (see the long comment below). */
563 | 39.8M | uint dec = 0;
564 | | #ifdef ALIGNMENT_ALIASING_BUG
565 | | const ref *rpref;
566 | | # define RP_REF(rp) (rpref = (const ref *)rp, rpref)
567 | | #else
568 | 39.8M | # define RP_REF(rp) ((const ref *)rp)
569 | 39.8M | #endif
570 | 1.14G | for (;;) {

572 | 1.14G | if (r_is_packed(rp)) {
573 | | /*
574 | | * Normally, an unmarked packed ref will be an
575 | | * integer whose value is the amount of relocation.
576 | | * However, the relocation value might have been
577 | | * too large to fit. If this is the case, for
578 | | * each such unmarked packed ref we pass over,
579 | | * we have to decrement the final relocation.
580 | | */
581 | 324M | rputc(gcst->heap, (*rp & lp_mark ? '1' : '0'));
582 | 324M | if (!(*rp & lp_mark)) {
583 | 8.43M | if (*rp != pt_tag(pt_integer) + packed_max_value) {
584 | | /* This is a stored relocation value. */
585 | 7.57M | rputc(gcst->heap, '\n');
586 | 7.57M | rp = print_reloc(prp, "ref",
587 | 7.57M | (const ref_packed *)
588 | 7.57M | ((const char *)prp -
589 | 7.57M | (*rp & packed_value_mask) + dec));
590 | 7.57M | break;
591 | 7.57M | }
592 | | /*
593 | | * We know this is the first of an aligned block
594 | | * of packed refs. Skip over the entire block,
595 | | * decrementing the final relocation.
596 | | */
597 | 865k | dec += sizeof(ref_packed) * align_packed_per_ref;
598 | 865k | rp += align_packed_per_ref;
599 | 865k | } else
600 | 315M | rp++;
601 | 316M | continue;
602 | 324M | }
603 | 816M | if (!ref_type_uses_size_or_null(r_type(RP_REF(rp)))) {
604 | | /* reloc is in r_size */
605 | 32.2M | rputc(gcst->heap, '\n');
 | | /* r_size == 0 means "no relocation needed" (cleared by */
 | | /* refs_clear_reloc); otherwise subtract it, adjusted by dec. */
606 | 32.2M | rp = print_reloc(prp, "ref",
607 | 32.2M | (const ref_packed *)
608 | 32.2M | (r_size(RP_REF(rp)) == 0 ? prp :
609 | 32.2M | (const ref_packed *)((const char *)prp -
610 | 32.2M | r_size(RP_REF(rp)) + dec)));
611 | 32.2M | break;
612 | 32.2M | }
613 | 816M | rputc(gcst->heap, 'u');
614 | 784M | rp += packed_per_ref;
615 | 784M | }
616 | | /* Use a severely deprecated pun to remove the const property. */
617 | 39.8M | {
618 | 39.8M | union { const ref_packed *r; ref_packed *w; } u;
619 | |
620 | 39.8M | u.r = rp;
621 | 39.8M | return u.w;
622 | 39.8M | }
623 | 39.8M | #undef RP_REF
624 | 39.8M | }
625 | | ref_packed *
626 | | igc_reloc_ref_ptr(const ref_packed * prp, gc_state_t *gcst)
 | | /* Checked form of pointer-to-ref relocation: if the referent is */
 | | /* unmarked (pointer into an uncollected space), return prp */
 | | /* unchanged; otherwise defer to igc_reloc_ref_ptr_nocheck. */
627 | 39.8M | {
628 | | /*
629 | | * Search forward for relocation. This algorithm is intrinsically very
630 | | * inefficient; we hope eventually to replace it with a better one.
631 | | */
632 | 39.8M | const ref_packed *rp = prp;
633 | | #ifdef ALIGNMENT_ALIASING_BUG
634 | | const ref *rpref;
635 | | # define RP_REF(rp) (rpref = (const ref *)rp, rpref)
636 | | #else
637 | 39.8M | # define RP_REF(rp) ((const ref *)rp)
638 | 39.8M | #endif
639 | | /*
640 | | * Iff this pointer points into a space that wasn't traced,
641 | | * the referent won't be marked. In this case, we shouldn't
642 | | * do any relocation. Check for this first.
643 | | */
644 | 39.8M | if (r_is_packed(rp)) {
645 | 13.5M | if (!r_has_pmark(rp))
646 | 8 | goto ret_rp;
647 | 26.2M | } else {
648 | 26.2M | if (!r_has_attr(RP_REF(rp), l_mark))
649 | 45 | goto ret_rp;
650 | 26.2M | }
651 | 39.8M | return igc_reloc_ref_ptr_nocheck(prp, gcst);
652 | 53 | ret_rp:
653 | | /* Use a severely deprecated pun to remove the const property. */
654 | 53 | {
655 | 53 | union { const ref_packed *r; ref_packed *w; } u;
656 | |
657 | 53 | u.r = rp;
658 | 53 | return u.w;
659 | 39.8M | }
660 | 39.8M | }
661 | | |
662 | | /* ------ Compaction phase ------ */ |
663 | | |
664 | | /* Compact a ref object. */ |
665 | | /* Remove the marks at the same time. */ |
666 | | static void
667 | | refs_compact(const gs_memory_t *mem, obj_header_t * pre, obj_header_t * dpre, uint size)
 | | /* Compact the refs object at pre (body size 'size' bytes) into */
 | | /* its relocated position dpre, dropping unmarked refs and */
 | | /* clearing marks on the survivors as they are copied. */
668 | 3.39M | {
669 | 3.39M | ref_packed *dest;
670 | 3.39M | ref_packed *src;
671 | 3.39M | ref_packed *end;
672 | 3.39M | uint new_size;
673 | |
674 | | /* The next switch controls an optimization
675 | | for the loop termination condition.
676 | | It was useful during the development,
677 | | when some assumptions were temporarily wrong.
678 | | We keep it for records. */
679 | |
680 | 3.39M | src = (ref_packed *) (pre + 1);
681 | 3.39M | end = (ref_packed *) ((byte *) src + size);
682 | | /*
683 | | * We know that a block of refs always ends with a
684 | | * full-size ref, so we only need to check for reaching the end
685 | | * of the block when we see one of those.
686 | | */
 | | /* When the object isn't moving, skip the leading run of marked */
 | | /* refs in place (unmark only); copying starts at the first gap. */
687 | 3.39M | if (dpre == pre) /* Loop while we don't need to copy. */
688 | 184M | for (;;) {
689 | 184M | if (r_is_packed(src)) {
690 | 15.5M | if (!r_has_pmark(src))
691 | 536k | break;
692 | 15.5M | if_debug1m('8', mem, " [8]packed ref "PRI_INTPTR" \"copied\"\n",
693 | 14.9M | (intptr_t)src);
694 | 14.9M | *src &= ~lp_mark;
695 | 14.9M | src++;
696 | 168M | } else { /* full-size ref */
697 | 168M | ref *const pref = (ref *)src;
698 | |
699 | 168M | if (!r_has_attr(pref, l_mark))
700 | 702k | break;
701 | 168M | if_debug1m('8', mem, " [8]ref "PRI_INTPTR" \"copied\"\n", (intptr_t)src);
702 | 167M | r_clear_attrs(pref, l_mark);
703 | 167M | src += packed_per_ref;
704 | 167M | }
705 | 184M | } else
 | | /* Moving: copy the object header first. */
706 | 2.15M | *dpre = *pre;
707 | 3.39M | dest = (ref_packed *) ((char *)dpre + ((char *)src - (char *)pre));
708 | 224M | for (;;) {
709 | 224M | if (r_is_packed(src)) {
710 | 136M | if (r_has_pmark(src)) {
711 | 107M | if_debug2m('8', mem, " [8]packed ref "PRI_INTPTR" copied to "PRI_INTPTR"\n",
712 | 107M | (intptr_t)src, (intptr_t)dest);
713 | 107M | *dest++ = *src & ~lp_mark;
714 | 107M | }
715 | 136M | src++;
716 | 136M | } else { /* full-size ref */
717 | 88.6M | if (r_has_attr((ref *) src, l_mark)) {
718 | 79.5M | ref rtemp;
719 | |
720 | 79.5M | if_debug2m('8', mem, " [8]ref "PRI_INTPTR" copied to "PRI_INTPTR"\n",
721 | 79.5M | (intptr_t)src, (intptr_t)dest);
722 | | /* We can't just use ref_assign_inline, */
723 | | /* because the source and destination */
724 | | /* might overlap! */
725 | 79.5M | ref_assign_inline(&rtemp, (ref *) src);
726 | 79.5M | r_clear_attrs(&rtemp, l_mark);
727 | 79.5M | ref_assign_inline((ref *) dest, &rtemp);
728 | 79.5M | src += packed_per_ref;
729 | 79.5M | dest += packed_per_ref;
730 | 79.5M | } else { /* check for end of block */
731 | 9.03M | src += packed_per_ref;
732 | 9.03M | if (src >= end)
733 | 3.39M | break;
734 | 9.03M | }
735 | 88.6M | }
736 | 224M | }
 | | /* +sizeof(ref) accounts for the final full-size ref, which is */
 | | /* re-created below rather than copied by the loop above. */
737 | 3.39M | new_size = (byte *) dest - (byte *) (dpre + 1) + sizeof(ref);
738 | | #ifdef DEBUG
739 | | /* Check that the relocation came out OK. */
740 | | /* NOTE: this check only works within a single clump. */
741 | | if ((byte *) src - (byte *) dest != r_size((ref *) src - 1) + sizeof(ref)) {
742 | | mlprintf3(mem, "Reloc error for refs "PRI_INTPTR": reloc = %lu, stored = %u\n",
743 | | (intptr_t) dpre, (ulong) ((byte *) src - (byte *) dest),
744 | | (uint) r_size((ref *) src - 1));
745 | | gs_abort(mem);
746 | | }
747 | | #endif
748 | | /* Pad to a multiple of sizeof(ref). */
749 | 7.71M | while (new_size % sizeof(ref))
750 | 4.31M | *dest++ = pt_tag(pt_integer),
751 | 4.31M | new_size += sizeof(ref_packed);
752 | | /* We want to make the newly freed space into a free block, */
753 | | /* but we can only do this if we have enough room. */
754 | 3.39M | if (size - new_size < sizeof(obj_header_t)) { /* Not enough room. Pad to original size. */
755 | 2.31M | while (new_size < size)
756 | 0 | *dest++ = pt_tag(pt_integer),
757 | 0 | new_size += sizeof(ref_packed);
758 | 2.31M | } else {
 | | /* Enough room: carve the tail into a free st_bytes object. */
759 | 1.08M | obj_header_t *pfree = (obj_header_t *) ((ref *) dest + 1);
760 | |
761 | 1.08M | pfree->o_pad = 0;
762 | 1.08M | pfree->o_alone = 0;
763 | 1.08M | pfree->o_size = size - new_size - sizeof(obj_header_t);
764 | 1.08M | pfree->o_type = &st_bytes;
765 | 1.08M | } |
766 | | /* Re-create the final ref. */ |
767 | 3.39M | r_set_type((ref *) dest, t_integer); |
768 | 3.39M | dpre->o_size = new_size; |
769 | 3.39M | } |