/src/ghostpdl/psi/igcref.c
Line | Count | Source |
1 | | /* Copyright (C) 2001-2023 Artifex Software, Inc. |
2 | | All Rights Reserved. |
3 | | |
4 | | This software is provided AS-IS with no warranty, either express or |
5 | | implied. |
6 | | |
7 | | This software is distributed under license and may not be copied, |
8 | | modified or distributed except as expressly authorized under the terms |
9 | | of the license contained in the file LICENSE in this distribution. |
10 | | |
11 | | Refer to licensing information at http://www.artifex.com or contact |
12 | | Artifex Software, Inc., 39 Mesa Street, Suite 108A, San Francisco, |
13 | | CA 94129, USA, for further information. |
14 | | */ |
15 | | |
16 | | |
17 | | /* ref garbage collector for Ghostscript */ |
18 | | #include "memory_.h" |
19 | | #include "ghost.h" |
20 | | #include "gsexit.h" |
21 | | #include "gsstruct.h" /* for gxalloc.h included by iastate.h */ |
22 | | #include "iname.h" |
23 | | #include "iastate.h" |
24 | | #include "idebug.h" |
25 | | #include "igc.h" |
26 | | #include "ipacked.h" |
27 | | #include "store.h" /* for ref_assign_inline */ |
28 | | |
29 | | /* Define whether to trace every step of relocating ref pointers. */ |
30 | | #if 0 |
31 | | # define rputc(m,c) dmputc(m,c) |
32 | | #else |
33 | 112G | # define rputc(m,c) DO_NOTHING |
34 | | #endif |
35 | | |
36 | | /* Forward references */ |
37 | | ptr_proc_reloc(igc_reloc_ref_ptr, ref_packed); |
38 | | ptr_proc_reloc(igc_reloc_ref_ptr_nocheck, ref_packed); |
39 | | refs_proc_reloc(igc_reloc_refs); |
40 | | |
41 | | /* |
42 | | * Define the 'structure' type descriptor for refs. |
43 | | * This is special because it has different shared procs. |
44 | | */ |
45 | | static gc_proc_clear_reloc(refs_clear_reloc); |
46 | | static gc_proc_set_reloc(refs_set_reloc); |
47 | | static gc_proc_compact(refs_compact); |
48 | | static const struct_shared_procs_t refs_shared_procs = |
49 | | {refs_clear_reloc, refs_set_reloc, refs_compact}; |
50 | | static struct_proc_clear_marks(refs_clear_marks); |
51 | | static struct_proc_reloc_ptrs(refs_do_reloc); |
52 | | const gs_memory_struct_type_t st_refs = |
53 | | {sizeof(ref), "refs", &refs_shared_procs, refs_clear_marks, 0, refs_do_reloc}; |
54 | | |
55 | | /* |
56 | | * Define the GC procedures for structs that actually contain refs. |
57 | | * These are special because the shared refs_* procedures |
58 | | * are never called. Instead, we unmark the individual refs in clear_marks, |
59 | | * disregard refs_*_reloc (because we will never relocate a ptr_ref_type |
60 | | * pointer pointing into the structure), disregard refs_compact (because |
61 | | * compaction is never required), and remove the marks in reloc_ptrs. |
62 | | * See also the comment about ptr_ref_type in imemory.h. |
63 | | */ |
64 | | CLEAR_MARKS_PROC(ref_struct_clear_marks) |
65 | 27.5M | { |
66 | 27.5M | ref *pref = (ref *) vptr; |
67 | 27.5M | ref *end = (ref *) ((char *)vptr + size); |
68 | | |
69 | 993M | for (; pref < end; pref++) |
70 | 965M | r_clear_attrs(pref, l_mark); |
71 | 27.5M | } |
72 | | ENUM_PTRS_BEGIN_PROC(ref_struct_enum_ptrs) |
73 | 496M | { |
74 | 496M | if (index >= size / sizeof(ref)) |
75 | 13.7M | return 0; |
76 | 482M | pep->ptr = (const ref *)vptr + index; |
77 | 482M | return ptr_ref_type; |
78 | 496M | ENUM_PTRS_END_PROC |
79 | 496M | } |
80 | 13.7M | RELOC_PTRS_BEGIN(ref_struct_reloc_ptrs) |
81 | 13.7M | { |
82 | 13.7M | vm_spaces spaces = gcst->spaces; |
83 | 13.7M | const gs_memory_t *cmem = space_system->stable_memory; |
84 | | |
85 | 13.7M | ref *beg = vptr; |
86 | 13.7M | ref *end = (ref *) ((char *)vptr + size); |
87 | | |
88 | 13.7M | igc_reloc_refs((ref_packed *) beg, (ref_packed *) end, gcst); |
89 | 13.7M | ref_struct_clear_marks(cmem, vptr, size, pstype); |
90 | 13.7M | } RELOC_PTRS_END |
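/*
 * Editor's sketch (hypothetical, not part of igcref.c): the enumeration
 * protocol that ref_struct_enum_ptrs above follows. The GC calls the
 * enumerator repeatedly with an increasing index; the enumerator hands
 * back one pointer per call and signals exhaustion by returning 0.
 * All names below (toy_ref, toy_enum_ptrs, TOY_PTR_REF_TYPE) are
 * invented stand-ins; the real protocol lives in the ENUM_PTRS_* macros.
 */
#if 0
typedef struct toy_ref_s { unsigned attrs; void *value; } toy_ref;

#define TOY_PTR_REF_TYPE 1      /* stand-in for ptr_ref_type */

static int
toy_enum_ptrs(const void *vptr, unsigned size, unsigned index,
              const void **pptr)
{
    if (index >= size / sizeof(toy_ref))
        return 0;                       /* no more pointers to trace */
    *pptr = (const toy_ref *)vptr + index;
    return TOY_PTR_REF_TYPE;            /* tells the GC how to trace it */
}
#endif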
91 | | |
92 | | /* ------ Unmarking phase ------ */ |
93 | | |
94 | | /* Unmark a single ref. */ |
95 | | void |
96 | | ptr_ref_unmark(enum_ptr_t *pep, gc_state_t * ignored) |
97 | 177k | { |
98 | 177k | ref_packed *rpp = (ref_packed *)pep->ptr; |
99 | | |
100 | 177k | if (r_is_packed(rpp)) |
101 | 0 | r_clear_pmark(rpp); |
102 | 177k | else |
103 | 177k | r_clear_attrs((ref *)rpp, l_mark); |
104 | 177k | } |
105 | | |
106 | | /* Unmarking routine for ref objects. */ |
107 | | static void |
108 | | refs_clear_marks(const gs_memory_t *cmem, |
109 | | void /*obj_header_t */ *vptr, uint size, |
110 | | const gs_memory_struct_type_t * pstype) |
111 | 380M | { |
112 | 380M | ref_packed *rp = (ref_packed *) vptr; |
113 | 380M | ref_packed *end = (ref_packed *) ((byte *) vptr + size); |
114 | | |
115 | | /* Since the last ref is full-size, we only need to check for */ |
116 | | /* the end of the block when we see one of those. */ |
117 | 81.7G | for (;;) { |
118 | 81.7G | if (r_is_packed(rp)) { |
119 | | #ifdef DEBUG |
120 | | if (gs_debug_c('8')) { |
121 | | dmlprintf1(cmem, " [8]unmark packed "PRI_INTPTR" ", (intptr_t) rp); |
122 | | debug_print_ref(cmem, (const ref *)rp); |
123 | | dmputs(cmem, "\n"); |
124 | | } |
125 | | #endif |
126 | 15.7G | r_clear_pmark(rp); |
127 | 15.7G | rp++; |
128 | 65.9G | } else { /* full-size ref */ |
129 | 65.9G | ref *const pref = (ref *)rp; |
130 | | |
131 | | #ifdef DEBUG |
132 | | if (gs_debug_c('8')) { |
133 | | dmlprintf1(cmem, " [8]unmark ref "PRI_INTPTR" ", (intptr_t)rp); |
134 | | debug_print_ref(cmem, pref); |
135 | | dmputs(cmem, "\n"); |
136 | | } |
137 | | #endif |
138 | 65.9G | r_clear_attrs(pref, l_mark); |
139 | 65.9G | rp += packed_per_ref; |
140 | 65.9G | if (rp >= (ref_packed *) end) |
141 | 380M | break; |
142 | 65.9G | } |
143 | 81.7G | } |
144 | 380M | } |
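/*
 * Editor's sketch (hypothetical): the scan shape used by refs_clear_marks
 * above. Because a ref block is guaranteed to end with a full-size ref,
 * the end-of-block test only has to run after consuming a full-size ref;
 * packed refs can be consumed unconditionally. Names and tag/mark bit
 * choices below are invented for illustration.
 */
#if 0
static void
toy_scan(unsigned *rp, unsigned *end, int packed_per_full)
{
    for (;;) {
        if (*rp & 1u) {          /* low tag bit set => packed element */
            *rp &= ~2u;          /* clear the toy mark bit */
            rp++;                /* no end test needed here */
        } else {                 /* full-size element */
            *rp &= ~2u;
            rp += packed_per_full;
            if (rp >= end)       /* only a full-size ref can be last */
                break;
        }
    }
}
#endif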
145 | | |
146 | | /* ------ Marking phase ------ */ |
147 | | |
148 | | /* Mark a ref. Return true if new mark. */ |
149 | | bool |
150 | | ptr_ref_mark(enum_ptr_t *pep, gc_state_t * ignored) |
151 | 0 | { |
152 | 0 | ref_packed *rpp = (void *)pep->ptr; |
153 | |
154 | 0 | if (r_is_packed(rpp)) { |
155 | 0 | if (r_has_pmark(rpp)) |
156 | 0 | return false; |
157 | 0 | r_set_pmark(rpp); |
158 | 0 | } else { |
159 | 0 | ref *const pref = (ref *)rpp; |
160 | |
161 | 0 | if (r_has_attr(pref, l_mark)) |
162 | 0 | return false; |
163 | 0 | r_set_attrs(pref, l_mark); |
164 | 0 | } |
165 | 0 | return true; |
166 | 0 | } |
167 | | |
168 | | /* ------ Relocation planning phase ------ */ |
169 | | |
170 | | /* |
171 | | * We store relocation in the size field of refs that don't use it, |
172 | | * so that we don't have to scan all the way to an unmarked object. |
173 | | * We must avoid nulls, which sometimes have useful information |
174 | | * in their size fields, and the types above t_next_index, which are |
175 | | * actually operators in disguise and also use the size field. |
176 | | */ |
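/*
 * Editor's sketch (hypothetical): the "stash relocation in the size
 * field" idea described above. A ref whose type neither is a null nor
 * uses r_size can carry the byte count of everything freed before it,
 * so a later lookup need not scan all the way to an unmarked object.
 * The predicate, enum values, and field names below are invented; the
 * real test is ref_type_uses_size_or_null().
 */
#if 0
typedef struct toy_ref_s { int type; unsigned size; } toy_ref;
enum { TOY_T_NULL, TOY_T_INTEGER, TOY_T_NEXT_INDEX };

static int
toy_type_uses_size_or_null(int type)
{
    return type == TOY_T_NULL || type >= TOY_T_NEXT_INDEX;
}

static void
toy_store_reloc(toy_ref *pref, unsigned reloc_bytes)
{
    if (!toy_type_uses_size_or_null(pref->type))
        pref->size = reloc_bytes;  /* size field is free: cache reloc */
    /* otherwise leave size alone; lookup falls through to a later ref */
}
#endif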
177 | | |
178 | | /* Clear the relocation for a ref object. */ |
179 | | static void |
180 | | refs_clear_reloc(obj_header_t *hdr, uint size) |
181 | 13.7M | { |
182 | 13.7M | ref_packed *rp = (ref_packed *) (hdr + 1); |
183 | 13.7M | ref_packed *end = (ref_packed *) ((byte *) rp + size); |
184 | | |
185 | 1.50G | while (rp < end) { |
186 | 1.49G | if (r_is_packed(rp)) |
187 | 513M | rp++; |
188 | 979M | else { |
189 | | /* Full-size ref. Store the relocation here if possible. */ |
190 | 979M | ref *const pref = (ref *)rp; |
191 | | |
192 | 979M | if (!ref_type_uses_size_or_null(r_type(pref))) { |
193 | 226M | if_debug1('8', " [8]clearing reloc at "PRI_INTPTR"\n", (intptr_t)rp); |
194 | 226M | r_set_size(pref, 0); |
195 | 226M | } |
196 | 979M | rp += packed_per_ref; |
197 | 979M | } |
198 | 1.49G | } |
199 | 13.7M | } |
200 | | |
201 | | /* Set the relocation for a ref object. */ |
202 | | static bool |
203 | | refs_set_reloc(obj_header_t * hdr, uint reloc, uint size) |
204 | 366M | { |
205 | 366M | ref_packed *rp = (ref_packed *) (hdr + 1); |
206 | 366M | ref_packed *end = (ref_packed *) ((byte *) rp + size); |
207 | 366M | uint freed = 0; |
208 | | |
209 | | /* |
210 | | * We have to be careful to keep refs aligned properly. |
211 | | * For the moment, we do this by either keeping or discarding |
212 | | * an entire (aligned) block of align_packed_per_ref packed elements |
213 | | * as a unit. We know that align_packed_per_ref <= packed_per_ref, |
214 | | * and we also know that packed refs are always allocated in blocks |
215 | | * of align_packed_per_ref, so this makes things relatively easy. |
216 | | */ |
217 | 69.1G | while (rp < end) { |
218 | 68.8G | if (r_is_packed(rp)) { |
219 | | #if align_packed_per_ref == 1 |
220 | | if (r_has_pmark(rp)) { |
221 | | if_debug1('8', |
222 | | " [8]packed ref "PRI_INTPTR" is marked\n", |
223 | | (intptr_t)rp); |
224 | | rp++; |
225 | | } else { |
226 | | #else |
227 | 3.81G | int i; |
228 | | |
229 | | /* |
230 | | * Note: align_packed_per_ref is typically |
231 | | * 2 or 4 for 32-bit processors. |
232 | | */ |
233 | 3.81G | #define all_marked (align_packed_per_ref * lp_mark) |
234 | | # if align_packed_per_ref == 2 |
235 | | # if ARCH_SIZEOF_INT == ARCH_SIZEOF_SHORT * 2 |
236 | | # undef all_marked |
237 | | # define all_marked ( (lp_mark << (sizeof(short) * 8)) + lp_mark ) |
238 | | # define marked (*(int *)rp & all_marked) |
239 | | # else |
240 | | # define marked ((*rp & lp_mark) + (rp[1] & lp_mark)) |
241 | | # endif |
242 | | # else |
243 | 3.81G | # if align_packed_per_ref == 4 |
244 | 3.81G | # define marked ((*rp & lp_mark) + (rp[1] & lp_mark) +\ |
245 | 3.81G | (rp[2] & lp_mark) + (rp[3] & lp_mark)) |
246 | | # else |
247 | | /* |
248 | | * The value of marked is logically a uint, not an int: |
249 | | * we declare it as int only to avoid a compiler warning |
250 | | * message about using a non-int value in a switch statement. |
251 | | */ |
252 | | int marked = *rp & lp_mark; |
253 | | |
254 | | for (i = 1; i < align_packed_per_ref; i++) |
255 | | marked += rp[i] & lp_mark; |
256 | | # endif |
257 | 3.81G | # endif |
258 | | /* |
259 | | * Now marked is lp_mark * the number of marked |
260 | | * packed refs in the aligned block, except for |
261 | | * a couple of special cases above. |
262 | | */ |
263 | 3.81G | switch (marked) { |
264 | 1.38G | case all_marked: |
265 | 1.38G | if_debug2('8', |
266 | 1.38G | " [8]packed refs "PRI_INTPTR".."PRI_INTPTR" are marked\n", |
267 | 1.38G | (intptr_t)rp, |
268 | 1.38G | (intptr_t)(rp + (align_packed_per_ref - 1))); |
269 | 1.38G | rp += align_packed_per_ref; |
270 | 1.38G | break; |
271 | 1.53G | default: |
272 | | /* At least one packed ref in the block */ |
273 | | /* is marked: Keep the whole block. */ |
274 | 7.68G | for (i = align_packed_per_ref; i--; rp++) { |
275 | 6.14G | r_set_pmark(rp); |
276 | 6.14G | if_debug1('8', |
277 | 6.14G | " [8]packed ref "PRI_INTPTR" is marked\n", |
278 | 6.14G | (intptr_t)rp); |
279 | 6.14G | } |
280 | 1.53G | break; |
281 | 897M | case 0: |
282 | 897M | #endif |
283 | 897M | if_debug2('8', " [8]%d packed ref(s) at "PRI_INTPTR" are unmarked\n", |
284 | 897M | align_packed_per_ref, (intptr_t)rp); |
285 | 897M | { |
286 | 897M | uint rel = reloc + freed; |
287 | | |
288 | | /* Change this to an integer so we can */ |
289 | | /* store the relocation here. */ |
290 | 897M | *rp = pt_tag(pt_integer) + |
291 | 897M | min(rel, packed_max_value); |
292 | 897M | } |
293 | 897M | rp += align_packed_per_ref; |
294 | 897M | freed += sizeof(ref_packed) * align_packed_per_ref; |
295 | 3.81G | } |
296 | 64.9G | } else { /* full-size ref */ |
297 | 64.9G | uint rel = reloc + freed; |
298 | | |
299 | | /* The following assignment is logically */ |
300 | | /* unnecessary; we do it only for convenience */ |
301 | | /* in debugging. */ |
302 | 64.9G | ref *pref = (ref *) rp; |
303 | | |
304 | 64.9G | if (!r_has_attr(pref, l_mark)) { |
305 | 40.9G | if_debug1('8', " [8]ref "PRI_INTPTR" is unmarked\n", |
306 | 40.9G | (intptr_t)pref); |
307 | | /* Change this to a mark so we can */ |
308 | | /* store the relocation. */ |
309 | 40.9G | r_set_type(pref, t_mark); |
310 | 40.9G | r_set_size(pref, rel); |
311 | 40.9G | freed += sizeof(ref); |
312 | 40.9G | } else { |
313 | 24.0G | if_debug1('8', " [8]ref "PRI_INTPTR" is marked\n", |
314 | 24.0G | (intptr_t)pref); |
315 | | /* Store the relocation here if possible. */ |
316 | 24.0G | if (!ref_type_uses_size_or_null(r_type(pref))) { |
317 | 4.74G | if_debug2('8', " [8]storing reloc %u at "PRI_INTPTR"\n", |
318 | 4.74G | rel, (intptr_t)pref); |
319 | 4.74G | r_set_size(pref, rel); |
320 | 4.74G | } |
321 | 24.0G | } |
322 | 64.9G | rp += packed_per_ref; |
323 | 64.9G | } |
324 | 68.8G | } |
325 | 366M | if_debug3('7', " [7]at end of refs "PRI_INTPTR", size = %u, freed = %u\n", |
326 | 366M | (intptr_t)(hdr + 1), size, freed); |
327 | 366M | if (freed == size) |
328 | 58.3M | return false; |
329 | 308M | #if ARCH_SIZEOF_INT > ARCH_SIZEOF_SHORT |
330 | | /* |
331 | | * If the final relocation can't fit in the r_size field |
332 | | * (which can't happen if the object shares a clump with |
333 | | * any other objects, so we know reloc = 0 in this case), |
334 | | * we have to keep the entire object unless there are no |
335 | | * references to any ref in it. |
336 | | */ |
337 | 308M | if (freed <= max_ushort) |
338 | 308M | return true; |
339 | | /* |
340 | | * We have to mark all surviving refs, but we also must |
341 | | * overwrite any non-surviving refs with something that |
342 | | * doesn't contain any pointers. |
343 | | */ |
344 | 5 | rp = (ref_packed *) (hdr + 1); |
345 | 40.6k | while (rp < end) { |
346 | 40.6k | if (r_is_packed(rp)) { |
347 | 0 | if (!r_has_pmark(rp)) |
348 | 0 | *rp = pt_tag(pt_integer) | lp_mark; |
349 | 0 | ++rp; |
350 | 40.6k | } else { /* The following assignment is logically */ |
351 | | /* unnecessary; we do it only for convenience */ |
352 | | /* in debugging. */ |
353 | 40.6k | ref *pref = (ref *) rp; |
354 | | |
355 | 40.6k | if (!r_has_attr(pref, l_mark)) { |
356 | 40.6k | r_set_type_attrs(pref, t_mark, l_mark); |
357 | 40.6k | r_set_size(pref, reloc); |
358 | 40.6k | } else { |
359 | 5 | if (!ref_type_uses_size_or_null(r_type(pref))) |
360 | 0 | r_set_size(pref, reloc); |
361 | 5 | } |
362 | 40.6k | rp += packed_per_ref; |
363 | 40.6k | } |
364 | 40.6k | } |
365 | | /* The last ref has to remain unmarked. */ |
366 | 5 | r_clear_attrs((ref *) rp - 1, l_mark); |
367 | 5 | #endif |
368 | 5 | return true; |
369 | 308M | } |
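/*
 * Editor's sketch (hypothetical): the three-way decision refs_set_reloc
 * makes per aligned group of packed refs when align_packed_per_ref == 4.
 * Summing the mark bits yields 4 * MARK (all marked: skip), 0 (none
 * marked: overwrite the first word with a packed-integer relocation),
 * or something in between (keep and mark the whole group, preserving
 * ref alignment). Constants and names are invented stand-ins.
 */
#if 0
#define TOY_MARK   2u          /* stand-in for lp_mark */
#define TOY_PT_INT 1u          /* stand-in for pt_tag(pt_integer) */
#define TOY_MAXVAL 0x7fffu     /* stand-in for packed_max_value */

static unsigned
toy_group_decision(unsigned rp[4], unsigned rel)
{
    unsigned marked = (rp[0] & TOY_MARK) + (rp[1] & TOY_MARK)
                    + (rp[2] & TOY_MARK) + (rp[3] & TOY_MARK);

    if (marked == 4 * TOY_MARK)
        return 0;                      /* all marked: nothing freed */
    if (marked != 0) {                 /* partially marked: keep all 4 */
        int i;
        for (i = 0; i < 4; i++)
            rp[i] |= TOY_MARK;
        return 0;
    }
    /* none marked: the group is freed; record the relocation in place */
    rp[0] = TOY_PT_INT + (rel < TOY_MAXVAL ? rel : TOY_MAXVAL);
    return 4 * sizeof(unsigned);       /* bytes freed by this group */
}
#endif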
370 | | |
371 | | /* ------ Relocation phase ------ */ |
372 | | |
373 | | /* Relocate all the pointers in a block of refs. */ |
374 | | static void |
375 | | refs_do_reloc(void /*obj_header_t */ *vptr, uint size, |
376 | | const gs_memory_struct_type_t * pstype, gc_state_t * gcst) |
377 | 322M | { |
378 | 322M | igc_reloc_refs((ref_packed *) vptr, |
379 | 322M | (ref_packed *) ((char *)vptr + size), |
380 | 322M | gcst); |
381 | 322M | } |
382 | | /* Relocate the contents of a block of refs. */ |
383 | | /* If gcst->relocating_untraced is true, we are relocating pointers from an */ |
384 | | /* untraced space, so relocate all refs, not just marked ones. */ |
385 | | void |
386 | | igc_reloc_refs(ref_packed * from, ref_packed * to, gc_state_t * gcst) |
387 | 348M | { |
388 | 348M | int min_trace = gcst->min_collect; |
389 | 348M | ref_packed *rp = from; |
390 | 348M | bool do_all = gcst->relocating_untraced; |
391 | | |
392 | 348M | vm_spaces spaces = gcst->spaces; |
393 | 348M | const gs_memory_t *cmem = space_system->stable_memory; |
394 | | |
395 | 41.4G | while (rp < to) { |
396 | 41.1G | ref *pref; |
397 | | #ifdef DEBUG |
398 | | const void *before = 0; |
399 | | const void *after = 0; |
400 | | # define DO_RELOC(var, stat)\ |
401 | | BEGIN before = (var); stat; after = (var); END |
402 | | # define SET_RELOC(var, expr)\ |
403 | | BEGIN before = (var); after = (var) = (expr); END |
404 | | #else |
405 | 41.1G | # define DO_RELOC(var, stat) stat |
406 | 41.1G | # define SET_RELOC(var, expr) var = expr |
407 | 41.1G | #endif |
408 | | |
409 | 41.1G | if (r_is_packed(rp)) { |
410 | 14.7G | rp++; |
411 | 14.7G | continue; |
412 | 14.7G | } |
413 | | /* The following assignment is logically unnecessary; */ |
414 | | /* we do it only for convenience in debugging. */ |
415 | 26.3G | pref = (ref *) rp; |
416 | 26.3G | if_debug3m('8', gcst->heap, " [8]relocating %s %d ref at "PRI_INTPTR"\n", |
417 | 26.3G | (r_has_attr(pref, l_mark) ? "marked" : "unmarked"), |
418 | 26.3G | r_btype(pref), (intptr_t)pref); |
419 | 26.3G | if ((r_has_attr(pref, l_mark) || do_all) && |
420 | 26.3G | r_space(pref) >= min_trace |
421 | 26.3G | ) { |
422 | 9.38G | switch (r_type(pref)) { |
423 | | /* Struct cases */ |
424 | 1.90M | case t_file: |
425 | 1.90M | DO_RELOC(pref->value.pfile, RELOC_VAR(pref->value.pfile)); |
426 | 1.90M | break; |
427 | 7.13M | case t_device: |
428 | 7.13M | DO_RELOC(pref->value.pdevice, |
429 | 7.13M | RELOC_VAR(pref->value.pdevice)); |
430 | 7.13M | break; |
431 | 467k | case t_fontID: |
432 | 3.97M | case t_struct: |
433 | 4.30M | case t_astruct: |
434 | 4.30M | case t_pdfctx: |
435 | 4.30M | DO_RELOC(pref->value.pstruct, |
436 | 4.30M | RELOC_VAR(pref->value.pstruct)); |
437 | 4.30M | break; |
438 | | /* Non-trivial non-struct cases */ |
439 | 193M | case t_dictionary: |
440 | 193M | rputc(gcst->heap, 'd'); |
441 | 193M | SET_RELOC(pref->value.pdict, |
442 | 193M | (dict *)igc_reloc_ref_ptr((ref_packed *)pref->value.pdict, gcst)); |
443 | 193M | break; |
444 | 1.66G | case t_array: |
445 | 1.66G | { |
446 | 1.66G | uint size = r_size(pref); |
447 | | |
448 | 1.66G | if (size != 0) { /* value.refs might be NULL */ |
449 | | |
450 | | /* |
451 | | * If the array is large, we allocated it in its |
452 | | * own object (at least originally -- this might |
453 | | * be a pointer to a subarray.) In this case, |
454 | | * we know it is the only object in its |
455 | | * containing st_refs object, so we know that |
456 | | * the mark containing the relocation appears |
457 | | * just after it. |
458 | | */ |
459 | 1.64G | if (size < max_size_st_refs / sizeof(ref)) { |
460 | 1.62G | rputc(gcst->heap, 'a'); |
461 | 1.62G | SET_RELOC(pref->value.refs, |
462 | 1.62G | (ref *) igc_reloc_ref_ptr( |
463 | 1.62G | (ref_packed *) pref->value.refs, gcst)); |
464 | 1.62G | } else { |
465 | 28.2M | rputc(gcst->heap, 'A'); |
466 | | /* |
467 | | * See the t_shortarray case below for why we |
468 | | * decrement size. |
469 | | */ |
470 | 28.2M | --size; |
471 | 28.2M | SET_RELOC(pref->value.refs, |
472 | 28.2M | (ref *) igc_reloc_ref_ptr( |
473 | 28.2M | (ref_packed *) (pref->value.refs + size), |
474 | 28.2M | gcst) - size); |
475 | 28.2M | } |
476 | 1.64G | } |
477 | 1.66G | } |
478 | 1.66G | break; |
479 | 1.19G | case t_mixedarray: |
480 | 1.19G | if (r_size(pref) != 0) { /* value.refs might be NULL */ |
481 | 1.19G | rputc(gcst->heap, 'm'); |
482 | 1.19G | SET_RELOC(pref->value.packed, |
483 | 1.19G | igc_reloc_ref_ptr(pref->value.packed, gcst)); |
484 | 1.19G | } |
485 | 1.19G | break; |
486 | 531M | case t_shortarray: |
487 | 531M | { |
488 | 531M | uint size = r_size(pref); |
489 | | |
490 | | /* |
491 | | * Since we know that igc_reloc_ref_ptr works by |
492 | | * scanning forward, and we know that all the |
493 | | * elements of this array itself are marked, we can |
494 | | * save some scanning time by relocating the pointer |
495 | | * to the end of the array rather than the |
496 | | * beginning. |
497 | | */ |
498 | 531M | if (size != 0) { /* value.refs might be NULL */ |
499 | 494M | rputc(gcst->heap, 's'); |
500 | | /* |
501 | | * igc_reloc_ref_ptr has to be able to determine |
502 | | * whether the pointer points into a space that |
503 | | * isn't being collected. It does this by |
504 | | * checking whether the referent of the pointer |
505 | | * is marked. For this reason, we have to pass |
506 | | * a pointer to the last real element of the |
507 | | * array, rather than just beyond it. |
508 | | */ |
509 | 494M | --size; |
510 | 494M | SET_RELOC(pref->value.packed, |
511 | 494M | igc_reloc_ref_ptr(pref->value.packed + size, |
512 | 494M | gcst) - size); |
513 | 494M | } |
514 | 531M | } |
515 | 531M | break; |
516 | 5.32G | case t_name: |
517 | 5.32G | { |
518 | 5.32G | void *psub = name_ref_sub_table(cmem, pref); |
519 | 5.32G | void *rsub = RELOC_OBJ(psub); /* gcst implicit */ |
520 | | |
521 | 5.32G | SET_RELOC(pref->value.pname, |
522 | 5.32G | (name *) |
523 | 5.32G | ((char *)rsub + ((char *)pref->value.pname - |
524 | 5.32G | (char *)psub))); |
525 | 5.32G | } break; |
526 | 322M | case t_string: |
527 | 322M | { |
528 | 322M | gs_string str; |
529 | | |
530 | 322M | str.data = pref->value.bytes; |
531 | 322M | str.size = r_size(pref); |
532 | | |
533 | 322M | DO_RELOC(str.data, RELOC_STRING_VAR(str)); |
534 | 322M | pref->value.bytes = str.data; |
535 | 322M | } |
536 | 322M | break; |
537 | 133M | case t_oparray: |
538 | 133M | rputc(gcst->heap, 'o'); |
539 | 133M | SET_RELOC(pref->value.const_refs, |
540 | 133M | (const ref *)igc_reloc_ref_ptr((const ref_packed *)pref->value.const_refs, gcst)); |
541 | 133M | break; |
542 | 0 | default: |
543 | 0 | goto no_reloc; /* don't print trace message */ |
544 | 9.38G | } |
545 | 9.38G | if_debug2m('8', gcst->heap, " [8]relocated "PRI_INTPTR" => "PRI_INTPTR"\n", |
546 | 9.38G | (intptr_t)before, (intptr_t)after); |
547 | 9.38G | } |
548 | 26.3G | no_reloc: |
549 | 26.3G | rp += packed_per_ref; |
550 | 26.3G | } |
551 | 348M | } |
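/*
 * Editor's sketch (hypothetical): the end-of-array trick used for
 * t_shortarray (and large t_array) above. Since igc_reloc_ref_ptr scans
 * forward from its argument, relocating a pointer to the *last* element
 * and then subtracting size - 1 skips the whole array during the scan.
 * Passing a pointer just beyond the array would not work: the referent's
 * mark bit is inspected, so the pointer must address a real element.
 * toy_elem and toy_reloc_elem are invented stand-ins.
 */
#if 0
typedef struct toy_elem_s { unsigned word; } toy_elem;

static toy_elem *
toy_reloc_array(toy_elem *base, unsigned size,
                toy_elem *(*toy_reloc_elem)(toy_elem *))
{
    if (size == 0)
        return base;                   /* nothing to relocate */
    --size;                            /* index of the last element */
    return toy_reloc_elem(base + size) - size;
}
#endif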
552 | | |
553 | | /* Relocate a pointer to a ref. */ |
554 | | /* See gsmemory.h for why the argument is const and the result is not. */ |
555 | | ref_packed * |
556 | | igc_reloc_ref_ptr_nocheck(const ref_packed * prp, gc_state_t *gcst) |
557 | 3.67G | { |
558 | | /* |
559 | | * Search forward for relocation. This algorithm is intrinsically very |
560 | | * inefficient; we hope eventually to replace it with a better one. |
561 | | */ |
562 | 3.67G | const ref_packed *rp = prp; |
563 | 3.67G | uint dec = 0; |
564 | | #ifdef ALIGNMENT_ALIASING_BUG |
565 | | const ref *rpref; |
566 | | # define RP_REF(rp) (rpref = (const ref *)rp, rpref) |
567 | | #else |
568 | 3.67G | # define RP_REF(rp) ((const ref *)rp) |
569 | 3.67G | #endif |
570 | 104G | for (;;) { |
571 | | |
572 | 104G | if (r_is_packed(rp)) { |
573 | | /* |
574 | | * Normally, an unmarked packed ref will be an |
575 | | * integer whose value is the amount of relocation. |
576 | | * However, the relocation value might have been |
577 | | * too large to fit. If this is the case, for |
578 | | * each such unmarked packed ref we pass over, |
579 | | * we have to decrement the final relocation. |
580 | | */ |
581 | 30.1G | rputc(gcst->heap, (*rp & lp_mark ? '1' : '0')); |
582 | 30.1G | if (!(*rp & lp_mark)) { |
583 | 759M | if (*rp != pt_tag(pt_integer) + packed_max_value) { |
584 | | /* This is a stored relocation value. */ |
585 | 677M | rputc(gcst->heap, '\n'); |
586 | 677M | rp = print_reloc(prp, "ref", |
587 | 677M | (const ref_packed *) |
588 | 677M | ((const char *)prp - |
589 | 677M | (*rp & packed_value_mask) + dec)); |
590 | 677M | break; |
591 | 677M | } |
592 | | /* |
593 | | * We know this is the first of an aligned block |
594 | | * of packed refs. Skip over the entire block, |
595 | | * decrementing the final relocation. |
596 | | */ |
597 | 81.4M | dec += sizeof(ref_packed) * align_packed_per_ref; |
598 | 81.4M | rp += align_packed_per_ref; |
599 | 81.4M | } else |
600 | 29.3G | rp++; |
601 | 29.4G | continue; |
602 | 30.1G | } |
603 | 74.7G | if (!ref_type_uses_size_or_null(r_type(RP_REF(rp)))) { |
604 | | /* reloc is in r_size */ |
605 | 3.00G | rputc(gcst->heap, '\n'); |
606 | 3.00G | rp = print_reloc(prp, "ref", |
607 | 3.00G | (const ref_packed *) |
608 | 3.00G | (r_size(RP_REF(rp)) == 0 ? prp : |
609 | 3.00G | (const ref_packed *)((const char *)prp - |
610 | 3.00G | r_size(RP_REF(rp)) + dec))); |
611 | 3.00G | break; |
612 | 3.00G | } |
613 | 74.7G | rputc(gcst->heap, 'u'); |
614 | 71.7G | rp += packed_per_ref; |
615 | 71.7G | } |
616 | | /* Use a severely deprecated pun to remove the const property. */ |
617 | 3.67G | { |
618 | 3.67G | union { const ref_packed *r; ref_packed *w; } u; |
619 | | |
620 | 3.67G | u.r = rp; |
621 | 3.67G | return u.w; |
622 | 3.67G | } |
623 | 3.67G | #undef RP_REF |
624 | 3.67G | } |
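/*
 * Editor's sketch (hypothetical): the forward-scan lookup performed by
 * igc_reloc_ref_ptr_nocheck above. Starting at the pointer being
 * relocated, walk forward until some element carries a stored relocation
 * value, then shift the original pointer back by that many bytes. The
 * real code additionally adjusts for packed groups whose relocation
 * overflowed (the 'dec' correction); this toy omits that refinement and
 * uses invented names throughout.
 */
#if 0
static const unsigned *
toy_reloc_lookup(const unsigned *prp,
                 unsigned (*stored_reloc)(const unsigned *))
{
    const unsigned *rp = prp;

    for (;;) {
        unsigned rel = stored_reloc(rp);   /* ~0u means "keep scanning" */

        if (rel != ~0u)
            return (const unsigned *)((const char *)prp - rel);
        rp++;                              /* marked element: skip it */
    }
}
#endif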
625 | | ref_packed * |
626 | | igc_reloc_ref_ptr(const ref_packed * prp, gc_state_t *gcst) |
627 | 3.67G | { |
628 | | /* |
629 | | * Search forward for relocation. This algorithm is intrinsically very |
630 | | * inefficient; we hope eventually to replace it with a better one. |
631 | | */ |
632 | 3.67G | const ref_packed *rp = prp; |
633 | | #ifdef ALIGNMENT_ALIASING_BUG |
634 | | const ref *rpref; |
635 | | # define RP_REF(rp) (rpref = (const ref *)rp, rpref) |
636 | | #else |
637 | 3.67G | # define RP_REF(rp) ((const ref *)rp) |
638 | 3.67G | #endif |
639 | | /* |
640 | | * Iff this pointer points into a space that wasn't traced, |
641 | | * the referent won't be marked. In this case, we shouldn't |
642 | | * do any relocation. Check for this first. |
643 | | */ |
644 | 3.67G | if (r_is_packed(rp)) { |
645 | 1.23G | if (!r_has_pmark(rp)) |
646 | 193 | goto ret_rp; |
647 | 2.43G | } else { |
648 | 2.43G | if (!r_has_attr(RP_REF(rp), l_mark)) |
649 | 47.0k | goto ret_rp; |
650 | 2.43G | } |
651 | 3.67G | return igc_reloc_ref_ptr_nocheck(prp, gcst); |
652 | 47.2k | ret_rp: |
653 | | /* Use a severely deprecated pun to remove the const property. */ |
654 | 47.2k | { |
655 | 47.2k | union { const ref_packed *r; ref_packed *w; } u; |
656 | | |
657 | 47.2k | u.r = rp; |
658 | 47.2k | return u.w; |
659 | 3.67G | } |
660 | 3.67G | } |
661 | | |
662 | | /* ------ Compaction phase ------ */ |
663 | | |
664 | | /* Compact a ref object. */ |
665 | | /* Remove the marks at the same time. */ |
666 | | static void |
667 | | refs_compact(const gs_memory_t *mem, obj_header_t * pre, obj_header_t * dpre, uint size) |
668 | 308M | { |
669 | 308M | ref_packed *dest; |
670 | 308M | ref_packed *src; |
671 | 308M | ref_packed *end; |
672 | 308M | uint new_size; |
673 | | |
 674 | | /* The check below controls an optimization |
 675 | | for the loop termination condition. |
 676 | | It was useful during development, |
 677 | | when some assumptions were temporarily wrong. |
 678 | | We keep it for the record. */ |
679 | | |
680 | 308M | src = (ref_packed *) (pre + 1); |
681 | 308M | end = (ref_packed *) ((byte *) src + size); |
682 | | /* |
683 | | * We know that a block of refs always ends with a |
684 | | * full-size ref, so we only need to check for reaching the end |
685 | | * of the block when we see one of those. |
686 | | */ |
687 | 308M | if (dpre == pre) /* Loop while we don't need to copy. */ |
688 | 18.8G | for (;;) { |
689 | 18.8G | if (r_is_packed(src)) { |
690 | 1.98G | if (!r_has_pmark(src)) |
691 | 45.9M | break; |
692 | 1.98G | if_debug1m('8', mem, " [8]packed ref "PRI_INTPTR" \"copied\"\n", |
693 | 1.94G | (intptr_t)src); |
694 | 1.94G | *src &= ~lp_mark; |
695 | 1.94G | src++; |
696 | 16.8G | } else { /* full-size ref */ |
697 | 16.8G | ref *const pref = (ref *)src; |
698 | | |
699 | 16.8G | if (!r_has_attr(pref, l_mark)) |
700 | 67.5M | break; |
701 | 16.8G | if_debug1m('8', mem, " [8]ref "PRI_INTPTR" \"copied\"\n", (intptr_t)src); |
702 | 16.7G | r_clear_attrs(pref, l_mark); |
703 | 16.7G | src += packed_per_ref; |
704 | 16.7G | } |
705 | 18.8G | } else |
706 | 194M | *dpre = *pre; |
707 | 308M | dest = (ref_packed *) ((char *)dpre + ((char *)src - (char *)pre)); |
708 | 20.4G | for (;;) { |
709 | 20.4G | if (r_is_packed(src)) { |
710 | 12.3G | if (r_has_pmark(src)) { |
711 | 9.73G | if_debug2m('8', mem, " [8]packed ref "PRI_INTPTR" copied to "PRI_INTPTR"\n", |
712 | 9.73G | (intptr_t)src, (intptr_t)dest); |
713 | 9.73G | *dest++ = *src & ~lp_mark; |
714 | 9.73G | } |
715 | 12.3G | src++; |
716 | 12.3G | } else { /* full-size ref */ |
717 | 8.12G | if (r_has_attr((ref *) src, l_mark)) { |
718 | 7.29G | ref rtemp; |
719 | | |
720 | 7.29G | if_debug2m('8', mem, " [8]ref "PRI_INTPTR" copied to "PRI_INTPTR"\n", |
721 | 7.29G | (intptr_t)src, (intptr_t)dest); |
722 | | /* We can't just use ref_assign_inline, */ |
723 | | /* because the source and destination */ |
724 | | /* might overlap! */ |
725 | 7.29G | ref_assign_inline(&rtemp, (ref *) src); |
726 | 7.29G | r_clear_attrs(&rtemp, l_mark); |
727 | 7.29G | ref_assign_inline((ref *) dest, &rtemp); |
728 | 7.29G | src += packed_per_ref; |
729 | 7.29G | dest += packed_per_ref; |
730 | 7.29G | } else { /* check for end of block */ |
731 | 829M | src += packed_per_ref; |
732 | 829M | if (src >= end) |
733 | 308M | break; |
734 | 829M | } |
735 | 8.12G | } |
736 | 20.4G | } |
737 | 308M | new_size = (byte *) dest - (byte *) (dpre + 1) + sizeof(ref); |
738 | | #ifdef DEBUG |
739 | | /* Check that the relocation came out OK. */ |
740 | | /* NOTE: this check only works within a single clump. */ |
741 | | if ((byte *) src - (byte *) dest != r_size((ref *) src - 1) + sizeof(ref)) { |
742 | | mlprintf3(mem, "Reloc error for refs "PRI_INTPTR": reloc = %lu, stored = %u\n", |
743 | | (intptr_t) dpre, (ulong) ((byte *) src - (byte *) dest), |
744 | | (uint) r_size((ref *) src - 1)); |
745 | | gs_abort(mem); |
746 | | } |
747 | | #endif |
748 | | /* Pad to a multiple of sizeof(ref). */ |
749 | 681M | while (new_size % sizeof(ref)) |
750 | 373M | *dest++ = pt_tag(pt_integer), |
751 | 373M | new_size += sizeof(ref_packed); |
752 | | /* We want to make the newly freed space into a free block, */ |
753 | | /* but we can only do this if we have enough room. */ |
754 | 308M | if (size - new_size < sizeof(obj_header_t)) { /* Not enough room. Pad to original size. */ |
755 | 209M | while (new_size < size) |
756 | 0 | *dest++ = pt_tag(pt_integer), |
757 | 0 | new_size += sizeof(ref_packed); |
758 | 209M | } else { |
759 | 98.6M | obj_header_t *pfree = (obj_header_t *) ((ref *) dest + 1); |
760 | | |
761 | 98.6M | pfree->o_pad = 0; |
762 | 98.6M | pfree->o_alone = 0; |
763 | 98.6M | pfree->o_size = size - new_size - sizeof(obj_header_t); |
764 | 98.6M | pfree->o_type = &st_bytes; |
765 | 98.6M | } |
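/*
 * Editor's sketch (hypothetical): why refs_compact stages each surviving
 * full-size ref through a local temporary (rtemp) above. When sliding
 * elements toward the front of the same block, source and destination
 * ranges can overlap, so each element is copied out, adjusted (mark bit
 * cleared), and copied back in -- the same reason memmove exists.
 * Names and the fixed buffer size are invented for illustration.
 */
#if 0
#include <string.h>

static void
toy_slide(char *dest, const char *src, size_t elem_size)
{
    char tmp[64];                       /* assumes elem_size <= 64 */

    memcpy(tmp, src, elem_size);        /* stage: src may overlap dest */
    tmp[0] &= (char)~2;                 /* e.g. clear a toy mark bit */
    memcpy(dest, tmp, elem_size);       /* now safe to write */
}
#endif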
766 | | /* Re-create the final ref. */ |
767 | 308M | r_set_type((ref *) dest, t_integer); |
768 | 308M | dpre->o_size = new_size; |
769 | 308M | } |