/src/php-src/Zend/zend_gc.c
Line | Count | Source |
1 | | /* |
2 | | +----------------------------------------------------------------------+ |
3 | | | Zend Engine | |
4 | | +----------------------------------------------------------------------+ |
5 | | | Copyright (c) Zend Technologies Ltd. (http://www.zend.com) | |
6 | | +----------------------------------------------------------------------+ |
7 | | | This source file is subject to version 2.00 of the Zend license, | |
8 | | | that is bundled with this package in the file LICENSE, and is | |
9 | | | available through the world-wide-web at the following url: | |
10 | | | http://www.zend.com/license/2_00.txt. | |
11 | | | If you did not receive a copy of the Zend license and are unable to | |
12 | | | obtain it through the world-wide-web, please send a note to | |
13 | | | license@zend.com so we can mail you a copy immediately. | |
14 | | +----------------------------------------------------------------------+ |
15 | | | Authors: David Wang <planetbeing@gmail.com> | |
16 | | | Dmitry Stogov <dmitry@php.net> | |
17 | | +----------------------------------------------------------------------+ |
18 | | */ |
19 | | |
20 | | /** |
21 | | * zend_gc_collect_cycles |
22 | | * ====================== |
23 | | * |
24 | | * Colors and its meaning |
25 | | * ---------------------- |
26 | | * |
27 | | * BLACK (GC_BLACK) - In use or free. |
28 | | * GREY (GC_GREY) - Possible member of cycle. |
29 | | * WHITE (GC_WHITE) - Member of garbage cycle. |
30 | | * PURPLE (GC_PURPLE) - Possible root of cycle. |
31 | | * |
32 | | * Colors described in the paper but not used |
33 | | * ------------------------------------------ |
34 | | * |
35 | | * GREEN - Acyclic |
36 | | * RED - Candidate cycle undergoing |
37 | | * ORANGE - Candidate cycle awaiting epoch boundary. |
38 | | * |
39 | | * |
40 | | * Flow |
41 | | * ===== |
42 | | * |
 * The garbage collection cycle starts from 'gc_mark_roots', which traverses
 * the possible roots and, in a depth-first traversal, calls mark_grey on
 * each root that is marked purple.
46 | | * |
47 | | * After all possible roots are traversed and marked, |
48 | | * gc_scan_roots will be called, and each root will be called with |
49 | | * gc_scan(root->ref) |
50 | | * |
51 | | * gc_scan checks the colors of possible members. |
52 | | * |
53 | | * If the node is marked as grey and the refcount > 0 |
54 | | * gc_scan_black will be called on that node to scan it's subgraph. |
55 | | * otherwise (refcount == 0), it marks the node white. |
56 | | * |
57 | | * A node MAY be added to possible roots when ZEND_UNSET_VAR happens or |
58 | | * zend_assign_to_variable is called only when possible garbage node is |
59 | | * produced. |
60 | | * gc_possible_root() will be called to add the nodes to possible roots. |
61 | | * |
62 | | * |
63 | | * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to |
64 | | * get the object properties to scan. |
65 | | * |
66 | | * |
67 | | * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf |
68 | | */ |
69 | | #include "zend.h" |
70 | | #include "zend_API.h" |
71 | | #include "zend_compile.h" |
72 | | #include "zend_errors.h" |
73 | | #include "zend_fibers.h" |
74 | | #include "zend_hrtime.h" |
75 | | #include "zend_portability.h" |
76 | | #include "zend_types.h" |
77 | | #include "zend_weakrefs.h" |
78 | | #include "zend_string.h" |
79 | | |
/* GC benchmarking counters are compiled out unless GC_BENCH is non-zero */
#ifndef GC_BENCH
# define GC_BENCH 0
#endif

/* Verbose GC tracing; a level > 1 enables per-reference trace output */
#ifndef ZEND_GC_DEBUG
# define ZEND_GC_DEBUG 0
#endif

/* GC_INFO layout: the low 20 bits hold the (compressed) root buffer
 * address, the next 2 bits hold the node color (see below). */
#define GC_ADDRESS  0x0fffffu
#define GC_COLOR    0x300000u

#define GC_BLACK    0x000000u /* must be zero; in use or free */
#define GC_WHITE    0x100000u /* member of garbage cycle */
#define GC_GREY     0x200000u /* possible member of cycle */
#define GC_PURPLE   0x300000u /* possible root of cycle */

/* Debug tracing helpers (no-ops unless ZEND_GC_DEBUG > 1) */
#if ZEND_GC_DEBUG > 1
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
# define GC_TRACE_REF(ref, format, ...) \
	do { \
		gc_trace_ref((zend_refcounted *) ref); \
		fprintf(stderr, format "\n", ##__VA_ARGS__); \
	} while (0)
# define GC_TRACE_SET_COLOR(ref, color) \
	GC_TRACE_REF(ref, "->%s", gc_color_name(color))
#else
# define GC_TRACE_REF(ref, format, ...)
# define GC_TRACE_SET_COLOR(ref, new_color)
# define GC_TRACE(str)
#endif
112 | | |
/* GC_INFO access: GC_INFO is stored in the upper bits of the refcounted
 * header's type_info word, shifted by GC_INFO_SHIFT. */
#define GC_REF_ADDRESS(ref) \
	(((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)

#define GC_REF_COLOR(ref) \
	(((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)

#define GC_REF_CHECK_COLOR(ref, color) \
	((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))

/* Overwrite address and color at once; type and flags are preserved */
#define GC_REF_SET_INFO(ref, info) do { \
		GC_TYPE_INFO(ref) = \
			(GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
			((info) << GC_INFO_SHIFT); \
	} while (0)

#define GC_REF_SET_COLOR(ref, c) do { \
		GC_TRACE_SET_COLOR(ref, c); \
		GC_TYPE_INFO(ref) = \
			(GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
			((c) << GC_INFO_SHIFT); \
	} while (0)

/* BLACK is the all-zero color, so clearing the color bits suffices */
#define GC_REF_SET_BLACK(ref) do { \
		GC_TRACE_SET_COLOR(ref, GC_BLACK); \
		GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
	} while (0)

/* PURPLE is the all-one color, so OR-ing the color bits suffices */
#define GC_REF_SET_PURPLE(ref) do { \
		GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
		GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
	} while (0)

/* bit stealing tags for gc_root_buffer.ref (low pointer bits) */
#define GC_BITS 0x3

#define GC_ROOT         0x0 /* possible root of circular garbage */
#define GC_UNUSED       0x1 /* part of linked list of unused buffers */
#define GC_GARBAGE      0x2 /* garbage to delete */
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */

#define GC_GET_PTR(ptr) \
	((void*)(((uintptr_t)(ptr)) & ~GC_BITS))

#define GC_IS_ROOT(ptr) \
	((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
#define GC_IS_UNUSED(ptr) \
	((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
#define GC_IS_GARBAGE(ptr) \
	((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
#define GC_IS_DTOR_GARBAGE(ptr) \
	((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)

#define GC_MAKE_GARBAGE(ptr) \
	((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
#define GC_MAKE_DTOR_GARBAGE(ptr) \
	((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
170 | | |
/* GC address conversion: root buffer entries are addressed by index */
#define GC_IDX2PTR(idx) (GC_G(buf) + (idx))
#define GC_PTR2IDX(ptr) ((ptr) - GC_G(buf))

/* Get the value to be placed in an unused buffer entry with the specified next unused list index */
#define GC_IDX2LIST(idx) ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
/* Get the index of the next item in the unused list from the given root buffer entry. */
#define GC_LIST2IDX(list) (((uint32_t)(uintptr_t)(list)) / sizeof(void*))

/* GC buffers: index 0 is reserved as the "invalid" index */
#define GC_INVALID    0
#define GC_FIRST_ROOT 1

#define GC_DEFAULT_BUF_SIZE (16 * 1024)
#define GC_BUF_GROW_STEP    (128 * 1024)

/* Indexes below this fit GC_ADDRESS uncompressed; see gc_compress() */
#define GC_MAX_UNCOMPRESSED (512 * 1024)
#define GC_MAX_BUF_SIZE     0x40000000

/* Activation threshold tuning; see gc_adjust_threshold() */
#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
#define GC_THRESHOLD_STEP    10000
#define GC_THRESHOLD_MAX     1000000000
#define GC_THRESHOLD_TRIGGER 100

/* GC flags */
#define GC_HAS_DESTRUCTORS (1<<0)

/* Weak maps: extra zval type-info bits used during a GC run */
#define Z_FROM_WEAKMAP_KEY (1<<0)
#define Z_FROM_WEAKMAP     (1<<1)
201 | | |
/* The WeakMap entry zv is reachable from roots by following the virtual
 * reference from a WeakMap key to the entry */
#define GC_FROM_WEAKMAP_KEY(zv) \
	(Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT))

#define GC_SET_FROM_WEAKMAP_KEY(zv) do { \
		zval *_z = (zv); \
		Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
	} while (0)

#define GC_UNSET_FROM_WEAKMAP_KEY(zv) do { \
		zval *_z = (zv); \
		Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
	} while (0)

/* The WeakMap entry zv is reachable from roots by following the reference from
 * the WeakMap */
#define GC_FROM_WEAKMAP(zv) \
	(Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT))

#define GC_SET_FROM_WEAKMAP(zv) do { \
		zval *_z = (zv); \
		Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
	} while (0)

#define GC_UNSET_FROM_WEAKMAP(zv) do { \
		zval *_z = (zv); \
		Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
	} while (0)

/* unused buffers */

/* Are there any unused root buffer entries on the free list? */
#define GC_HAS_UNUSED() \
	(GC_G(unused) != GC_INVALID)

/* Get the next unused entry and remove it from the list */
#define GC_FETCH_UNUSED() \
	gc_fetch_unused()

/* Add a root buffer entry to the unused list */
#define GC_LINK_UNUSED(root) \
	gc_link_unused(root)

/* Are there never-used slots left below the GC activation threshold? */
#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
	(GC_G(first_unused) < GC_G(gc_threshold))
/* Are there never-used slots left at all? */
#define GC_HAS_NEXT_UNUSED() \
	(GC_G(first_unused) != GC_G(buf_size))
#define GC_FETCH_NEXT_UNUSED() \
	gc_fetch_next_unused()

/* Pluggable cycle collector entry point */
ZEND_API int (*gc_collect_cycles)(void);
254 | | |
/* The type of a root buffer entry.
 *
 * The lower two bits are used for flags (GC_ROOT/GC_UNUSED/GC_GARBAGE/
 * GC_DTOR_GARBAGE) and need to be masked out to reconstruct a pointer.
 *
 * When a node in the root buffer is removed, the non-flag bits of the
 * unused entry are used to store the index of the next entry in the unused
 * list. */
typedef struct _gc_root_buffer {
	zend_refcounted *ref;
} gc_root_buffer;

typedef struct _zend_gc_globals {
	/* The root buffer, which stores possible roots of reference cycles. It is
	 * also used to store garbage to be collected at the end of a run.
	 * A single array which is reallocated as necessary. */
	gc_root_buffer *buf;

	bool gc_enabled;    /* automatic collection switched on */
	bool gc_active;     /* GC currently running, forbid nested GC */
	bool gc_protected;  /* GC protected, forbid root additions */
	bool gc_full;       /* buffer overflowed, collector shut down */

	uint32_t unused;        /* linked list of unused buffers */
	uint32_t first_unused;  /* first unused buffer */
	uint32_t gc_threshold;  /* GC collection threshold */
	uint32_t buf_size;      /* size of the GC buffer */
	uint32_t num_roots;     /* number of roots in GC buffer */

	uint32_t gc_runs;       /* number of GC runs since reset */
	uint32_t collected;     /* number of collected nodes since reset */

	zend_hrtime_t activated_at;   /* the timestamp of the last reset */
	zend_hrtime_t collector_time; /* time spent running GC (ns) */
	zend_hrtime_t dtor_time;      /* time spent calling destructors (ns) */
	zend_hrtime_t free_time;      /* time spent destroying nodes and freeing memory (ns) */

	uint32_t dtor_idx;            /* root buffer index (destructor phase cursor) */
	uint32_t dtor_end;            /* end of the destructor phase range */
	zend_fiber *dtor_fiber;       /* fiber state for the destructor phase (used outside this chunk) */
	bool dtor_fiber_running;

#if GC_BENCH
	uint32_t root_buf_length;
	uint32_t root_buf_peak;
	uint32_t zval_possible_root;
	uint32_t zval_buffered;
	uint32_t zval_remove_from_buffer;
	uint32_t zval_marked_grey;
#endif
} zend_gc_globals;

#ifdef ZTS
static int gc_globals_id;
static size_t gc_globals_offset;
/* Per-thread access to the GC globals */
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
#else
#define GC_G(v) (gc_globals.v)
static zend_gc_globals gc_globals;
#endif

/* Benchmarking counter helpers; no-ops unless GC_BENCH is enabled */
#if GC_BENCH
# define GC_BENCH_INC(counter) GC_G(counter)++
# define GC_BENCH_DEC(counter) GC_G(counter)--
# define GC_BENCH_PEAK(peak, counter) do { \
		if (GC_G(counter) > GC_G(peak)) { \
			GC_G(peak) = GC_G(counter); \
		} \
	} while (0)
#else
# define GC_BENCH_INC(counter)
# define GC_BENCH_DEC(counter)
# define GC_BENCH_PEAK(peak, counter)
#endif
329 | | |
330 | | |
/* Number of pointers per traversal-stack segment, sized so one segment
 * (including the two link pointers) fits a 4096-byte allocation after
 * allocator overhead. */
#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)

typedef struct _gc_stack gc_stack;

/* The stack used for graph traversal is stored as a linked list of segments */
struct _gc_stack {
	gc_stack *prev;
	gc_stack *next;
	zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
};

/* Declare the locals used by GC_STACK_PUSH/GC_STACK_POP:
 * the current segment and the top-of-stack index within it */
#define GC_STACK_DCL(init) \
	gc_stack *_stack = init; \
	size_t    _top = 0;

#define GC_STACK_PUSH(ref) \
	gc_stack_push(&_stack, &_top, ref);

#define GC_STACK_POP() \
	gc_stack_pop(&_stack, &_top)
351 | | |
352 | | static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack) |
353 | 20.1k | { |
354 | 20.1k | if (UNEXPECTED(!stack->next)) { |
355 | 18.7k | gc_stack *segment = emalloc(sizeof(gc_stack)); |
356 | 18.7k | segment->prev = stack; |
357 | 18.7k | segment->next = NULL; |
358 | 18.7k | stack->next = segment; |
359 | 18.7k | } |
360 | 20.1k | return stack->next; |
361 | 20.1k | } |
362 | | |
363 | | static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref) |
364 | 2.53M | { |
365 | 2.53M | if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) { |
366 | 1.99k | (*stack) = gc_stack_next(*stack); |
367 | 1.99k | (*top) = 0; |
368 | 1.99k | } |
369 | 2.53M | (*stack)->data[(*top)++] = ref; |
370 | 2.53M | } |
371 | | |
372 | | static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top) |
373 | 2.71M | { |
374 | 2.71M | if (UNEXPECTED((*top) == 0)) { |
375 | 187k | if (!(*stack)->prev) { |
376 | 185k | return NULL; |
377 | 185k | } else { |
378 | 1.99k | (*stack) = (*stack)->prev; |
379 | 1.99k | (*top) = GC_STACK_SEGMENT_SIZE - 1; |
380 | 1.99k | return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1]; |
381 | 1.99k | } |
382 | 2.52M | } else { |
383 | 2.52M | return (*stack)->data[--(*top)]; |
384 | 2.52M | } |
385 | 2.71M | } |
386 | | |
387 | | static void gc_stack_free(gc_stack *stack) |
388 | 21.7k | { |
389 | 21.7k | gc_stack *p = stack->next; |
390 | | |
391 | 40.4k | while (p) { |
392 | 18.7k | stack = p->next; |
393 | 18.7k | efree(p); |
394 | 18.7k | p = stack; |
395 | 18.7k | } |
396 | 21.7k | } |
397 | | |
398 | | /* Map a full index to a compressed index. |
399 | | * |
400 | | * The root buffer can have up to 2^30 entries, but we only have 20 bits to |
401 | | * store the index. So we use the 1<<19 bit as a compression flag and use the |
402 | | * other 19 bits to store the index modulo 2^19. */ |
403 | | static zend_always_inline uint32_t gc_compress(uint32_t idx) |
404 | 3.07M | { |
405 | 3.07M | if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) { |
406 | 3.07M | return idx; |
407 | 3.07M | } |
408 | 0 | return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED; |
409 | 3.07M | } |
410 | | |
411 | | /* Find the root buffer entry given a pointer and a compressed index. |
412 | | * Iterate through the root buffer in steps of 2^19 until the pointer |
413 | | * matches. */ |
414 | | static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx) |
415 | 0 | { |
416 | 0 | gc_root_buffer *root = GC_IDX2PTR(idx); |
417 | |
|
418 | 0 | if (EXPECTED(GC_GET_PTR(root->ref) == ref)) { |
419 | 0 | return root; |
420 | 0 | } |
421 | | |
422 | 0 | while (1) { |
423 | 0 | idx += GC_MAX_UNCOMPRESSED; |
424 | 0 | ZEND_ASSERT(idx < GC_G(first_unused)); |
425 | 0 | root = GC_IDX2PTR(idx); |
426 | 0 | if (GC_GET_PTR(root->ref) == ref) { |
427 | 0 | return root; |
428 | 0 | } |
429 | 0 | } |
430 | 0 | } |
431 | | |
432 | | /* Get the index of the next unused root buffer entry, and remove it from the |
433 | | * unused list. GC_HAS_UNUSED() must be true before calling this. */ |
434 | | static zend_always_inline uint32_t gc_fetch_unused(void) |
435 | 772k | { |
436 | 772k | uint32_t idx; |
437 | 772k | gc_root_buffer *root; |
438 | | |
439 | 772k | ZEND_ASSERT(GC_HAS_UNUSED()); |
440 | 772k | idx = GC_G(unused); |
441 | 772k | root = GC_IDX2PTR(idx); |
442 | 772k | ZEND_ASSERT(GC_IS_UNUSED(root->ref)); |
443 | 772k | GC_G(unused) = GC_LIST2IDX(root->ref); |
444 | 772k | return idx; |
445 | 772k | } |
446 | | |
/* Push *root* onto the head of the unused-entry free list. Its ref field
 * is overwritten with the GC_UNUSED-tagged index of the previous head. */
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
{
	root->ref = GC_IDX2LIST(GC_G(unused));
	GC_G(unused) = GC_PTR2IDX(root);
}
453 | | |
454 | | static zend_always_inline uint32_t gc_fetch_next_unused(void) |
455 | 2.29M | { |
456 | 2.29M | uint32_t idx; |
457 | | |
458 | 2.29M | ZEND_ASSERT(GC_HAS_NEXT_UNUSED()); |
459 | 2.29M | idx = GC_G(first_unused); |
460 | 2.29M | GC_G(first_unused) = GC_G(first_unused) + 1; |
461 | 2.29M | return idx; |
462 | 2.29M | } |
463 | | |
#if ZEND_GC_DEBUG > 1
/* Human-readable name for a GC color; trace output only */
static const char *gc_color_name(uint32_t color) {
	switch (color) {
		case GC_BLACK: return "black";
		case GC_WHITE: return "white";
		case GC_GREY: return "grey";
		case GC_PURPLE: return "purple";
		default: return "unknown";
	}
}
/* Print a one-line description of ref: pointer, refcount, buffer
 * address, color and a type-specific summary */
static void gc_trace_ref(zend_refcounted *ref) {
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object *) ref;
		fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			obj->ce->name->val, obj->handle);
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		zend_array *arr = (zend_array *) ref;
		fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			zend_hash_num_elements(arr));
	} else {
		fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			GC_TYPE(ref) == IS_REFERENCE
				? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
	}
}
#endif
496 | | |
/* Mark a root buffer entry unused: return it to the free list and
 * update the root count (the referenced node itself is untouched). */
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
{
	GC_LINK_UNUSED(root);
	GC_G(num_roots)--;
	GC_BENCH_DEC(root_buf_length);
}
504 | | |
505 | | static void root_buffer_dtor(zend_gc_globals *gc_globals) |
506 | 0 | { |
507 | 0 | if (gc_globals->buf) { |
508 | 0 | free(gc_globals->buf); |
509 | 0 | gc_globals->buf = NULL; |
510 | 0 | } |
511 | 0 | } |
512 | | |
/* Initialize a zend_gc_globals structure to its pre-activation state:
 * collector disabled and protected, no root buffer allocated yet
 * (the buffer is allocated lazily in gc_enable()). */
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
{
	gc_globals->gc_enabled = false;
	gc_globals->gc_active = false;
	gc_globals->gc_protected = true;  /* no root additions until enabled */
	gc_globals->gc_full = false;

	gc_globals->buf = NULL;
	gc_globals->unused = GC_INVALID;
	gc_globals->first_unused = GC_INVALID;
	gc_globals->gc_threshold = GC_INVALID;
	gc_globals->buf_size = GC_INVALID;
	gc_globals->num_roots = 0;

	/* statistics */
	gc_globals->gc_runs = 0;
	gc_globals->collected = 0;
	gc_globals->collector_time = 0;
	gc_globals->dtor_time = 0;
	gc_globals->free_time = 0;
	gc_globals->activated_at = 0;

	/* destructor-phase bookkeeping */
	gc_globals->dtor_idx = GC_FIRST_ROOT;
	gc_globals->dtor_end = 0;
	gc_globals->dtor_fiber = NULL;
	gc_globals->dtor_fiber_running = false;

#if GC_BENCH
	gc_globals->root_buf_length = 0;
	gc_globals->root_buf_peak = 0;
	gc_globals->zval_possible_root = 0;
	gc_globals->zval_buffered = 0;
	gc_globals->zval_remove_from_buffer = 0;
	gc_globals->zval_marked_grey = 0;
#endif
}
548 | | |
/* Process-wide GC globals setup. In ZTS builds this only registers the
 * per-thread globals slot (with ctor/dtor callbacks); otherwise it
 * initializes the single static instance directly. */
void gc_globals_ctor(void)
{
#ifdef ZTS
	ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
#else
	gc_globals_ctor_ex(&gc_globals);
#endif
}
557 | | |
/* Tear down GC globals. In ZTS builds the per-thread dtor registered in
 * gc_globals_ctor() releases the buffer, so nothing to do here. */
void gc_globals_dtor(void)
{
#ifndef ZTS
	root_buffer_dtor(&gc_globals);
#endif
}
564 | | |
565 | | void gc_reset(void) |
566 | 278k | { |
567 | 278k | if (GC_G(buf)) { |
568 | 278k | GC_G(gc_active) = 0; |
569 | 278k | GC_G(gc_protected) = 0; |
570 | 278k | GC_G(gc_full) = 0; |
571 | 278k | GC_G(unused) = GC_INVALID; |
572 | 278k | GC_G(first_unused) = GC_FIRST_ROOT; |
573 | 278k | GC_G(num_roots) = 0; |
574 | | |
575 | 278k | GC_G(gc_runs) = 0; |
576 | 278k | GC_G(collected) = 0; |
577 | | |
578 | 278k | GC_G(collector_time) = 0; |
579 | 278k | GC_G(dtor_time) = 0; |
580 | 278k | GC_G(free_time) = 0; |
581 | | |
582 | 278k | GC_G(dtor_idx) = GC_FIRST_ROOT; |
583 | 278k | GC_G(dtor_end) = 0; |
584 | 278k | GC_G(dtor_fiber) = NULL; |
585 | 278k | GC_G(dtor_fiber_running) = false; |
586 | | |
587 | | #if GC_BENCH |
588 | | GC_G(root_buf_length) = 0; |
589 | | GC_G(root_buf_peak) = 0; |
590 | | GC_G(zval_possible_root) = 0; |
591 | | GC_G(zval_buffered) = 0; |
592 | | GC_G(zval_remove_from_buffer) = 0; |
593 | | GC_G(zval_marked_grey) = 0; |
594 | | #endif |
595 | 278k | } |
596 | | |
597 | 278k | GC_G(activated_at) = zend_hrtime(); |
598 | 278k | } |
599 | | |
/* Enable/disable the garbage collector; returns the previous setting.
 * On the first transition to enabled, lazily allocates the root buffer
 * (persistent allocation) and initializes the run-time GC state. */
ZEND_API bool gc_enable(bool enable)
{
	bool old_enabled = GC_G(gc_enabled);
	GC_G(gc_enabled) = enable;
	if (enable && !old_enabled && GC_G(buf) == NULL) {
		GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
		GC_G(buf)[0].ref = NULL; /* index 0 is reserved (GC_INVALID) */
		GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
		GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
		gc_reset();
	}
	return old_enabled;
}
615 | | |
/* Whether automatic cycle collection is currently switched on */
ZEND_API bool gc_enabled(void)
{
	return GC_G(gc_enabled);
}
620 | | |
621 | | /* Protect the GC root buffer (prevent additions) */ |
622 | | ZEND_API bool gc_protect(bool protect) |
623 | 27.4k | { |
624 | 27.4k | bool old_protected = GC_G(gc_protected); |
625 | 27.4k | GC_G(gc_protected) = protect; |
626 | 27.4k | return old_protected; |
627 | 27.4k | } |
628 | | |
/* Whether root additions are currently forbidden */
ZEND_API bool gc_protected(void)
{
	return GC_G(gc_protected);
}
633 | | |
634 | | static void gc_grow_root_buffer(void) |
635 | 1 | { |
636 | 1 | size_t new_size; |
637 | | |
638 | 1 | if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) { |
639 | 0 | if (!GC_G(gc_full)) { |
640 | 0 | zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n"); |
641 | 0 | GC_G(gc_active) = 1; |
642 | 0 | GC_G(gc_protected) = 1; |
643 | 0 | GC_G(gc_full) = 1; |
644 | 0 | return; |
645 | 0 | } |
646 | 0 | } |
647 | 1 | if (GC_G(buf_size) < GC_BUF_GROW_STEP) { |
648 | 1 | new_size = GC_G(buf_size) * 2; |
649 | 1 | } else { |
650 | 0 | new_size = GC_G(buf_size) + GC_BUF_GROW_STEP; |
651 | 0 | } |
652 | 1 | if (new_size > GC_MAX_BUF_SIZE) { |
653 | 0 | new_size = GC_MAX_BUF_SIZE; |
654 | 0 | } |
655 | 1 | GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1); |
656 | 1 | GC_G(buf_size) = new_size; |
657 | 1 | } |
658 | | |
/* Adjust the GC activation threshold given the number of nodes collected
 * by the last run: unproductive runs raise the threshold (so GC triggers
 * less often), productive runs lower it back toward the default. */
static void gc_adjust_threshold(int count)
{
	uint32_t new_threshold;

	/* TODO Very simple heuristic for dynamic GC buffer resizing:
	 * If there are "too few" collections, increase the collection threshold
	 * by a fixed step */
	if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
		/* increase */
		if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
			new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
			if (new_threshold > GC_THRESHOLD_MAX) {
				new_threshold = GC_THRESHOLD_MAX;
			}
			if (new_threshold > GC_G(buf_size)) {
				/* the threshold must fit in the buffer; try to grow it */
				gc_grow_root_buffer();
			}
			if (new_threshold <= GC_G(buf_size)) {
				GC_G(gc_threshold) = new_threshold;
			}
		}
	} else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
		/* decrease, but never below the default */
		new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
		if (new_threshold < GC_THRESHOLD_DEFAULT) {
			new_threshold = GC_THRESHOLD_DEFAULT;
		}
		GC_G(gc_threshold) = new_threshold;
	}
}
689 | | |
/* Slow path of gc_possible_root(): the buffer is exhausted, so perform a
 * GC run first, then buffer ref if it is still alive and still
 * unbuffered afterwards. */
static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
{
	uint32_t idx;
	gc_root_buffer *newRoot;

	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
	ZEND_ASSERT(GC_INFO(ref) == 0);

	if (GC_G(gc_enabled) && !GC_G(gc_active)) {
		/* hold a temporary reference so the run cannot free ref */
		GC_ADDREF(ref);
		gc_adjust_threshold(gc_collect_cycles());
		if (UNEXPECTED(GC_DELREF(ref) == 0)) {
			/* our temporary reference was the last one: destroy now */
			rc_dtor_func(ref);
			return;
		} else if (UNEXPECTED(GC_INFO(ref))) {
			/* the run already (re)buffered ref */
			return;
		}
	}

	if (GC_HAS_UNUSED()) {
		idx = GC_FETCH_UNUSED();
	} else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
		idx = GC_FETCH_NEXT_UNUSED();
	} else {
		gc_grow_root_buffer();
		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
			/* growing failed (overflow, GC disabled): drop the root */
			return;
		}
		idx = GC_FETCH_NEXT_UNUSED();
	}

	newRoot = GC_IDX2PTR(idx);
	newRoot->ref = ref; /* GC_ROOT tag is 0 */
	GC_TRACE_SET_COLOR(ref, GC_PURPLE);

	idx = gc_compress(idx);
	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
	GC_G(num_roots)++;

	GC_BENCH_INC(zval_buffered);
	GC_BENCH_INC(root_buf_length);
	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
}
734 | | |
/* Add a possible root node to the buffer, colored purple.
 * May trigger a full GC run first when the buffer is exhausted or the
 * activation threshold is reached (see gc_possible_root_when_full). */
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
{
	uint32_t idx;
	gc_root_buffer *newRoot;

	if (UNEXPECTED(GC_G(gc_protected))) {
		/* root additions are currently forbidden */
		return;
	}

	GC_BENCH_INC(zval_possible_root);

	if (EXPECTED(GC_HAS_UNUSED())) {
		idx = GC_FETCH_UNUSED();
	} else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
		idx = GC_FETCH_NEXT_UNUSED();
	} else {
		/* threshold reached or buffer full: run GC, then retry */
		gc_possible_root_when_full(ref);
		return;
	}

	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
	ZEND_ASSERT(GC_INFO(ref) == 0);

	newRoot = GC_IDX2PTR(idx);
	newRoot->ref = ref; /* GC_ROOT tag is 0 */
	GC_TRACE_SET_COLOR(ref, GC_PURPLE);

	idx = gc_compress(idx);
	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
	GC_G(num_roots)++;

	GC_BENCH_INC(zval_buffered);
	GC_BENCH_INC(root_buf_length);
	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
}
772 | | |
773 | | /* Add an extra root during a GC run */ |
774 | | static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref) |
775 | 33 | { |
776 | 33 | uint32_t idx; |
777 | 33 | gc_root_buffer *newRoot; |
778 | | |
779 | 33 | if (EXPECTED(GC_HAS_UNUSED())) { |
780 | 0 | idx = GC_FETCH_UNUSED(); |
781 | 33 | } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) { |
782 | 33 | idx = GC_FETCH_NEXT_UNUSED(); |
783 | 33 | } else { |
784 | 0 | gc_grow_root_buffer(); |
785 | 0 | if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) { |
786 | | /* TODO: can this really happen? */ |
787 | 0 | return; |
788 | 0 | } |
789 | 0 | idx = GC_FETCH_NEXT_UNUSED(); |
790 | 0 | } |
791 | | |
792 | 33 | ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT); |
793 | 33 | ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0); |
794 | | |
795 | 33 | newRoot = GC_IDX2PTR(idx); |
796 | 33 | newRoot->ref = ref; /* GC_ROOT tag is 0 */ |
797 | | |
798 | 33 | idx = gc_compress(idx); |
799 | 33 | GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref)); |
800 | 33 | GC_G(num_roots)++; |
801 | | |
802 | 33 | GC_BENCH_INC(zval_buffered); |
803 | 33 | GC_BENCH_INC(root_buf_length); |
804 | 33 | GC_BENCH_PEAK(root_buf_peak, root_buf_length); |
805 | 33 | } |
806 | | |
807 | | /* Remove a node from the root buffer given its compressed index */ |
808 | | static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx) |
809 | 0 | { |
810 | 0 | gc_root_buffer *root = gc_decompress(ref, idx); |
811 | 0 | gc_remove_from_roots(root); |
812 | 0 | } |
813 | | |
/* Remove ref from the root buffer and repaint it black.
 * Note: the buffer address must be read before GC_REF_SET_INFO clears it. */
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
{
	gc_root_buffer *root;
	uint32_t idx = GC_REF_ADDRESS(ref); /* capture before GC_INFO is cleared */

	GC_BENCH_INC(zval_remove_from_buffer);

	if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
		GC_TRACE_SET_COLOR(ref, GC_BLACK);
	}
	GC_REF_SET_INFO(ref, 0);

	/* Perform decompression only in case of large buffers */
	if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
		gc_remove_compressed(ref, idx);
		return;
	}

	ZEND_ASSERT(idx);
	root = GC_IDX2PTR(idx);
	gc_remove_from_roots(root);
}
836 | | |
837 | | /* Mark all nodes reachable from ref as black (live). Restore the reference |
838 | | * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan. |
839 | | * To implement a depth-first search, discovered nodes are added to a stack |
840 | | * which is processed iteratively. */ |
/* Mark all nodes reachable from ref as black (live). Restore the reference
 * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan.
 * To implement a depth-first search, discovered nodes are added to a stack
 * which is processed iteratively.
 *
 * Precondition: ref has already been colored black by the caller.
 * The 'tail_call' label plus the explicit gc_stack replace recursion: the
 * first not-yet-black child found in a container is continued with directly
 * (goto tail_call), while the remaining children are pushed on the stack. */
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			/* Object is used as a key in one or more weakmaps: visit the
			 * (entry, weakmap) pairs; the table holds them interleaved,
			 * hence the n -= 2 stride. */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					zval *weakmap = zv+1;
					ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
					if (Z_OPT_COLLECTABLE_P(entry)) {
						GC_UNSET_FROM_WEAKMAP_KEY(entry);
						if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) {
							/* Weakmap was scanned in gc_mark_roots, we must
							 * ensure that it's eventually scanned in
							 * gc_scan_roots as well. */
							if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) {
								gc_extra_root(Z_COUNTED_P(weakmap));
							}
						} else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) {
							/* Both the entry weakmap and key are BLACK, so we
							 * can mark the entry BLACK as well.
							 * !GC_FROM_WEAKMAP(entry) means that the weakmap
							 * was already scanned black (or will not be
							 * scanned), so it's our responsibility to mark the
							 * entry */
							ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK));
							ref = Z_COUNTED_P(entry);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
					}
					zv+=2;
				}
			}

			/* Object IS a weakmap: visit its (key, entry) pairs. */
			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				zend_weakmap_get_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR);
					zval *key = zv;
					zval *entry = (zval*) Z_PTR_P(zv+1);
					if (Z_OPT_COLLECTABLE_P(entry)) {
						GC_UNSET_FROM_WEAKMAP(entry);
						if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) {
							/* Key was scanned in gc_mark_roots, we must
							 * ensure that it's eventually scanned in
							 * gc_scan_roots as well. */
							if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) {
								gc_extra_root(Z_COUNTED_P(key));
							}
						} else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) {
							/* Both the entry weakmap and key are BLACK, so we
							 * can mark the entry BLACK as well.
							 * !GC_FROM_WEAKMAP_KEY(entry) means that the key
							 * was already scanned black (or will not be
							 * scanned), so it's our responsibility to mark the
							 * entry */
							ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK));
							ref = Z_COUNTED_P(entry);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
					}
					zv += 2;
				}
				goto next;
			}

			/* Regular object: ask the handler for its GC table; it may also
			 * return an additional HashTable (e.g. the properties table). */
			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				/* Restore the refcount taken away by gc_mark_grey(). */
				GC_ADDREF(ht);
				if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
					GC_REF_SET_BLACK(ht);
					/* Process the zval table here, then continue with the
					 * returned HashTable via handle_ht. */
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}

handle_zvals:
			/* Walk a flat zval array; the first child needing a visit is
			 * continued with directly (tail call), the rest are pushed. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_ADDREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
						GC_REF_SET_BLACK(ref);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_ADDREF(ref);
								if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
									GC_REF_SET_BLACK(ref);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
		ht = (zend_array*)ref;
handle_ht:
		n = ht->nNumUsed;
		zv = ht->arPacked;
		if (HT_IS_PACKED(ht)) {
			/* Packed arrays store plain zvals, reuse the zval walker. */
			goto handle_zvals;
		}

		/* Non-packed hash: walk buckets, dereferencing INDIRECT slots
		 * (e.g. global/property table indirections). */
		p = (Bucket*)zv;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_ADDREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
					GC_REF_SET_BLACK(ref);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A reference has exactly one child; follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_ADDREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
				GC_REF_SET_BLACK(ref);
				goto tail_call;
			}
		}
	}

next:
	/* Done with this node: pop the next pending one, if any. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1039 | | |
1040 | | /* Traverse the graph of nodes referred to by ref. Decrement the reference |
1041 | | * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan. */ |
/* Traverse the graph of nodes referred to by ref. Decrement the reference
 * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan.
 *
 * After this pass, a node whose refcount dropped to zero is only kept alive
 * by references inside the candidate cycle. Precondition: ref has already
 * been colored grey by the caller. Iterative DFS: the first not-yet-grey
 * child is continued with directly (goto tail_call), the rest are pushed on
 * the explicit stack. */
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	GC_BENCH_INC(zval_marked_grey);

	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			/* Object is used as a key in weakmaps: the table interleaves
			 * (entry, weakmap) pairs, hence the n -= 2 stride. */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					zval *weakmap = zv+1;
					ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
					if (Z_COLLECTABLE_P(entry)) {
						GC_SET_FROM_WEAKMAP_KEY(entry);
						ref = Z_COUNTED_P(entry);
						/* Only DELREF if the contribution from the weakmap has
						 * not been cancelled yet */
						if (!GC_FROM_WEAKMAP(entry)) {
							GC_DELREF(ref);
						}
						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_GREY);
							GC_STACK_PUSH(ref);
						}
					}
					zv+=2;
				}
			}

			/* Object IS a weakmap: visit its entries (one zval each). */
			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				zend_weakmap_get_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_COLLECTABLE_P(entry)) {
						GC_SET_FROM_WEAKMAP(entry);
						ref = Z_COUNTED_P(entry);
						/* Only DELREF if the contribution from the weakmap key
						 * has not been cancelled yet */
						if (!GC_FROM_WEAKMAP_KEY(entry)) {
							GC_DELREF(ref);
						}
						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_GREY);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
				goto next;
			}

			/* Regular object: handler returns a zval table and possibly an
			 * extra HashTable (e.g. the properties table). */
			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				/* This DELREF is restored later by gc_scan_black() if the
				 * node turns out to be live. */
				GC_DELREF(ht);
				if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
					GC_REF_SET_COLOR(ht, GC_GREY);
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_DELREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_GREY);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}
handle_zvals:
			/* Flat zval array: continue with the first child needing a
			 * visit (tail call), push the rest. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_DELREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
						GC_REF_SET_COLOR(ref, GC_GREY);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_DELREF(ref);
								if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
									GC_REF_SET_COLOR(ref, GC_GREY);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
		ht = (zend_array*)ref;
handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			/* Packed arrays store plain zvals, reuse the zval walker. */
			zv = ht->arPacked;
			goto handle_zvals;
		}

		/* Non-packed hash: walk buckets, following INDIRECT slots. */
		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_DELREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
					GC_REF_SET_COLOR(ref, GC_GREY);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_DELREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_GREY);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A reference has exactly one child; follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_DELREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
				GC_REF_SET_COLOR(ref, GC_GREY);
				goto tail_call;
			}
		}
	}

next:
	/* Done with this node: pop the next pending one, if any. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1216 | | |
1217 | | /* Two-Finger compaction algorithm */ |
1218 | | static void gc_compact(void) |
1219 | 817k | { |
1220 | 817k | if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) { |
1221 | 396k | if (GC_G(num_roots)) { |
1222 | 9.74k | gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT); |
1223 | 9.74k | gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1); |
1224 | 9.74k | gc_root_buffer *end = GC_IDX2PTR(GC_G(num_roots)); |
1225 | 9.74k | uint32_t idx; |
1226 | 9.74k | zend_refcounted *p; |
1227 | | |
1228 | 18.4k | while (free < scan) { |
1229 | 92.5k | while (!GC_IS_UNUSED(free->ref)) { |
1230 | 80.4k | free++; |
1231 | 80.4k | } |
1232 | 32.6k | while (GC_IS_UNUSED(scan->ref)) { |
1233 | 20.5k | scan--; |
1234 | 20.5k | } |
1235 | 12.1k | if (scan > free) { |
1236 | 6.06k | p = scan->ref; |
1237 | 6.06k | free->ref = p; |
1238 | 6.06k | p = GC_GET_PTR(p); |
1239 | 6.06k | idx = gc_compress(GC_PTR2IDX(free)); |
1240 | 6.06k | GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p)); |
1241 | 6.06k | free++; |
1242 | 6.06k | scan--; |
1243 | 6.06k | if (scan <= end) { |
1244 | 3.43k | break; |
1245 | 3.43k | } |
1246 | 6.06k | } |
1247 | 12.1k | } |
1248 | 9.74k | } |
1249 | 396k | GC_G(unused) = GC_INVALID; |
1250 | 396k | GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT; |
1251 | 396k | } |
1252 | 817k | } |
1253 | | |
1254 | | /* For all roots marked purple, traverse the graph, decrementing the reference |
1255 | | * count of their child nodes. Mark visited nodes grey so that they are not |
1256 | | * visited again. See MarkRoots() in Bacon & Rajan. */ |
1257 | | static void gc_mark_roots(gc_stack *stack) |
1258 | 21.7k | { |
1259 | 21.7k | gc_root_buffer *current, *last; |
1260 | | |
1261 | 21.7k | gc_compact(); |
1262 | | |
1263 | 21.7k | current = GC_IDX2PTR(GC_FIRST_ROOT); |
1264 | 21.7k | last = GC_IDX2PTR(GC_G(first_unused)); |
1265 | 210k | while (current != last) { |
1266 | 189k | if (GC_IS_ROOT(current->ref)) { |
1267 | 189k | if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) { |
1268 | 62.9k | GC_REF_SET_COLOR(current->ref, GC_GREY); |
1269 | 62.9k | gc_mark_grey(current->ref, stack); |
1270 | 62.9k | } |
1271 | 189k | } |
1272 | 189k | current++; |
1273 | 189k | } |
1274 | 21.7k | } |
1275 | | |
1276 | | /* Traverse the reference graph of ref. Evaluate grey nodes and mark them |
1277 | | * black (to keep) or white (to free). Note that nodes initially marked white |
1278 | | * may later become black if they are visited from a live node. |
1279 | | * See Scan() in Bacon & Rajan. */ |
1280 | | static void gc_scan(zend_refcounted *ref, gc_stack *stack) |
1281 | 62.9k | { |
1282 | 62.9k | HashTable *ht; |
1283 | 62.9k | Bucket *p; |
1284 | 62.9k | zval *zv; |
1285 | 62.9k | uint32_t n; |
1286 | 62.9k | GC_STACK_DCL(stack); |
1287 | | |
1288 | 1.55M | tail_call: |
1289 | 1.55M | if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1290 | 337 | goto next; |
1291 | 337 | } |
1292 | | |
1293 | 1.55M | if (GC_REFCOUNT(ref) > 0) { |
1294 | 32.8k | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
1295 | 32.8k | GC_REF_SET_BLACK(ref); |
1296 | 32.8k | if (UNEXPECTED(!_stack->next)) { |
1297 | 18.1k | gc_stack_next(_stack); |
1298 | 18.1k | } |
1299 | | /* Split stack and reuse the tail */ |
1300 | 32.8k | _stack->next->prev = NULL; |
1301 | 32.8k | gc_scan_black(ref, _stack->next); |
1302 | 32.8k | _stack->next->prev = _stack; |
1303 | 32.8k | } |
1304 | 32.8k | goto next; |
1305 | 32.8k | } |
1306 | | |
1307 | 1.52M | if (GC_TYPE(ref) == IS_OBJECT) { |
1308 | 545k | zend_object *obj = (zend_object*)ref; |
1309 | 545k | if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) { |
1310 | 545k | zval *table; |
1311 | 545k | int len; |
1312 | | |
1313 | 545k | if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) { |
1314 | 292 | zend_weakmap_get_object_entry_gc(obj, &table, &len); |
1315 | 292 | n = len; |
1316 | 292 | zv = table; |
1317 | 451 | for (; n != 0; n--) { |
1318 | 159 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1319 | 159 | zval *entry = (zval*) Z_PTR_P(zv); |
1320 | 159 | if (Z_OPT_COLLECTABLE_P(entry)) { |
1321 | 104 | ref = Z_COUNTED_P(entry); |
1322 | 104 | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1323 | 29 | GC_REF_SET_COLOR(ref, GC_WHITE); |
1324 | 29 | GC_STACK_PUSH(ref); |
1325 | 29 | } |
1326 | 104 | } |
1327 | 159 | zv++; |
1328 | 159 | } |
1329 | 292 | } |
1330 | | |
1331 | 545k | ht = obj->handlers->get_gc(obj, &table, &len); |
1332 | 545k | n = len; |
1333 | 545k | zv = table; |
1334 | 545k | if (UNEXPECTED(ht)) { |
1335 | 526k | if (GC_REF_CHECK_COLOR(ht, GC_GREY)) { |
1336 | 526k | GC_REF_SET_COLOR(ht, GC_WHITE); |
1337 | 526k | GC_STACK_PUSH((zend_refcounted *) ht); |
1338 | 709k | for (; n != 0; n--) { |
1339 | 182k | if (Z_COLLECTABLE_P(zv)) { |
1340 | 181k | ref = Z_COUNTED_P(zv); |
1341 | 181k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1342 | 177k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1343 | 177k | GC_STACK_PUSH(ref); |
1344 | 177k | } |
1345 | 181k | } |
1346 | 182k | zv++; |
1347 | 182k | } |
1348 | 526k | goto handle_ht; |
1349 | 526k | } |
1350 | 526k | } |
1351 | | |
1352 | 55.2k | handle_zvals: |
1353 | 298k | for (; n != 0; n--) { |
1354 | 262k | if (Z_COLLECTABLE_P(zv)) { |
1355 | 70.6k | ref = Z_COUNTED_P(zv); |
1356 | 70.6k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1357 | 19.0k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1358 | 19.0k | zv++; |
1359 | 38.6k | while (--n) { |
1360 | 19.5k | if (Z_COLLECTABLE_P(zv)) { |
1361 | 6.54k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1362 | 6.54k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1363 | 3.64k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1364 | 3.64k | GC_STACK_PUSH(ref); |
1365 | 3.64k | } |
1366 | 6.54k | } |
1367 | 19.5k | zv++; |
1368 | 19.5k | } |
1369 | 19.0k | goto tail_call; |
1370 | 19.0k | } |
1371 | 70.6k | } |
1372 | 243k | zv++; |
1373 | 243k | } |
1374 | 55.2k | } |
1375 | 977k | } else if (GC_TYPE(ref) == IS_ARRAY) { |
1376 | 965k | ht = (HashTable *)ref; |
1377 | 965k | ZEND_ASSERT(ht != &EG(symbol_table)); |
1378 | | |
1379 | 1.49M | handle_ht: |
1380 | 1.49M | n = ht->nNumUsed; |
1381 | 1.49M | if (HT_IS_PACKED(ht)) { |
1382 | 36.7k | zv = ht->arPacked; |
1383 | 36.7k | goto handle_zvals; |
1384 | 36.7k | } |
1385 | | |
1386 | 1.45M | p = ht->arData; |
1387 | 4.15M | for (; n != 0; n--) { |
1388 | 3.01M | zv = &p->val; |
1389 | 3.01M | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1390 | 1.64M | zv = Z_INDIRECT_P(zv); |
1391 | 1.64M | } |
1392 | 3.01M | if (Z_COLLECTABLE_P(zv)) { |
1393 | 1.22M | ref = Z_COUNTED_P(zv); |
1394 | 1.22M | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1395 | 317k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1396 | 317k | p++; |
1397 | 1.28M | while (--n) { |
1398 | 966k | zv = &p->val; |
1399 | 966k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1400 | 131k | zv = Z_INDIRECT_P(zv); |
1401 | 131k | } |
1402 | 966k | if (Z_COLLECTABLE_P(zv)) { |
1403 | 788k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1404 | 788k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1405 | 446k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1406 | 446k | GC_STACK_PUSH(ref); |
1407 | 446k | } |
1408 | 788k | } |
1409 | 966k | p++; |
1410 | 966k | } |
1411 | 317k | goto tail_call; |
1412 | 317k | } |
1413 | 1.22M | } |
1414 | 2.69M | p++; |
1415 | 2.69M | } |
1416 | 1.45M | } else if (GC_TYPE(ref) == IS_REFERENCE) { |
1417 | 12.0k | if (Z_COLLECTABLE(((zend_reference*)ref)->val)) { |
1418 | 10.8k | ref = Z_COUNTED(((zend_reference*)ref)->val); |
1419 | 10.8k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1420 | 1.62k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1421 | 1.62k | goto tail_call; |
1422 | 1.62k | } |
1423 | 10.8k | } |
1424 | 12.0k | } |
1425 | | |
1426 | 1.21M | next: |
1427 | 1.21M | ref = GC_STACK_POP(); |
1428 | 1.21M | if (ref) { |
1429 | 1.15M | goto tail_call; |
1430 | 1.15M | } |
1431 | 1.21M | } |
1432 | | |
1433 | | /* Scan all roots, coloring grey nodes black or white */ |
1434 | | static void gc_scan_roots(gc_stack *stack) |
1435 | 21.7k | { |
1436 | 21.7k | uint32_t idx, end; |
1437 | 21.7k | gc_root_buffer *current; |
1438 | | |
1439 | | /* Root buffer might be reallocated during gc_scan, |
1440 | | * make sure to reload pointers. */ |
1441 | 21.7k | idx = GC_FIRST_ROOT; |
1442 | 21.7k | end = GC_G(first_unused); |
1443 | 210k | while (idx != end) { |
1444 | 189k | current = GC_IDX2PTR(idx); |
1445 | 189k | if (GC_IS_ROOT(current->ref)) { |
1446 | 189k | if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) { |
1447 | 62.9k | GC_REF_SET_COLOR(current->ref, GC_WHITE); |
1448 | 62.9k | gc_scan(current->ref, stack); |
1449 | 62.9k | } |
1450 | 189k | } |
1451 | 189k | idx++; |
1452 | 189k | } |
1453 | | |
1454 | | /* Scan extra roots added during gc_scan */ |
1455 | 21.8k | while (idx != GC_G(first_unused)) { |
1456 | 33 | current = GC_IDX2PTR(idx); |
1457 | 33 | if (GC_IS_ROOT(current->ref)) { |
1458 | 33 | if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) { |
1459 | 33 | GC_REF_SET_COLOR(current->ref, GC_WHITE); |
1460 | 33 | gc_scan(current->ref, stack); |
1461 | 33 | } |
1462 | 33 | } |
1463 | 33 | idx++; |
1464 | 33 | } |
1465 | 21.7k | } |
1466 | | |
1467 | | /* Add a node to the buffer with the garbage flag, so that it will be |
1468 | | * destroyed and freed when the scan is complete. */ |
1469 | | static void gc_add_garbage(zend_refcounted *ref) |
1470 | 826k | { |
1471 | 826k | uint32_t idx; |
1472 | 826k | gc_root_buffer *buf; |
1473 | | |
1474 | 826k | if (GC_HAS_UNUSED()) { |
1475 | 0 | idx = GC_FETCH_UNUSED(); |
1476 | 826k | } else if (GC_HAS_NEXT_UNUSED()) { |
1477 | 826k | idx = GC_FETCH_NEXT_UNUSED(); |
1478 | 826k | } else { |
1479 | 1 | gc_grow_root_buffer(); |
1480 | 1 | if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) { |
1481 | 0 | return; |
1482 | 0 | } |
1483 | 1 | idx = GC_FETCH_NEXT_UNUSED(); |
1484 | 1 | } |
1485 | | |
1486 | 826k | buf = GC_IDX2PTR(idx); |
1487 | 826k | buf->ref = GC_MAKE_GARBAGE(ref); |
1488 | | |
1489 | 826k | idx = gc_compress(idx); |
1490 | 826k | GC_REF_SET_INFO(ref, idx | GC_BLACK); |
1491 | 826k | GC_G(num_roots)++; |
1492 | 826k | } |
1493 | | |
1494 | | /* Traverse the reference graph from ref, marking any white nodes as garbage. */ |
1495 | | static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack) |
1496 | 24.0k | { |
1497 | 24.0k | int count = 0; |
1498 | 24.0k | HashTable *ht; |
1499 | 24.0k | Bucket *p; |
1500 | 24.0k | zval *zv; |
1501 | 24.0k | uint32_t n; |
1502 | 24.0k | GC_STACK_DCL(stack); |
1503 | | |
1504 | 983k | tail_call: |
1505 | | /* don't count references for compatibility ??? */ |
1506 | 983k | if (GC_TYPE(ref) != IS_REFERENCE) { |
1507 | 971k | count++; |
1508 | 971k | } |
1509 | | |
1510 | 983k | if (GC_TYPE(ref) == IS_OBJECT) { |
1511 | 543k | zend_object *obj = (zend_object*)ref; |
1512 | | |
1513 | 543k | if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) { |
1514 | 543k | int len; |
1515 | 543k | zval *table; |
1516 | | |
1517 | | /* optimization: color is GC_BLACK (0) */ |
1518 | 543k | if (!GC_INFO(ref)) { |
1519 | 403k | gc_add_garbage(ref); |
1520 | 403k | } |
1521 | 543k | if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED) |
1522 | 416k | && (obj->handlers->dtor_obj != zend_objects_destroy_object |
1523 | 416k | || obj->ce->destructor != NULL)) { |
1524 | 2.86k | *flags |= GC_HAS_DESTRUCTORS; |
1525 | 2.86k | } |
1526 | | |
1527 | 543k | if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) { |
1528 | 233 | zend_weakmap_get_object_entry_gc(obj, &table, &len); |
1529 | 233 | n = len; |
1530 | 233 | zv = table; |
1531 | 337 | for (; n != 0; n--) { |
1532 | 104 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1533 | 104 | zval *entry = (zval*) Z_PTR_P(zv); |
1534 | 104 | if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) { |
1535 | 67 | GC_UNSET_FROM_WEAKMAP_KEY(entry); |
1536 | 67 | GC_UNSET_FROM_WEAKMAP(entry); |
1537 | 67 | ref = Z_COUNTED_P(entry); |
1538 | 67 | GC_ADDREF(ref); |
1539 | 67 | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1540 | 18 | GC_REF_SET_BLACK(ref); |
1541 | 18 | GC_STACK_PUSH(ref); |
1542 | 18 | } |
1543 | 67 | } |
1544 | 104 | zv++; |
1545 | 104 | } |
1546 | 233 | } |
1547 | | |
1548 | 543k | if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) { |
1549 | 57 | zend_weakmap_get_entry_gc(obj, &table, &len); |
1550 | 57 | n = len; |
1551 | 57 | zv = table; |
1552 | 132 | for (; n != 0; n--) { |
1553 | 75 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1554 | 75 | zval *entry = (zval*) Z_PTR_P(zv); |
1555 | 75 | if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP(entry)) { |
1556 | 47 | GC_UNSET_FROM_WEAKMAP_KEY(entry); |
1557 | 47 | GC_UNSET_FROM_WEAKMAP(entry); |
1558 | 47 | ref = Z_COUNTED_P(entry); |
1559 | 47 | GC_ADDREF(ref); |
1560 | 47 | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1561 | 37 | GC_REF_SET_BLACK(ref); |
1562 | 37 | GC_STACK_PUSH(ref); |
1563 | 37 | } |
1564 | 47 | } |
1565 | 75 | zv++; |
1566 | 75 | } |
1567 | 57 | goto next; |
1568 | 57 | } |
1569 | | |
1570 | 543k | ht = obj->handlers->get_gc(obj, &table, &len); |
1571 | 543k | n = len; |
1572 | 543k | zv = table; |
1573 | 543k | if (UNEXPECTED(ht)) { |
1574 | 525k | GC_ADDREF(ht); |
1575 | 525k | if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) { |
1576 | 525k | GC_REF_SET_BLACK(ht); |
1577 | 707k | for (; n != 0; n--) { |
1578 | 182k | if (Z_COLLECTABLE_P(zv)) { |
1579 | 181k | ref = Z_COUNTED_P(zv); |
1580 | 181k | GC_ADDREF(ref); |
1581 | 181k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1582 | 177k | GC_REF_SET_BLACK(ref); |
1583 | 177k | GC_STACK_PUSH(ref); |
1584 | 177k | } |
1585 | 181k | } |
1586 | 182k | zv++; |
1587 | 182k | } |
1588 | 525k | goto handle_ht; |
1589 | 525k | } |
1590 | 525k | } |
1591 | | |
1592 | 33.8k | handle_zvals: |
1593 | 176k | for (; n != 0; n--) { |
1594 | 160k | if (Z_COLLECTABLE_P(zv)) { |
1595 | 43.2k | ref = Z_COUNTED_P(zv); |
1596 | 43.2k | GC_ADDREF(ref); |
1597 | 43.2k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1598 | 17.8k | GC_REF_SET_BLACK(ref); |
1599 | 17.8k | zv++; |
1600 | 31.9k | while (--n) { |
1601 | 14.1k | if (Z_COLLECTABLE_P(zv)) { |
1602 | 2.90k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1603 | 2.90k | GC_ADDREF(ref); |
1604 | 2.90k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1605 | 681 | GC_REF_SET_BLACK(ref); |
1606 | 681 | GC_STACK_PUSH(ref); |
1607 | 681 | } |
1608 | 2.90k | } |
1609 | 14.1k | zv++; |
1610 | 14.1k | } |
1611 | 17.8k | goto tail_call; |
1612 | 17.8k | } |
1613 | 43.2k | } |
1614 | 143k | zv++; |
1615 | 143k | } |
1616 | 33.8k | } |
1617 | 543k | } else if (GC_TYPE(ref) == IS_ARRAY) { |
1618 | | /* optimization: color is GC_BLACK (0) */ |
1619 | 428k | if (!GC_INFO(ref)) { |
1620 | 423k | gc_add_garbage(ref); |
1621 | 423k | } |
1622 | 428k | ht = (zend_array*)ref; |
1623 | | |
1624 | 954k | handle_ht: |
1625 | 954k | n = ht->nNumUsed; |
1626 | 954k | if (HT_IS_PACKED(ht)) { |
1627 | 16.0k | zv = ht->arPacked; |
1628 | 16.0k | goto handle_zvals; |
1629 | 16.0k | } |
1630 | | |
1631 | 938k | p = ht->arData; |
1632 | 1.87M | for (; n != 0; n--) { |
1633 | 1.25M | zv = &p->val; |
1634 | 1.25M | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1635 | 755k | zv = Z_INDIRECT_P(zv); |
1636 | 755k | } |
1637 | 1.25M | if (Z_COLLECTABLE_P(zv)) { |
1638 | 425k | ref = Z_COUNTED_P(zv); |
1639 | 425k | GC_ADDREF(ref); |
1640 | 425k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1641 | 316k | GC_REF_SET_BLACK(ref); |
1642 | 316k | p++; |
1643 | 1.27M | while (--n) { |
1644 | 963k | zv = &p->val; |
1645 | 963k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1646 | 131k | zv = Z_INDIRECT_P(zv); |
1647 | 131k | } |
1648 | 963k | if (Z_COLLECTABLE_P(zv)) { |
1649 | 787k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1650 | 787k | GC_ADDREF(ref); |
1651 | 787k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1652 | 445k | GC_REF_SET_BLACK(ref); |
1653 | 445k | GC_STACK_PUSH(ref); |
1654 | 445k | } |
1655 | 787k | } |
1656 | 963k | p++; |
1657 | 963k | } |
1658 | 316k | goto tail_call; |
1659 | 316k | } |
1660 | 425k | } |
1661 | 935k | p++; |
1662 | 935k | } |
1663 | 938k | } else if (GC_TYPE(ref) == IS_REFERENCE) { |
1664 | 11.7k | if (Z_COLLECTABLE(((zend_reference*)ref)->val)) { |
1665 | 10.6k | ref = Z_COUNTED(((zend_reference*)ref)->val); |
1666 | 10.6k | GC_ADDREF(ref); |
1667 | 10.6k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1668 | 1.42k | GC_REF_SET_BLACK(ref); |
1669 | 1.42k | goto tail_call; |
1670 | 1.42k | } |
1671 | 10.6k | } |
1672 | 11.7k | } |
1673 | | |
1674 | 647k | next: |
1675 | 647k | ref = GC_STACK_POP(); |
1676 | 647k | if (ref) { |
1677 | 623k | goto tail_call; |
1678 | 623k | } |
1679 | | |
1680 | 24.0k | return count; |
1681 | 647k | } |
1682 | | |
1683 | | /* Traverse the reference graph from all roots, marking white nodes as garbage. */ |
1684 | | static int gc_collect_roots(uint32_t *flags, gc_stack *stack) |
1685 | 21.7k | { |
1686 | 21.7k | uint32_t idx, end; |
1687 | 21.7k | zend_refcounted *ref; |
1688 | 21.7k | int count = 0; |
1689 | 21.7k | gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT); |
1690 | 21.7k | gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused)); |
1691 | | |
1692 | | /* remove non-garbage from the list */ |
1693 | 210k | while (current != last) { |
1694 | 189k | if (GC_IS_ROOT(current->ref)) { |
1695 | 189k | if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) { |
1696 | 44.0k | GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */ |
1697 | 44.0k | gc_remove_from_roots(current); |
1698 | 44.0k | } |
1699 | 189k | } |
1700 | 189k | current++; |
1701 | 189k | } |
1702 | | |
1703 | 21.7k | gc_compact(); |
1704 | | |
1705 | | /* Root buffer might be reallocated during gc_collect_white, |
1706 | | * make sure to reload pointers. */ |
1707 | 21.7k | idx = GC_FIRST_ROOT; |
1708 | 21.7k | end = GC_G(first_unused); |
1709 | 166k | while (idx != end) { |
1710 | 145k | current = GC_IDX2PTR(idx); |
1711 | 145k | ref = current->ref; |
1712 | 145k | ZEND_ASSERT(GC_IS_ROOT(ref)); |
1713 | 145k | current->ref = GC_MAKE_GARBAGE(ref); |
1714 | 145k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1715 | 24.0k | GC_REF_SET_BLACK(ref); |
1716 | 24.0k | count += gc_collect_white(ref, flags, stack); |
1717 | 24.0k | } |
1718 | 145k | idx++; |
1719 | 145k | } |
1720 | | |
1721 | 21.7k | return count; |
1722 | 21.7k | } |
1723 | | |
/* Remove data reachable from 'ref' (a root whose destructor will run) from the
 * root buffer, so that the destructor phase does not free values the dtor may
 * still touch. 'root' is non-NULL only for the initial (root) entry; nested
 * children are visited via the explicit GC stack and the tail_call loop.
 * Returns the number of entries removed/accounted, which the caller subtracts
 * from the garbage count. */
static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	int count = 0;
	GC_STACK_DCL(stack);

tail_call:
	if (root) {
		/* The starting root itself stays in the buffer (colored purple by the
		 * caller); just count it once and continue with its children. */
		root = NULL;
		count++;
	} else if (GC_REF_ADDRESS(ref) != 0
	 && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
		GC_TRACE_REF(ref, "removing from buffer");
		GC_REMOVE_FROM_BUFFER(ref);
		count++;
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* Follow IS_REFERENCE indirection to the wrapped value. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			goto tail_call;
		}
		goto next;
	} else {
		goto next;
	}

	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			int len;
			zval *table;

			/* Objects referenced by weakmaps expose extra GC entries. */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_OPT_COLLECTABLE_P(entry)) {
						ref = Z_COUNTED_P(entry);
						GC_STACK_PUSH(ref);
					}
					zv++;
				}
			}

			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				/* get_gc() returned both a zval table and a hash table:
				 * push all table entries, then fall through to the HT. */
				for (; n != 0; n--) {
					if (Z_COLLECTABLE_P(zv)) {
						ref = Z_COUNTED_P(zv);
						GC_STACK_PUSH(ref);
					}
					zv++;
				}
				if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
					GC_TRACE_REF(ht, "removing from buffer");
					GC_REMOVE_FROM_BUFFER(ht);
				}
				goto handle_ht;
			}

handle_zvals:
			/* Walk a flat zval table: the first collectable entry is taken
			 * as the tail call; the remaining ones are pushed on the stack. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					zv++;
					while (--n) {
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_STACK_PUSH(ref);
						}
						zv++;
					}
					goto tail_call;
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ht = (zend_array*)ref;

handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			/* Packed arrays store a plain zval vector. */
			zv = ht->arPacked;
			goto handle_zvals;
		}

		/* Bucket-based hash: same first-child-tail-call scheme as above,
		 * unwrapping IS_INDIRECT slots (e.g. symbol tables). */
		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				p++;
				while (--n) {
					zv = &p->val;
					if (Z_TYPE_P(zv) == IS_INDIRECT) {
						zv = Z_INDIRECT_P(zv);
					}
					if (Z_COLLECTABLE_P(zv)) {
						zend_refcounted *ref = Z_COUNTED_P(zv);
						GC_STACK_PUSH(ref);
					}
					p++;
				}
				goto tail_call;
			}
			p++;
		}
	}

next:
	/* Continue with any deferred children until the stack drains. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}

	return count;
}
1853 | | |
/* Forward declarations for helpers defined later in this file. */
static void zend_get_gc_buffer_release(void);
static void zend_gc_check_root_tmpvars(void);
static void zend_gc_remove_root_tmpvars(void);

/* Internal function descriptor backing the destructor fiber (defined at the
 * end of this file, initialized in gc_init()). */
static zend_internal_function gc_destructor_fiber;
1859 | | |
/* Fatal, non-returning error path: creating the Fiber object that runs
 * destructors failed. */
static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void)
{
	zend_error_noreturn(E_ERROR, "Unable to create destructor fiber");
}
1864 | | |
/* Fatal, non-returning error path: starting the destructor fiber failed. */
static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void)
{
	zend_error_noreturn(E_ERROR, "Unable to start destructor fiber");
}
1869 | | |
/* Call destructors for garbage in the buffer.
 * Walks root-buffer slots [idx, end) and invokes dtor_obj on every entry
 * marked DTOR_GARBAGE whose destructor has not run yet. When executed inside
 * the destructor fiber, 'fiber' is non-NULL and the current index is saved in
 * GC_G(dtor_idx) so a successor fiber can resume where this one was suspended.
 * Returns FAILURE if the fiber was suspended by a destructor, SUCCESS
 * otherwise. */
static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber)
{
	gc_root_buffer *current;
	zend_refcounted *p;

	/* The root buffer might be reallocated during destructors calls,
	 * make sure to reload pointers as necessary. */
	while (idx != end) {
		current = GC_IDX2PTR(idx);
		if (GC_IS_DTOR_GARBAGE(current->ref)) {
			p = GC_GET_PTR(current->ref);
			/* Mark this is as a normal root for the next GC run */
			current->ref = p;
			/* Double check that the destructor hasn't been called yet. It
			 * could have already been invoked indirectly by some other
			 * destructor. */
			if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
				if (fiber != NULL) {
					/* Record progress so a replacement fiber can continue. */
					GC_G(dtor_idx) = idx;
				}
				zend_object *obj = (zend_object*)p;
				GC_TRACE_REF(obj, "calling destructor");
				GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
				/* Hold a reference across the dtor call so the object
				 * cannot be freed while its destructor runs. */
				GC_ADDREF(obj);
				obj->handlers->dtor_obj(obj);
				GC_TRACE_REF(obj, "returned from destructor");
				GC_DELREF(obj);
				if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) {
					/* We resumed after suspension */
					gc_check_possible_root((zend_refcounted*)&obj->gc);
					return FAILURE;
				}
			}
		}
		idx++;
	}

	return SUCCESS;
}
1910 | | |
/* Create and start a new Fiber that executes gc_destructor_fiber(), storing
 * it in GC_G(dtor_fiber). Aborts with a fatal error if creation or startup
 * fails. Returns the started fiber. */
static zend_fiber *gc_create_destructor_fiber(void)
{
	zval zobj;
	zend_fiber *fiber;

	GC_TRACE("starting destructor fiber");

	if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) {
		gc_create_destructor_fiber_error();
	}

	fiber = (zend_fiber *)Z_OBJ(zobj);
	/* Point the fiber at the internal gc_destructor_fiber function instead
	 * of a userland callable. */
	fiber->fci.size = sizeof(fiber->fci);
	fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber;

	GC_G(dtor_fiber) = fiber;

	if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) {
		gc_start_destructor_fiber_error();
	}

	return fiber;
}
1934 | | |
/* Run the destructor phase inside a dedicated fiber. Used when GC triggers
 * while a userland fiber is active, so that a destructor suspending the
 * current fiber does not leave the GC in an inconsistent state. If the
 * destructor fiber gets suspended by a destructor, a fresh fiber is created
 * to finish the remaining destructors, starting at GC_G(dtor_idx) + 1. */
static zend_never_inline void gc_call_destructors_in_fiber(uint32_t end)
{
	ZEND_ASSERT(!GC_G(dtor_fiber_running));

	zend_fiber *fiber = GC_G(dtor_fiber);

	/* Hand the work range to the fiber via globals. */
	GC_G(dtor_idx) = GC_FIRST_ROOT;
	GC_G(dtor_end) = GC_G(first_unused);

	if (UNEXPECTED(!fiber)) {
		fiber = gc_create_destructor_fiber();
	} else {
		zend_fiber_resume(fiber, NULL, NULL);
	}

	for (;;) {
		/* At this point, fiber has executed until suspension */
		GC_TRACE("resumed from destructor fiber");

		if (UNEXPECTED(GC_G(dtor_fiber_running))) {
			/* Fiber was suspended by a destructor. Start a new one for the
			 * remaining destructors. */
			GC_TRACE("destructor fiber suspended by destructor");
			GC_G(dtor_fiber) = NULL;
			GC_G(dtor_idx)++;
			/* We do not own the fiber anymore. It may be collected if the
			 * application does not reference it. */
			zend_object_release(&fiber->std);
			fiber = gc_create_destructor_fiber();
			continue;
		} else {
			/* Fiber suspended itself after calling all destructors */
			GC_TRACE("destructor fiber suspended itself");
			break;
		}
	}
}
1972 | | |
/* Perform a garbage collection run. The default implementation of gc_collect_cycles.
 *
 * Phases: mark roots grey, scan (recoloring live data black / garbage white),
 * collect white roots, optionally run destructors (possibly in a fiber), then
 * destroy and free the remaining garbage. If destructors ran, the whole cycle
 * is re-run once, since destructors may have created new references.
 * Returns the total number of collected refcounted values. */
ZEND_API int zend_gc_collect_cycles(void)
{
	int total_count = 0;
	bool should_rerun_gc = false;
	bool did_rerun_gc = false;

	zend_hrtime_t start_time = zend_hrtime();
	if (GC_G(num_roots) && !GC_G(gc_active)) {
		zend_gc_remove_root_tmpvars();
	}

rerun_gc:
	if (GC_G(num_roots)) {
		int count;
		gc_root_buffer *current, *last;
		zend_refcounted *p;
		uint32_t gc_flags = 0;
		uint32_t idx, end;
		gc_stack stack;

		stack.prev = NULL;
		stack.next = NULL;

		/* Reentrant call (e.g. triggered from within a collection): bail. */
		if (GC_G(gc_active)) {
			GC_G(collector_time) += zend_hrtime() - start_time;
			return 0;
		}

		GC_TRACE("Collecting cycles");
		GC_G(gc_runs)++;
		GC_G(gc_active) = 1;

		GC_TRACE("Marking roots");
		gc_mark_roots(&stack);
		GC_TRACE("Scanning roots");
		gc_scan_roots(&stack);

		GC_TRACE("Collecting roots");
		count = gc_collect_roots(&gc_flags, &stack);

		if (!GC_G(num_roots)) {
			/* nothing to free */
			GC_TRACE("Nothing to free");
			gc_stack_free(&stack);
			GC_G(gc_active) = 0;
			goto finish;
		}

		end = GC_G(first_unused);

		if (gc_flags & GC_HAS_DESTRUCTORS) {
			GC_TRACE("Calling destructors");

			/* During a destructor call, new externally visible references to nested data may
			 * be introduced. These references can be introduced in a way that does not
			 * modify any refcounts, so we have no real way to detect this situation
			 * short of rerunning full GC tracing. What we do instead is to only run
			 * destructors at this point and automatically re-run GC afterwards. */
			should_rerun_gc = true;

			/* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
			 * color them purple. This serves a double purpose: First, they should be
			 * considered new potential roots for the next GC run. Second, it will prevent
			 * their removal from the root buffer by nested data removal. */
			idx = GC_FIRST_ROOT;
			current = GC_IDX2PTR(GC_FIRST_ROOT);
			while (idx != end) {
				if (GC_IS_GARBAGE(current->ref)) {
					p = GC_GET_PTR(current->ref);
					if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
						zend_object *obj = (zend_object *) p;
						if (obj->handlers->dtor_obj != zend_objects_destroy_object
								|| obj->ce->destructor) {
							current->ref = GC_MAKE_DTOR_GARBAGE(obj);
							GC_REF_SET_COLOR(obj, GC_PURPLE);
						} else {
							/* Default dtor and no userland destructor:
							 * nothing would be called, just flag it done. */
							GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
						}
					}
				}
				current++;
				idx++;
			}

			/* Remove nested data for objects on which a destructor will be called.
			 * This will not remove the objects themselves, as they have been colored
			 * purple. */
			idx = GC_FIRST_ROOT;
			current = GC_IDX2PTR(GC_FIRST_ROOT);
			while (idx != end) {
				if (GC_IS_DTOR_GARBAGE(current->ref)) {
					p = GC_GET_PTR(current->ref);
					count -= gc_remove_nested_data_from_buffer(p, current, &stack);
				}
				current++;
				idx++;
			}

			/* Actually call destructors. */
			zend_hrtime_t dtor_start_time = zend_hrtime();
			if (EXPECTED(!EG(active_fiber))) {
				gc_call_destructors(GC_FIRST_ROOT, end, NULL);
			} else {
				/* A fiber is active: isolate destructor suspension in a
				 * dedicated destructor fiber. */
				gc_call_destructors_in_fiber(end);
			}
			GC_G(dtor_time) += zend_hrtime() - dtor_start_time;

			if (GC_G(gc_protected)) {
				/* something went wrong */
				zend_get_gc_buffer_release();
				GC_G(collector_time) += zend_hrtime() - start_time;
				return 0;
			}
		}

		gc_stack_free(&stack);

		/* Destroy zvals. The root buffer may be reallocated. */
		GC_TRACE("Destroying zvals");
		zend_hrtime_t free_start_time = zend_hrtime();
		idx = GC_FIRST_ROOT;
		while (idx != end) {
			current = GC_IDX2PTR(idx);
			if (GC_IS_GARBAGE(current->ref)) {
				p = GC_GET_PTR(current->ref);
				GC_TRACE_REF(p, "destroying");
				if (GC_TYPE(p) == IS_OBJECT) {
					zend_object *obj = (zend_object*)p;

					EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
					GC_TYPE_INFO(obj) = GC_NULL |
						(GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
					/* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
					current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
					if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
						GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
						GC_ADDREF(obj);
						obj->handlers->free_obj(obj);
						GC_DELREF(obj);
					}

					ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
				} else if (GC_TYPE(p) == IS_ARRAY) {
					zend_array *arr = (zend_array*)p;

					GC_TYPE_INFO(arr) = GC_NULL |
						(GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);

					/* GC may destroy arrays with rc>1. This is valid and safe. */
					HT_ALLOW_COW_VIOLATION(arr);

					zend_hash_destroy(arr);
				}
			}
			idx++;
		}

		/* Free objects */
		current = GC_IDX2PTR(GC_FIRST_ROOT);
		last = GC_IDX2PTR(end);
		while (current != last) {
			if (GC_IS_GARBAGE(current->ref)) {
				p = GC_GET_PTR(current->ref);
				GC_LINK_UNUSED(current);
				GC_G(num_roots)--;
				efree(p);
			}
			current++;
		}

		GC_G(free_time) += zend_hrtime() - free_start_time;

		GC_TRACE("Collection finished");
		GC_G(collected) += count;
		total_count += count;
		GC_G(gc_active) = 0;
	}

	gc_compact();

	/* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
	 * up. We do this only once: If we encounter more destructors on the second run, we'll not
	 * run GC another time. */
	if (should_rerun_gc && !did_rerun_gc) {
		did_rerun_gc = true;
		goto rerun_gc;
	}

finish:
	zend_get_gc_buffer_release();

	/* Prevent GC from running during zend_gc_check_root_tmpvars, before
	 * gc_threshold is adjusted, as this may result in unbounded recursion */
	GC_G(gc_active) = 1;
	zend_gc_check_root_tmpvars();
	GC_G(gc_active) = 0;

	GC_G(collector_time) += zend_hrtime() - start_time;
	return total_count;
}
2174 | | |
/* Snapshot the current GC statistics/state into *status (consumed by the
 * userland gc_status() function). */
ZEND_API void zend_gc_get_status(zend_gc_status *status)
{
	status->active = GC_G(gc_active);
	status->gc_protected = GC_G(gc_protected);
	status->full = GC_G(gc_full);
	status->runs = GC_G(gc_runs);
	status->collected = GC_G(collected);
	status->threshold = GC_G(gc_threshold);
	status->buf_size = GC_G(buf_size);
	status->num_roots = GC_G(num_roots);
	/* Wall-clock time since the GC was activated. */
	status->application_time = zend_hrtime() - GC_G(activated_at);
	status->collector_time = GC_G(collector_time);
	status->dtor_time = GC_G(dtor_time);
	status->free_time = GC_G(free_time);
}
2190 | | |
2191 | 64.5k | ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) { |
2192 | | /* There can only be one get_gc() call active at a time, |
2193 | | * so there only needs to be one buffer. */ |
2194 | 64.5k | zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer); |
2195 | 64.5k | gc_buffer->cur = gc_buffer->start; |
2196 | 64.5k | return gc_buffer; |
2197 | 64.5k | } |
2198 | | |
2199 | 2.08k | ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) { |
2200 | 2.08k | size_t old_capacity = gc_buffer->end - gc_buffer->start; |
2201 | 2.08k | size_t new_capacity = old_capacity == 0 ? 64 : old_capacity * 2; |
2202 | 2.08k | gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval)); |
2203 | 2.08k | gc_buffer->end = gc_buffer->start + new_capacity; |
2204 | 2.08k | gc_buffer->cur = gc_buffer->start + old_capacity; |
2205 | 2.08k | } |
2206 | | |
2207 | 791k | static void zend_get_gc_buffer_release(void) { |
2208 | 791k | zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer); |
2209 | 791k | efree(gc_buffer->start); |
2210 | 791k | gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL; |
2211 | 791k | } |
2212 | | |
/* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
 * cycles. However, there are some rare exceptions where this is possible, in which case we rely
 * on the producing code to root the value. If a GC run occurs between the rooting and consumption
 * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
static void zend_gc_check_root_tmpvars(void) {
	zend_execute_data *ex = EG(current_execute_data);
	/* Walk every frame on the VM call stack. */
	for (; ex; ex = ex->prev_execute_data) {
		zend_function *func = ex->func;
		/* Only user code has live-range information. */
		if (!func || !ZEND_USER_CODE(func->type)) {
			continue;
		}

		/* Current opcode offset within this frame's op array. */
		uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
		for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
			const zend_live_range *range = &func->op_array.live_range[i];
			if (range->start > op_num) {
				/* Ranges are ordered by start; none further can be live. */
				break;
			}
			if (range->end <= op_num) {
				continue;
			}

			uint32_t kind = range->var & ZEND_LIVE_MASK;
			if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
				uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
				zval *var = ZEND_CALL_VAR(ex, var_num);
				if (Z_COLLECTABLE_P(var)) {
					gc_check_possible_root(Z_COUNTED_P(var));
				}
			}
		}
	}
}
2246 | | |
2247 | 21.3k | static void zend_gc_remove_root_tmpvars(void) { |
2248 | 21.3k | zend_execute_data *ex = EG(current_execute_data); |
2249 | 26.7k | for (; ex; ex = ex->prev_execute_data) { |
2250 | 5.46k | zend_function *func = ex->func; |
2251 | 5.46k | if (!func || !ZEND_USER_CODE(func->type)) { |
2252 | 3.46k | continue; |
2253 | 3.46k | } |
2254 | | |
2255 | 2.00k | uint32_t op_num = ex->opline - ex->func->op_array.opcodes; |
2256 | 8.76k | for (uint32_t i = 0; i < func->op_array.last_live_range; i++) { |
2257 | 6.98k | const zend_live_range *range = &func->op_array.live_range[i]; |
2258 | 6.98k | if (range->start > op_num) { |
2259 | 223 | break; |
2260 | 223 | } |
2261 | 6.76k | if (range->end <= op_num) { |
2262 | 6.52k | continue; |
2263 | 6.52k | } |
2264 | | |
2265 | 241 | uint32_t kind = range->var & ZEND_LIVE_MASK; |
2266 | 241 | if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) { |
2267 | 241 | uint32_t var_num = range->var & ~ZEND_LIVE_MASK; |
2268 | 241 | zval *var = ZEND_CALL_VAR(ex, var_num); |
2269 | 241 | if (Z_COLLECTABLE_P(var)) { |
2270 | 241 | GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var)); |
2271 | 241 | } |
2272 | 241 | } |
2273 | 241 | } |
2274 | 2.00k | } |
2275 | 21.3k | } |
2276 | | |
#if GC_BENCH
/* Print the GC_BENCH instrumentation counters gathered during this run to
 * stderr (only compiled in when GC_BENCH is enabled). */
void gc_bench_print(void)
{
	fprintf(stderr, "GC Statistics\n");
	fprintf(stderr, "-------------\n");
	fprintf(stderr, "Runs:               %d\n", GC_G(gc_runs));
	fprintf(stderr, "Collected:          %d\n", GC_G(collected));
	fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length));
	fprintf(stderr, "Root buffer peak:   %d\n\n", GC_G(root_buf_peak));
	fprintf(stderr, "      Possible            Remove from  Marked\n");
	fprintf(stderr, "        Root    Buffered     buffer      grey\n");
	fprintf(stderr, "      --------  --------  -----------  ------\n");
	fprintf(stderr, "ZVAL  %8d  %8d  %9d  %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey));
}
#endif
2292 | | |
#ifdef ZTS
/* Size of the per-thread GC globals structure; used by the thread-safe
 * resource manager to allocate per-thread storage. */
size_t zend_gc_globals_size(void)
{
	return sizeof(zend_gc_globals);
}
#endif
2299 | | |
/* Body of the internal destructor fiber. Repeatedly runs the destructor
 * range published in GC_G(dtor_idx)/GC_G(dtor_end), then suspends itself
 * until the next GC run resumes it. Exits when a destructor suspended the
 * fiber (a replacement fiber takes over) or when the fiber is destroyed
 * during shutdown. */
static ZEND_FUNCTION(gc_destructor_fiber)
{
	uint32_t idx, end;

	zend_fiber *fiber = GC_G(dtor_fiber);
	ZEND_ASSERT(fiber != NULL);
	ZEND_ASSERT(fiber == EG(active_fiber));

	for (;;) {
		GC_G(dtor_fiber_running) = true;

		idx = GC_G(dtor_idx);
		end = GC_G(dtor_end);
		if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) {
			/* We resumed after being suspended by a destructor */
			return;
		}

		/* We have called all destructors. Suspend fiber until the next GC run
		 */
		GC_G(dtor_fiber_running) = false;
		zend_fiber_suspend(fiber, NULL, NULL);

		if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) {
			/* Fiber is being destroyed by shutdown sequence */
			if (GC_G(dtor_fiber) == fiber) {
				GC_G(dtor_fiber) = NULL;
			}
			GC_DELREF(&fiber->std);
			gc_check_possible_root((zend_refcounted*)&fiber->std.gc);
			return;
		}
	}
}
2334 | | |
/* Internal function descriptor installed as the destructor fiber's handler
 * (see gc_create_destructor_fiber()); its name is interned in gc_init(). */
static zend_internal_function gc_destructor_fiber = {
	.type = ZEND_INTERNAL_FUNCTION,
	.fn_flags = ZEND_ACC_PUBLIC,
	.handler = ZEND_FN(gc_destructor_fiber),
};
2340 | | |
2341 | | void gc_init(void) |
2342 | 16 | { |
2343 | 16 | gc_destructor_fiber.function_name = zend_string_init_interned( |
2344 | 16 | "gc_destructor_fiber", |
2345 | 16 | strlen("gc_destructor_fiber"), |
2346 | | true); |
2347 | 16 | } |