/src/php-src/Zend/zend_gc.c
Line | Count | Source |
1 | | /* |
2 | | +----------------------------------------------------------------------+ |
3 | | | Zend Engine | |
4 | | +----------------------------------------------------------------------+ |
5 | | | Copyright (c) Zend Technologies Ltd. (http://www.zend.com) | |
6 | | +----------------------------------------------------------------------+ |
7 | | | This source file is subject to version 2.00 of the Zend license, | |
8 | | | that is bundled with this package in the file LICENSE, and is | |
9 | | | available through the world-wide-web at the following url: | |
10 | | | http://www.zend.com/license/2_00.txt. | |
11 | | | If you did not receive a copy of the Zend license and are unable to | |
12 | | | obtain it through the world-wide-web, please send a note to | |
13 | | | license@zend.com so we can mail you a copy immediately. | |
14 | | +----------------------------------------------------------------------+ |
15 | | | Authors: David Wang <planetbeing@gmail.com> | |
16 | | | Dmitry Stogov <dmitry@php.net> | |
17 | | +----------------------------------------------------------------------+ |
18 | | */ |
19 | | |
20 | | /** |
21 | | * zend_gc_collect_cycles |
22 | | * ====================== |
23 | | * |
24 | | * Colors and its meaning |
25 | | * ---------------------- |
26 | | * |
27 | | * BLACK (GC_BLACK) - In use or free. |
28 | | * GREY (GC_GREY) - Possible member of cycle. |
29 | | * WHITE (GC_WHITE) - Member of garbage cycle. |
30 | | * PURPLE (GC_PURPLE) - Possible root of cycle. |
31 | | * |
32 | | * Colors described in the paper but not used |
33 | | * ------------------------------------------ |
34 | | * |
35 | | * GREEN - Acyclic |
36 | | * RED - Candidate cycle undergoing |
37 | | * ORANGE - Candidate cycle awaiting epoch boundary. |
38 | | * |
39 | | * |
40 | | * Flow |
41 | | * ===== |
42 | | * |
43 | | * The garbage collect cycle starts from 'gc_mark_roots', which traverses the |
44 | | * possible roots, and calls mark_grey for roots are marked purple with |
45 | | * depth-first traverse. |
46 | | * |
47 | | * After all possible roots are traversed and marked, |
48 | | * gc_scan_roots will be called, and each root will be called with |
49 | | * gc_scan(root->ref) |
50 | | * |
51 | | * gc_scan checks the colors of possible members. |
52 | | * |
53 | | * If the node is marked as grey and the refcount > 0 |
54 | | * gc_scan_black will be called on that node to scan it's subgraph. |
55 | | * otherwise (refcount == 0), it marks the node white. |
56 | | * |
57 | | * A node MAY be added to possible roots when ZEND_UNSET_VAR happens or |
58 | | * zend_assign_to_variable is called only when possible garbage node is |
59 | | * produced. |
60 | | * gc_possible_root() will be called to add the nodes to possible roots. |
61 | | * |
62 | | * |
63 | | * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to |
64 | | * get the object properties to scan. |
65 | | * |
66 | | * |
67 | | * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf |
68 | | */ |
69 | | #include "zend.h" |
70 | | #include "zend_API.h" |
71 | | #include "zend_compile.h" |
72 | | #include "zend_errors.h" |
73 | | #include "zend_fibers.h" |
74 | | #include "zend_hrtime.h" |
75 | | #include "zend_portability.h" |
76 | | #include "zend_types.h" |
77 | | #include "zend_weakrefs.h" |
78 | | #include "zend_string.h" |
79 | | |
80 | | #ifndef GC_BENCH |
81 | | # define GC_BENCH 0 |
82 | | #endif |
83 | | |
84 | | #ifndef ZEND_GC_DEBUG |
85 | | # define ZEND_GC_DEBUG 0 |
86 | | #endif |
87 | | |
88 | | /* GC_INFO layout */ |
89 | 2.44M | #define GC_ADDRESS 0x0fffffu |
90 | 12.9M | #define GC_COLOR 0x300000u |
91 | | |
92 | | #define GC_BLACK 0x000000u /* must be zero */ |
93 | | #define GC_WHITE 0x100000u |
94 | | #define GC_GREY 0x200000u |
95 | | #define GC_PURPLE 0x300000u |
96 | | |
97 | | /* Debug tracing */ |
98 | | #if ZEND_GC_DEBUG > 1 |
99 | | # define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__); |
100 | | # define GC_TRACE_REF(ref, format, ...) \ |
101 | | do { \ |
102 | | gc_trace_ref((zend_refcounted *) ref); \ |
103 | | fprintf(stderr, format "\n", ##__VA_ARGS__); \ |
104 | | } while (0) |
105 | | # define GC_TRACE_SET_COLOR(ref, color) \ |
106 | | GC_TRACE_REF(ref, "->%s", gc_color_name(color)) |
107 | | #else |
108 | | # define GC_TRACE_REF(ref, format, ...) |
109 | | # define GC_TRACE_SET_COLOR(ref, new_color) |
110 | | # define GC_TRACE(str) |
111 | | #endif |
112 | | |
113 | | /* GC_INFO access */ |
114 | | #define GC_REF_ADDRESS(ref) \ |
115 | 2.44M | (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT) |
116 | | |
117 | | #define GC_REF_COLOR(ref) \ |
118 | | (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT) |
119 | | |
120 | | #define GC_REF_CHECK_COLOR(ref, color) \ |
121 | 9.31M | ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT)) |
122 | | |
123 | 5.00M | #define GC_REF_SET_INFO(ref, info) do { \ |
124 | 5.00M | GC_TYPE_INFO(ref) = \ |
125 | 5.00M | (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \ |
126 | 5.00M | ((info) << GC_INFO_SHIFT); \ |
127 | 5.00M | } while (0) |
128 | | |
129 | 2.34M | #define GC_REF_SET_COLOR(ref, c) do { \ |
130 | 2.34M | GC_TRACE_SET_COLOR(ref, c); \ |
131 | 2.34M | GC_TYPE_INFO(ref) = \ |
132 | 2.34M | (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \ |
133 | 2.34M | ((c) << GC_INFO_SHIFT); \ |
134 | 2.34M | } while (0) |
135 | | |
136 | 1.24M | #define GC_REF_SET_BLACK(ref) do { \ |
137 | 1.24M | GC_TRACE_SET_COLOR(ref, GC_BLACK); \ |
138 | 1.24M | GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \ |
139 | 1.24M | } while (0) |
140 | | |
141 | | #define GC_REF_SET_PURPLE(ref) do { \ |
142 | | GC_TRACE_SET_COLOR(ref, GC_PURPLE); \ |
143 | | GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \ |
144 | | } while (0) |
145 | | |
146 | | /* bit stealing tags for gc_root_buffer.ref */ |
147 | 1.99M | #define GC_BITS 0x3 |
148 | | |
149 | 463k | #define GC_ROOT 0x0 /* possible root of circular garbage */ |
150 | 2.61M | #define GC_UNUSED 0x1 /* part of linked list of unused buffers */ |
151 | 2.03M | #define GC_GARBAGE 0x2 /* garbage to delete */ |
152 | 12.8k | #define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */ |
153 | | |
154 | | #define GC_GET_PTR(ptr) \ |
155 | 75.3k | ((void*)(((uintptr_t)(ptr)) & ~GC_BITS)) |
156 | | |
157 | | #define GC_IS_ROOT(ptr) \ |
158 | 463k | ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT) |
159 | | #define GC_IS_UNUSED(ptr) \ |
160 | 105k | ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED) |
161 | | #define GC_IS_GARBAGE(ptr) \ |
162 | 1.34M | ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE) |
163 | | #define GC_IS_DTOR_GARBAGE(ptr) \ |
164 | 9.98k | ((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE) |
165 | | |
166 | | #define GC_MAKE_GARBAGE(ptr) \ |
167 | 696k | ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE)) |
168 | | #define GC_MAKE_DTOR_GARBAGE(ptr) \ |
169 | 2.82k | ((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE)) |
170 | | |
171 | | /* GC address conversion */ |
172 | 6.68M | #define GC_IDX2PTR(idx) (GC_G(buf) + (idx)) |
173 | 2.51M | #define GC_PTR2IDX(ptr) ((ptr) - GC_G(buf)) |
174 | | |
175 | | /* Get the value to be placed in an unused buffer entry with the specified next unused list index */ |
176 | 2.51M | #define GC_IDX2LIST(idx) ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED)) |
177 | | /* Get the index of the next item in the unused list from the given root buffer entry. */ |
178 | 664k | #define GC_LIST2IDX(list) (((uint32_t)(uintptr_t)(list)) / sizeof(void*)) |
179 | | |
180 | | /* GC buffers */ |
181 | 1.15M | #define GC_INVALID 0 |
182 | 1.62M | #define GC_FIRST_ROOT 1 |
183 | | |
184 | 16 | #define GC_DEFAULT_BUF_SIZE (16 * 1024) |
185 | 1 | #define GC_BUF_GROW_STEP (128 * 1024) |
186 | | |
187 | 0 | #define GC_MAX_UNCOMPRESSED (512 * 1024) |
188 | 2 | #define GC_MAX_BUF_SIZE 0x40000000 |
189 | | |
190 | 16 | #define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT) |
191 | 0 | #define GC_THRESHOLD_STEP 10000 |
192 | 0 | #define GC_THRESHOLD_MAX 1000000000 |
193 | 0 | #define GC_THRESHOLD_TRIGGER 100 |
194 | | |
195 | | /* GC flags */ |
196 | 6.81k | #define GC_HAS_DESTRUCTORS (1<<0) |
197 | | |
198 | | /* Weak maps */ |
199 | 1.55k | #define Z_FROM_WEAKMAP_KEY (1<<0) |
200 | 1.51k | #define Z_FROM_WEAKMAP (1<<1) |
201 | | |
202 | | /* The WeakMap entry zv is reachable from roots by following the virtual |
203 | | * reference from the a WeakMap key to the entry */ |
204 | | #define GC_FROM_WEAKMAP_KEY(zv) \ |
205 | 387 | (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT)) |
206 | | |
207 | 574 | #define GC_SET_FROM_WEAKMAP_KEY(zv) do { \ |
208 | 574 | zval *_z = (zv); \ |
209 | 574 | Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \ |
210 | 574 | } while (0) |
211 | | |
212 | 589 | #define GC_UNSET_FROM_WEAKMAP_KEY(zv) do { \ |
213 | 589 | zval *_z = (zv); \ |
214 | 589 | Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \ |
215 | 589 | } while (0) |
216 | | |
217 | | /* The WeakMap entry zv is reachable from roots by following the reference from |
218 | | * the WeakMap */ |
219 | | #define GC_FROM_WEAKMAP(zv) \ |
220 | 1.07k | (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT)) |
221 | | |
222 | 201 | #define GC_SET_FROM_WEAKMAP(zv) do { \ |
223 | 201 | zval *_z = (zv); \ |
224 | 201 | Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \ |
225 | 201 | } while (0) |
226 | | |
227 | 240 | #define GC_UNSET_FROM_WEAKMAP(zv) do { \ |
228 | 240 | zval *_z = (zv); \ |
229 | 240 | Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \ |
230 | 240 | } while (0) |
231 | | |
232 | | /* unused buffers */ |
233 | | |
234 | | /* Are there any unused root buffer entries? */ |
235 | | #define GC_HAS_UNUSED() \ |
236 | 559k | (GC_G(unused) != GC_INVALID) |
237 | | |
238 | | /* Get the next unused entry and remove it from the list */ |
239 | | #define GC_FETCH_UNUSED() \ |
240 | 664k | gc_fetch_unused() |
241 | | |
242 | | /* Add a root buffer entry to the unused list */ |
243 | | #define GC_LINK_UNUSED(root) \ |
244 | 2.51M | gc_link_unused(root) |
245 | | |
246 | | #define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \ |
247 | | (GC_G(first_unused) < GC_G(gc_threshold)) |
248 | | #define GC_HAS_NEXT_UNUSED() \ |
249 | 559k | (GC_G(first_unused) != GC_G(buf_size)) |
250 | | #define GC_FETCH_NEXT_UNUSED() \ |
251 | 1.84M | gc_fetch_next_unused() |
252 | | |
253 | | ZEND_API int (*gc_collect_cycles)(void); |
254 | | |
255 | | /* The type of a root buffer entry. |
256 | | * |
257 | | * The lower two bits are used for flags and need to be masked out to |
258 | | * reconstruct a pointer. |
259 | | * |
260 | | * When a node in the root buffer is removed, the non-flag bits of the |
261 | | * unused entry are used to store the index of the next entry in the unused |
262 | | * list. */ |
263 | | typedef struct _gc_root_buffer { |
264 | | zend_refcounted *ref; |
265 | | } gc_root_buffer; |
266 | | |
267 | | typedef struct _zend_gc_globals { |
268 | | /* The root buffer, which stores possible roots of reference cycles. It is |
269 | | * also used to store garbage to be collected at the end of a run. |
270 | | * A single array which is reallocated as necessary. */ |
271 | | gc_root_buffer *buf; |
272 | | |
273 | | bool gc_enabled; |
274 | | bool gc_active; /* GC currently running, forbid nested GC */ |
275 | | bool gc_protected; /* GC protected, forbid root additions */ |
276 | | bool gc_full; |
277 | | |
278 | | uint32_t unused; /* linked list of unused buffers */ |
279 | | uint32_t first_unused; /* first unused buffer */ |
280 | | uint32_t gc_threshold; /* GC collection threshold */ |
281 | | uint32_t buf_size; /* size of the GC buffer */ |
282 | | uint32_t num_roots; /* number of roots in GC buffer */ |
283 | | |
284 | | uint32_t gc_runs; /* number of GC runs since reset */ |
285 | | uint32_t collected; /* number of collected nodes since reset */ |
286 | | |
287 | | zend_hrtime_t activated_at; /* the timestamp of the last reset */ |
288 | | zend_hrtime_t collector_time; /* time spent running GC (ns) */ |
289 | | zend_hrtime_t dtor_time; /* time spent calling destructors (ns) */ |
290 | | zend_hrtime_t free_time; /* time spent destroying nodes and freeing memory (ns) */ |
291 | | |
292 | | uint32_t dtor_idx; /* root buffer index */ |
293 | | uint32_t dtor_end; |
294 | | zend_fiber *dtor_fiber; |
295 | | bool dtor_fiber_running; |
296 | | |
297 | | #if GC_BENCH |
298 | | uint32_t root_buf_length; |
299 | | uint32_t root_buf_peak; |
300 | | uint32_t zval_possible_root; |
301 | | uint32_t zval_buffered; |
302 | | uint32_t zval_remove_from_buffer; |
303 | | uint32_t zval_marked_grey; |
304 | | #endif |
305 | | } zend_gc_globals; |
306 | | |
307 | | #ifdef ZTS |
308 | | static int gc_globals_id; |
309 | | static size_t gc_globals_offset; |
310 | | #define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v) |
311 | | #else |
312 | 37.1M | #define GC_G(v) (gc_globals.v) |
313 | | static zend_gc_globals gc_globals; |
314 | | #endif |
315 | | |
316 | | #if GC_BENCH |
317 | | # define GC_BENCH_INC(counter) GC_G(counter)++ |
318 | | # define GC_BENCH_DEC(counter) GC_G(counter)-- |
319 | | # define GC_BENCH_PEAK(peak, counter) do { \ |
320 | | if (GC_G(counter) > GC_G(peak)) { \ |
321 | | GC_G(peak) = GC_G(counter); \ |
322 | | } \ |
323 | | } while (0) |
324 | | #else |
325 | | # define GC_BENCH_INC(counter) |
326 | | # define GC_BENCH_DEC(counter) |
327 | | # define GC_BENCH_PEAK(peak, counter) |
328 | | #endif |
329 | | |
330 | | |
331 | 2.47k | #define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2) |
332 | | |
333 | | typedef struct _gc_stack gc_stack; |
334 | | |
335 | | /* The stack used for graph traversal is stored as a linked list of segments */ |
336 | | struct _gc_stack { |
337 | | gc_stack *prev; |
338 | | gc_stack *next; |
339 | | zend_refcounted *data[GC_STACK_SEGMENT_SIZE]; |
340 | | }; |
341 | | |
342 | | #define GC_STACK_DCL(init) \ |
343 | 170k | gc_stack *_stack = init; \ |
344 | 170k | size_t _top = 0; |
345 | | |
346 | | #define GC_STACK_PUSH(ref) \ |
347 | 1.73M | gc_stack_push(&_stack, &_top, ref); |
348 | | |
349 | | #define GC_STACK_POP() \ |
350 | 1.90M | gc_stack_pop(&_stack, &_top) |
351 | | |
352 | | static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack) |
353 | 18.9k | { |
354 | 18.9k | if (UNEXPECTED(!stack->next)) { |
355 | 18.1k | gc_stack *segment = emalloc(sizeof(gc_stack)); |
356 | 18.1k | segment->prev = stack; |
357 | 18.1k | segment->next = NULL; |
358 | 18.1k | stack->next = segment; |
359 | 18.1k | } |
360 | 18.9k | return stack->next; |
361 | 18.9k | } |
362 | | |
363 | | static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref) |
364 | 1.73M | { |
365 | 1.73M | if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) { |
366 | 1.23k | (*stack) = gc_stack_next(*stack); |
367 | 1.23k | (*top) = 0; |
368 | 1.23k | } |
369 | 1.73M | (*stack)->data[(*top)++] = ref; |
370 | 1.73M | } |
371 | | |
372 | | static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top) |
373 | 1.90M | { |
374 | 1.90M | if (UNEXPECTED((*top) == 0)) { |
375 | 171k | if (!(*stack)->prev) { |
376 | 170k | return NULL; |
377 | 170k | } else { |
378 | 1.23k | (*stack) = (*stack)->prev; |
379 | 1.23k | (*top) = GC_STACK_SEGMENT_SIZE - 1; |
380 | 1.23k | return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1]; |
381 | 1.23k | } |
382 | 1.73M | } else { |
383 | 1.73M | return (*stack)->data[--(*top)]; |
384 | 1.73M | } |
385 | 1.90M | } |
386 | | |
387 | | static void gc_stack_free(gc_stack *stack) |
388 | 20.9k | { |
389 | 20.9k | gc_stack *p = stack->next; |
390 | | |
391 | 39.0k | while (p) { |
392 | 18.0k | stack = p->next; |
393 | 18.0k | efree(p); |
394 | 18.0k | p = stack; |
395 | 18.0k | } |
396 | 20.9k | } |
397 | | |
398 | | /* Map a full index to a compressed index. |
399 | | * |
400 | | * The root buffer can have up to 2^30 entries, but we only have 20 bits to |
401 | | * store the index. So we use the 1<<19 bit as a compression flag and use the |
402 | | * other 19 bits to store the index modulo 2^19. */ |
403 | | static zend_always_inline uint32_t gc_compress(uint32_t idx) |
404 | 2.52M | { |
405 | 2.52M | if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) { |
406 | 2.52M | return idx; |
407 | 2.52M | } |
408 | 0 | return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED; |
409 | 2.52M | } |
410 | | |
411 | | /* Find the root buffer entry given a pointer and a compressed index. |
412 | | * Iterate through the root buffer in steps of 2^19 until the pointer |
413 | | * matches. */ |
414 | | static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx) |
415 | 0 | { |
416 | 0 | gc_root_buffer *root = GC_IDX2PTR(idx); |
417 | |
|
418 | 0 | if (EXPECTED(GC_GET_PTR(root->ref) == ref)) { |
419 | 0 | return root; |
420 | 0 | } |
421 | | |
422 | 0 | while (1) { |
423 | 0 | idx += GC_MAX_UNCOMPRESSED; |
424 | 0 | ZEND_ASSERT(idx < GC_G(first_unused)); |
425 | 0 | root = GC_IDX2PTR(idx); |
426 | 0 | if (GC_GET_PTR(root->ref) == ref) { |
427 | 0 | return root; |
428 | 0 | } |
429 | 0 | } |
430 | 0 | } |
431 | | |
432 | | /* Get the index of the next unused root buffer entry, and remove it from the |
433 | | * unused list. GC_HAS_UNUSED() must be true before calling this. */ |
434 | | static zend_always_inline uint32_t gc_fetch_unused(void) |
435 | 664k | { |
436 | 664k | uint32_t idx; |
437 | 664k | gc_root_buffer *root; |
438 | | |
439 | 664k | ZEND_ASSERT(GC_HAS_UNUSED()); |
440 | 664k | idx = GC_G(unused); |
441 | 664k | root = GC_IDX2PTR(idx); |
442 | 664k | ZEND_ASSERT(GC_IS_UNUSED(root->ref)); |
443 | 664k | GC_G(unused) = GC_LIST2IDX(root->ref); |
444 | 664k | return idx; |
445 | 664k | } |
446 | | |
447 | | /* Add a root buffer entry to the unused list */ |
448 | | static zend_always_inline void gc_link_unused(gc_root_buffer *root) |
449 | 2.51M | { |
450 | 2.51M | root->ref = GC_IDX2LIST(GC_G(unused)); |
451 | 2.51M | GC_G(unused) = GC_PTR2IDX(root); |
452 | 2.51M | } |
453 | | |
454 | | static zend_always_inline uint32_t gc_fetch_next_unused(void) |
455 | 1.84M | { |
456 | 1.84M | uint32_t idx; |
457 | | |
458 | 1.84M | ZEND_ASSERT(GC_HAS_NEXT_UNUSED()); |
459 | 1.84M | idx = GC_G(first_unused); |
460 | 1.84M | GC_G(first_unused) = GC_G(first_unused) + 1; |
461 | 1.84M | return idx; |
462 | 1.84M | } |
463 | | |
464 | | #if ZEND_GC_DEBUG > 1 |
465 | | static const char *gc_color_name(uint32_t color) { |
466 | | switch (color) { |
467 | | case GC_BLACK: return "black"; |
468 | | case GC_WHITE: return "white"; |
469 | | case GC_GREY: return "grey"; |
470 | | case GC_PURPLE: return "purple"; |
471 | | default: return "unknown"; |
472 | | } |
473 | | } |
474 | | static void gc_trace_ref(zend_refcounted *ref) { |
475 | | if (GC_TYPE(ref) == IS_OBJECT) { |
476 | | zend_object *obj = (zend_object *) ref; |
477 | | fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ", |
478 | | ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref), |
479 | | gc_color_name(GC_REF_COLOR(ref)), |
480 | | obj->ce->name->val, obj->handle); |
481 | | } else if (GC_TYPE(ref) == IS_ARRAY) { |
482 | | zend_array *arr = (zend_array *) ref; |
483 | | fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ", |
484 | | ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref), |
485 | | gc_color_name(GC_REF_COLOR(ref)), |
486 | | zend_hash_num_elements(arr)); |
487 | | } else { |
488 | | fprintf(stderr, "[%p] rc=%d addr=%d %s %s ", |
489 | | ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref), |
490 | | gc_color_name(GC_REF_COLOR(ref)), |
491 | | GC_TYPE(ref) == IS_REFERENCE |
492 | | ? "reference" : zend_get_type_by_const(GC_TYPE(ref))); |
493 | | } |
494 | | } |
495 | | #endif |
496 | | |
497 | | /* Mark a root buffer entry unused */ |
498 | | static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root) |
499 | 2.48M | { |
500 | 2.48M | GC_LINK_UNUSED(root); |
501 | 2.48M | GC_G(num_roots)--; |
502 | 2.48M | GC_BENCH_DEC(root_buf_length); |
503 | 2.48M | } |
504 | | |
505 | | static void root_buffer_dtor(zend_gc_globals *gc_globals) |
506 | 0 | { |
507 | 0 | if (gc_globals->buf) { |
508 | 0 | free(gc_globals->buf); |
509 | 0 | gc_globals->buf = NULL; |
510 | 0 | } |
511 | 0 | } |
512 | | |
513 | | static void gc_globals_ctor_ex(zend_gc_globals *gc_globals) |
514 | 16 | { |
515 | 16 | gc_globals->gc_enabled = false; |
516 | 16 | gc_globals->gc_active = false; |
517 | 16 | gc_globals->gc_protected = true; |
518 | 16 | gc_globals->gc_full = false; |
519 | | |
520 | 16 | gc_globals->buf = NULL; |
521 | 16 | gc_globals->unused = GC_INVALID; |
522 | 16 | gc_globals->first_unused = GC_INVALID; |
523 | 16 | gc_globals->gc_threshold = GC_INVALID; |
524 | 16 | gc_globals->buf_size = GC_INVALID; |
525 | 16 | gc_globals->num_roots = 0; |
526 | | |
527 | 16 | gc_globals->gc_runs = 0; |
528 | 16 | gc_globals->collected = 0; |
529 | 16 | gc_globals->collector_time = 0; |
530 | 16 | gc_globals->dtor_time = 0; |
531 | 16 | gc_globals->free_time = 0; |
532 | 16 | gc_globals->activated_at = 0; |
533 | | |
534 | 16 | gc_globals->dtor_idx = GC_FIRST_ROOT; |
535 | 16 | gc_globals->dtor_end = 0; |
536 | 16 | gc_globals->dtor_fiber = NULL; |
537 | 16 | gc_globals->dtor_fiber_running = false; |
538 | | |
539 | | #if GC_BENCH |
540 | | gc_globals->root_buf_length = 0; |
541 | | gc_globals->root_buf_peak = 0; |
542 | | gc_globals->zval_possible_root = 0; |
543 | | gc_globals->zval_buffered = 0; |
544 | | gc_globals->zval_remove_from_buffer = 0; |
545 | | gc_globals->zval_marked_grey = 0; |
546 | | #endif |
547 | 16 | } |
548 | | |
549 | | void gc_globals_ctor(void) |
550 | 16 | { |
551 | | #ifdef ZTS |
552 | | ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor); |
553 | | #else |
554 | 16 | gc_globals_ctor_ex(&gc_globals); |
555 | 16 | #endif |
556 | 16 | } |
557 | | |
558 | | void gc_globals_dtor(void) |
559 | 0 | { |
560 | 0 | #ifndef ZTS |
561 | 0 | root_buffer_dtor(&gc_globals); |
562 | 0 | #endif |
563 | 0 | } |
564 | | |
565 | | void gc_reset(void) |
566 | 247k | { |
567 | 247k | if (GC_G(buf)) { |
568 | 247k | GC_G(gc_active) = 0; |
569 | 247k | GC_G(gc_protected) = 0; |
570 | 247k | GC_G(gc_full) = 0; |
571 | 247k | GC_G(unused) = GC_INVALID; |
572 | 247k | GC_G(first_unused) = GC_FIRST_ROOT; |
573 | 247k | GC_G(num_roots) = 0; |
574 | | |
575 | 247k | GC_G(gc_runs) = 0; |
576 | 247k | GC_G(collected) = 0; |
577 | | |
578 | 247k | GC_G(collector_time) = 0; |
579 | 247k | GC_G(dtor_time) = 0; |
580 | 247k | GC_G(free_time) = 0; |
581 | | |
582 | 247k | GC_G(dtor_idx) = GC_FIRST_ROOT; |
583 | 247k | GC_G(dtor_end) = 0; |
584 | 247k | GC_G(dtor_fiber) = NULL; |
585 | 247k | GC_G(dtor_fiber_running) = false; |
586 | | |
587 | | #if GC_BENCH |
588 | | GC_G(root_buf_length) = 0; |
589 | | GC_G(root_buf_peak) = 0; |
590 | | GC_G(zval_possible_root) = 0; |
591 | | GC_G(zval_buffered) = 0; |
592 | | GC_G(zval_remove_from_buffer) = 0; |
593 | | GC_G(zval_marked_grey) = 0; |
594 | | #endif |
595 | 247k | } |
596 | | |
597 | 247k | GC_G(activated_at) = zend_hrtime(); |
598 | 247k | } |
599 | | |
600 | | /* Enable/disable the garbage collector. |
601 | | * Initialize globals if necessary. */ |
602 | | ZEND_API bool gc_enable(bool enable) |
603 | 115k | { |
604 | 115k | bool old_enabled = GC_G(gc_enabled); |
605 | 115k | GC_G(gc_enabled) = enable; |
606 | 115k | if (enable && !old_enabled && GC_G(buf) == NULL) { |
607 | 16 | GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1); |
608 | 16 | GC_G(buf)[0].ref = NULL; |
609 | 16 | GC_G(buf_size) = GC_DEFAULT_BUF_SIZE; |
610 | 16 | GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT; |
611 | 16 | gc_reset(); |
612 | 16 | } |
613 | 115k | return old_enabled; |
614 | 115k | } |
615 | | |
616 | | ZEND_API bool gc_enabled(void) |
617 | 104 | { |
618 | 104 | return GC_G(gc_enabled); |
619 | 104 | } |
620 | | |
621 | | /* Protect the GC root buffer (prevent additions) */ |
622 | | ZEND_API bool gc_protect(bool protect) |
623 | 25.2k | { |
624 | 25.2k | bool old_protected = GC_G(gc_protected); |
625 | 25.2k | GC_G(gc_protected) = protect; |
626 | 25.2k | return old_protected; |
627 | 25.2k | } |
628 | | |
629 | | ZEND_API bool gc_protected(void) |
630 | 0 | { |
631 | 0 | return GC_G(gc_protected); |
632 | 0 | } |
633 | | |
634 | | static void gc_grow_root_buffer(void) |
635 | 1 | { |
636 | 1 | size_t new_size; |
637 | | |
638 | 1 | if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) { |
639 | 0 | if (!GC_G(gc_full)) { |
640 | 0 | zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n"); |
641 | 0 | GC_G(gc_active) = 1; |
642 | 0 | GC_G(gc_protected) = 1; |
643 | 0 | GC_G(gc_full) = 1; |
644 | 0 | return; |
645 | 0 | } |
646 | 0 | } |
647 | 1 | if (GC_G(buf_size) < GC_BUF_GROW_STEP) { |
648 | 1 | new_size = GC_G(buf_size) * 2; |
649 | 1 | } else { |
650 | 0 | new_size = GC_G(buf_size) + GC_BUF_GROW_STEP; |
651 | 0 | } |
652 | 1 | if (new_size > GC_MAX_BUF_SIZE) { |
653 | 0 | new_size = GC_MAX_BUF_SIZE; |
654 | 0 | } |
655 | 1 | GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1); |
656 | 1 | GC_G(buf_size) = new_size; |
657 | 1 | } |
658 | | |
659 | | /* Adjust the GC activation threshold given the number of nodes collected by the last run */ |
660 | | static void gc_adjust_threshold(int count) |
661 | 0 | { |
662 | 0 | uint32_t new_threshold; |
663 | | |
664 | | /* TODO Very simple heuristic for dynamic GC buffer resizing: |
665 | | * If there are "too few" collections, increase the collection threshold |
666 | | * by a fixed step */ |
667 | 0 | if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) { |
668 | | /* increase */ |
669 | 0 | if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) { |
670 | 0 | new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP; |
671 | 0 | if (new_threshold > GC_THRESHOLD_MAX) { |
672 | 0 | new_threshold = GC_THRESHOLD_MAX; |
673 | 0 | } |
674 | 0 | if (new_threshold > GC_G(buf_size)) { |
675 | 0 | gc_grow_root_buffer(); |
676 | 0 | } |
677 | 0 | if (new_threshold <= GC_G(buf_size)) { |
678 | 0 | GC_G(gc_threshold) = new_threshold; |
679 | 0 | } |
680 | 0 | } |
681 | 0 | } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) { |
682 | 0 | new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP; |
683 | 0 | if (new_threshold < GC_THRESHOLD_DEFAULT) { |
684 | 0 | new_threshold = GC_THRESHOLD_DEFAULT; |
685 | 0 | } |
686 | 0 | GC_G(gc_threshold) = new_threshold; |
687 | 0 | } |
688 | 0 | } |
689 | | |
690 | | /* Perform a GC run and then add a node as a possible root. */ |
691 | | static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref) |
692 | 0 | { |
693 | 0 | uint32_t idx; |
694 | 0 | gc_root_buffer *newRoot; |
695 | |
|
696 | 0 | ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT); |
697 | 0 | ZEND_ASSERT(GC_INFO(ref) == 0); |
698 | |
|
699 | 0 | if (GC_G(gc_enabled) && !GC_G(gc_active)) { |
700 | 0 | GC_ADDREF(ref); |
701 | 0 | gc_adjust_threshold(gc_collect_cycles()); |
702 | 0 | if (UNEXPECTED(GC_DELREF(ref) == 0)) { |
703 | 0 | rc_dtor_func(ref); |
704 | 0 | return; |
705 | 0 | } else if (UNEXPECTED(GC_INFO(ref))) { |
706 | 0 | return; |
707 | 0 | } |
708 | 0 | } |
709 | | |
710 | 0 | if (GC_HAS_UNUSED()) { |
711 | 0 | idx = GC_FETCH_UNUSED(); |
712 | 0 | } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) { |
713 | 0 | idx = GC_FETCH_NEXT_UNUSED(); |
714 | 0 | } else { |
715 | 0 | gc_grow_root_buffer(); |
716 | 0 | if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) { |
717 | 0 | return; |
718 | 0 | } |
719 | 0 | idx = GC_FETCH_NEXT_UNUSED(); |
720 | 0 | } |
721 | | |
722 | 0 | newRoot = GC_IDX2PTR(idx); |
723 | 0 | newRoot->ref = ref; /* GC_ROOT tag is 0 */ |
724 | 0 | GC_TRACE_SET_COLOR(ref, GC_PURPLE); |
725 | |
|
726 | 0 | idx = gc_compress(idx); |
727 | 0 | GC_REF_SET_INFO(ref, idx | GC_PURPLE); |
728 | 0 | GC_G(num_roots)++; |
729 | |
|
730 | 0 | GC_BENCH_INC(zval_buffered); |
731 | 0 | GC_BENCH_INC(root_buf_length); |
732 | 0 | GC_BENCH_PEAK(root_buf_peak, root_buf_length); |
733 | 0 | } |
734 | | |
735 | | /* Add a possible root node to the buffer. |
736 | | * Maybe perform a GC run. */ |
737 | | ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref) |
738 | 2.05M | { |
739 | 2.05M | uint32_t idx; |
740 | 2.05M | gc_root_buffer *newRoot; |
741 | | |
742 | 2.05M | if (UNEXPECTED(GC_G(gc_protected))) { |
743 | 96.0k | return; |
744 | 96.0k | } |
745 | | |
746 | 1.95M | GC_BENCH_INC(zval_possible_root); |
747 | | |
748 | 1.95M | if (EXPECTED(GC_HAS_UNUSED())) { |
749 | 664k | idx = GC_FETCH_UNUSED(); |
750 | 1.29M | } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) { |
751 | 1.29M | idx = GC_FETCH_NEXT_UNUSED(); |
752 | 1.29M | } else { |
753 | 0 | gc_possible_root_when_full(ref); |
754 | 0 | return; |
755 | 0 | } |
756 | | |
757 | 1.95M | ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT); |
758 | 1.95M | ZEND_ASSERT(GC_INFO(ref) == 0); |
759 | | |
760 | 1.95M | newRoot = GC_IDX2PTR(idx); |
761 | 1.95M | newRoot->ref = ref; /* GC_ROOT tag is 0 */ |
762 | 1.95M | GC_TRACE_SET_COLOR(ref, GC_PURPLE); |
763 | | |
764 | 1.95M | idx = gc_compress(idx); |
765 | 1.95M | GC_REF_SET_INFO(ref, idx | GC_PURPLE); |
766 | 1.95M | GC_G(num_roots)++; |
767 | | |
768 | 1.95M | GC_BENCH_INC(zval_buffered); |
769 | 1.95M | GC_BENCH_INC(root_buf_length); |
770 | 1.95M | GC_BENCH_PEAK(root_buf_peak, root_buf_length); |
771 | 1.95M | } |
772 | | |
773 | | /* Add an extra root during a GC run */ |
774 | | static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref) |
775 | 33 | { |
776 | 33 | uint32_t idx; |
777 | 33 | gc_root_buffer *newRoot; |
778 | | |
779 | 33 | if (EXPECTED(GC_HAS_UNUSED())) { |
780 | 0 | idx = GC_FETCH_UNUSED(); |
781 | 33 | } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) { |
782 | 33 | idx = GC_FETCH_NEXT_UNUSED(); |
783 | 33 | } else { |
784 | 0 | gc_grow_root_buffer(); |
785 | 0 | if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) { |
786 | | /* TODO: can this really happen? */ |
787 | 0 | return; |
788 | 0 | } |
789 | 0 | idx = GC_FETCH_NEXT_UNUSED(); |
790 | 0 | } |
791 | | |
792 | 33 | ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT); |
793 | 33 | ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0); |
794 | | |
795 | 33 | newRoot = GC_IDX2PTR(idx); |
796 | 33 | newRoot->ref = ref; /* GC_ROOT tag is 0 */ |
797 | | |
798 | 33 | idx = gc_compress(idx); |
799 | 33 | GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref)); |
800 | 33 | GC_G(num_roots)++; |
801 | | |
802 | 33 | GC_BENCH_INC(zval_buffered); |
803 | 33 | GC_BENCH_INC(root_buf_length); |
804 | 33 | GC_BENCH_PEAK(root_buf_peak, root_buf_length); |
805 | 33 | } |
806 | | |
807 | | /* Remove a node from the root buffer given its compressed index */ |
808 | | static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx) |
809 | 0 | { |
810 | 0 | gc_root_buffer *root = gc_decompress(ref, idx); |
811 | 0 | gc_remove_from_roots(root); |
812 | 0 | } |
813 | | |
814 | | ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref) |
815 | 2.44M | { |
816 | 2.44M | gc_root_buffer *root; |
817 | 2.44M | uint32_t idx = GC_REF_ADDRESS(ref); |
818 | | |
819 | 2.44M | GC_BENCH_INC(zval_remove_from_buffer); |
820 | | |
821 | 2.44M | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
822 | 1.80M | GC_TRACE_SET_COLOR(ref, GC_BLACK); |
823 | 1.80M | } |
824 | 2.44M | GC_REF_SET_INFO(ref, 0); |
825 | | |
826 | | /* Perform decompression only in case of large buffers */ |
827 | 2.44M | if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) { |
828 | 0 | gc_remove_compressed(ref, idx); |
829 | 0 | return; |
830 | 0 | } |
831 | | |
832 | 2.44M | ZEND_ASSERT(idx); |
833 | 2.44M | root = GC_IDX2PTR(idx); |
834 | 2.44M | gc_remove_from_roots(root); |
835 | 2.44M | } |
836 | | |
837 | | /* Mark all nodes reachable from ref as black (live). Restore the reference |
838 | | * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan. |
839 | | * To implement a depth-first search, discovered nodes are added to a stack |
840 | | * which is processed iteratively. */ |
841 | | static void gc_scan_black(zend_refcounted *ref, gc_stack *stack) |
842 | 31.9k | { |
843 | 31.9k | HashTable *ht; |
844 | 31.9k | Bucket *p; |
845 | 31.9k | zval *zv; |
846 | 31.9k | uint32_t n; |
847 | 31.9k | GC_STACK_DCL(stack); |
848 | | |
849 | 178k | tail_call: |
850 | 178k | if (GC_TYPE(ref) == IS_OBJECT) { |
851 | 34.6k | zend_object *obj = (zend_object*)ref; |
852 | | |
853 | 34.6k | if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) { |
854 | 34.6k | zval *table; |
855 | 34.6k | int len; |
856 | | |
857 | 34.6k | if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) { |
858 | 668 | zend_weakmap_get_object_key_entry_gc(obj, &table, &len); |
859 | 668 | n = len; |
860 | 668 | zv = table; |
861 | 1.24k | for (; n != 0; n-=2) { |
862 | 579 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
863 | 579 | zval *entry = (zval*) Z_PTR_P(zv); |
864 | 579 | zval *weakmap = zv+1; |
865 | 579 | ZEND_ASSERT(Z_REFCOUNTED_P(weakmap)); |
866 | 579 | if (Z_OPT_COLLECTABLE_P(entry)) { |
867 | 475 | GC_UNSET_FROM_WEAKMAP_KEY(entry); |
868 | 475 | if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) { |
869 | | /* Weakmap was scanned in gc_mark_roots, we must |
870 | | * ensure that it's eventually scanned in |
871 | | * gc_scan_roots as well. */ |
872 | 47 | if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) { |
873 | 26 | gc_extra_root(Z_COUNTED_P(weakmap)); |
874 | 26 | } |
875 | 428 | } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) { |
876 | | /* Both the entry weakmap and key are BLACK, so we |
877 | | * can mark the entry BLACK as well. |
878 | | * !GC_FROM_WEAKMAP(entry) means that the weakmap |
879 | | * was already scanned black (or will not be |
880 | | * scanned), so it's our responsibility to mark the |
881 | | * entry */ |
882 | 404 | ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK)); |
883 | 404 | ref = Z_COUNTED_P(entry); |
884 | 404 | GC_ADDREF(ref); |
885 | 404 | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
886 | 36 | GC_REF_SET_BLACK(ref); |
887 | 36 | GC_STACK_PUSH(ref); |
888 | 36 | } |
889 | 404 | } |
890 | 475 | } |
891 | 579 | zv+=2; |
892 | 579 | } |
893 | 668 | } |
894 | | |
895 | 34.6k | if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) { |
896 | 133 | zend_weakmap_get_key_entry_gc(obj, &table, &len); |
897 | 133 | n = len; |
898 | 133 | zv = table; |
899 | 355 | for (; n != 0; n-=2) { |
900 | 222 | ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR); |
901 | 222 | zval *key = zv; |
902 | 222 | zval *entry = (zval*) Z_PTR_P(zv+1); |
903 | 222 | if (Z_OPT_COLLECTABLE_P(entry)) { |
904 | 126 | GC_UNSET_FROM_WEAKMAP(entry); |
905 | 126 | if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) { |
906 | | /* Key was scanned in gc_mark_roots, we must |
907 | | * ensure that it's eventually scanned in |
908 | | * gc_scan_roots as well. */ |
909 | 39 | if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) { |
910 | 7 | gc_extra_root(Z_COUNTED_P(key)); |
911 | 7 | } |
912 | 87 | } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) { |
913 | | /* Both the entry weakmap and key are BLACK, so we |
914 | | * can mark the entry BLACK as well. |
915 | | * !GC_FROM_WEAKMAP_KEY(entry) means that the key |
916 | | * was already scanned black (or will not be |
917 | | * scanned), so it's our responsibility to mark the |
918 | | * entry */ |
919 | 75 | ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK)); |
920 | 75 | ref = Z_COUNTED_P(entry); |
921 | 75 | GC_ADDREF(ref); |
922 | 75 | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
923 | 70 | GC_REF_SET_BLACK(ref); |
924 | 70 | GC_STACK_PUSH(ref); |
925 | 70 | } |
926 | 75 | } |
927 | 126 | } |
928 | 222 | zv += 2; |
929 | 222 | } |
930 | 133 | goto next; |
931 | 133 | } |
932 | | |
933 | 34.5k | ht = obj->handlers->get_gc(obj, &table, &len); |
934 | 34.5k | n = len; |
935 | 34.5k | zv = table; |
936 | 34.5k | if (UNEXPECTED(ht)) { |
937 | 11.4k | GC_ADDREF(ht); |
938 | 11.4k | if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) { |
939 | 11.4k | GC_REF_SET_BLACK(ht); |
940 | 15.2k | for (; n != 0; n--) { |
941 | 3.80k | if (Z_COLLECTABLE_P(zv)) { |
942 | 645 | ref = Z_COUNTED_P(zv); |
943 | 645 | GC_ADDREF(ref); |
944 | 645 | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
945 | 514 | GC_REF_SET_BLACK(ref); |
946 | 514 | GC_STACK_PUSH(ref); |
947 | 514 | } |
948 | 645 | } |
949 | 3.80k | zv++; |
950 | 3.80k | } |
951 | 11.4k | goto handle_ht; |
952 | 11.4k | } |
953 | 11.4k | } |
954 | | |
955 | 100k | handle_zvals: |
956 | 2.43M | for (; n != 0; n--) { |
957 | 2.35M | if (Z_COLLECTABLE_P(zv)) { |
958 | 24.3k | ref = Z_COUNTED_P(zv); |
959 | 24.3k | GC_ADDREF(ref); |
960 | 24.3k | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
961 | 20.7k | GC_REF_SET_BLACK(ref); |
962 | 20.7k | zv++; |
963 | 94.1k | while (--n) { |
964 | 73.3k | if (Z_COLLECTABLE_P(zv)) { |
965 | 64.2k | zend_refcounted *ref = Z_COUNTED_P(zv); |
966 | 64.2k | GC_ADDREF(ref); |
967 | 64.2k | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
968 | 62.5k | GC_REF_SET_BLACK(ref); |
969 | 62.5k | GC_STACK_PUSH(ref); |
970 | 62.5k | } |
971 | 64.2k | } |
972 | 73.3k | zv++; |
973 | 73.3k | } |
974 | 20.7k | goto tail_call; |
975 | 20.7k | } |
976 | 24.3k | } |
977 | 2.33M | zv++; |
978 | 2.33M | } |
979 | 100k | } |
980 | 143k | } else if (GC_TYPE(ref) == IS_ARRAY) { |
981 | 140k | ZEND_ASSERT((zend_array*)ref != &EG(symbol_table)); |
982 | 140k | ht = (zend_array*)ref; |
983 | 152k | handle_ht: |
984 | 152k | n = ht->nNumUsed; |
985 | 152k | zv = ht->arPacked; |
986 | 152k | if (HT_IS_PACKED(ht)) { |
987 | 77.6k | goto handle_zvals; |
988 | 77.6k | } |
989 | | |
990 | 74.3k | p = (Bucket*)zv; |
991 | 217k | for (; n != 0; n--) { |
992 | 201k | zv = &p->val; |
993 | 201k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
994 | 5.45k | zv = Z_INDIRECT_P(zv); |
995 | 5.45k | } |
996 | 201k | if (Z_COLLECTABLE_P(zv)) { |
997 | 60.1k | ref = Z_COUNTED_P(zv); |
998 | 60.1k | GC_ADDREF(ref); |
999 | 60.1k | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
1000 | 58.6k | GC_REF_SET_BLACK(ref); |
1001 | 58.6k | p++; |
1002 | 116k | while (--n) { |
1003 | 57.3k | zv = &p->val; |
1004 | 57.3k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1005 | 306 | zv = Z_INDIRECT_P(zv); |
1006 | 306 | } |
1007 | 57.3k | if (Z_COLLECTABLE_P(zv)) { |
1008 | 3.39k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1009 | 3.39k | GC_ADDREF(ref); |
1010 | 3.39k | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
1011 | 2.58k | GC_REF_SET_BLACK(ref); |
1012 | 2.58k | GC_STACK_PUSH(ref); |
1013 | 2.58k | } |
1014 | 3.39k | } |
1015 | 57.3k | p++; |
1016 | 57.3k | } |
1017 | 58.6k | goto tail_call; |
1018 | 58.6k | } |
1019 | 60.1k | } |
1020 | 142k | p++; |
1021 | 142k | } |
1022 | 74.3k | } else if (GC_TYPE(ref) == IS_REFERENCE) { |
1023 | 3.02k | if (Z_COLLECTABLE(((zend_reference*)ref)->val)) { |
1024 | 1.24k | ref = Z_COUNTED(((zend_reference*)ref)->val); |
1025 | 1.24k | GC_ADDREF(ref); |
1026 | 1.24k | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
1027 | 1.14k | GC_REF_SET_BLACK(ref); |
1028 | 1.14k | goto tail_call; |
1029 | 1.14k | } |
1030 | 1.24k | } |
1031 | 3.02k | } |
1032 | | |
1033 | 97.6k | next: |
1034 | 97.6k | ref = GC_STACK_POP(); |
1035 | 97.6k | if (ref) { |
1036 | 65.7k | goto tail_call; |
1037 | 65.7k | } |
1038 | 97.6k | } |
1039 | | |
1040 | | /* Traverse the graph of nodes referred to by ref. Decrement the reference |
1041 | | * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan. */ |
1042 | | static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack) |
1043 | 57.6k | { |
1044 | 57.6k | HashTable *ht; |
1045 | 57.6k | Bucket *p; |
1046 | 57.6k | zval *zv; |
1047 | 57.6k | uint32_t n; |
1048 | 57.6k | GC_STACK_DCL(stack); |
1049 | | |
1050 | 860k | tail_call: |
1051 | 860k | GC_BENCH_INC(zval_marked_grey); |
1052 | | |
1053 | 860k | if (GC_TYPE(ref) == IS_OBJECT) { |
1054 | 413k | zend_object *obj = (zend_object*)ref; |
1055 | | |
1056 | 413k | if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) { |
1057 | 413k | zval *table; |
1058 | 413k | int len; |
1059 | | |
1060 | 413k | if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) { |
1061 | 901 | zend_weakmap_get_object_key_entry_gc(obj, &table, &len); |
1062 | 901 | n = len; |
1063 | 901 | zv = table; |
1064 | 1.58k | for (; n != 0; n-=2) { |
1065 | 683 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1066 | 683 | zval *entry = (zval*) Z_PTR_P(zv); |
1067 | 683 | zval *weakmap = zv+1; |
1068 | 683 | ZEND_ASSERT(Z_REFCOUNTED_P(weakmap)); |
1069 | 683 | if (Z_COLLECTABLE_P(entry)) { |
1070 | 574 | GC_SET_FROM_WEAKMAP_KEY(entry); |
1071 | 574 | ref = Z_COUNTED_P(entry); |
1072 | | /* Only DELREF if the contribution from the weakmap has |
1073 | | * not been cancelled yet */ |
1074 | 574 | if (!GC_FROM_WEAKMAP(entry)) { |
1075 | 493 | GC_DELREF(ref); |
1076 | 493 | } |
1077 | 574 | if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1078 | 77 | GC_REF_SET_COLOR(ref, GC_GREY); |
1079 | 77 | GC_STACK_PUSH(ref); |
1080 | 77 | } |
1081 | 574 | } |
1082 | 683 | zv+=2; |
1083 | 683 | } |
1084 | 901 | } |
1085 | | |
1086 | 413k | if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) { |
1087 | 190 | zend_weakmap_get_entry_gc(obj, &table, &len); |
1088 | 190 | n = len; |
1089 | 190 | zv = table; |
1090 | 487 | for (; n != 0; n--) { |
1091 | 297 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1092 | 297 | zval *entry = (zval*) Z_PTR_P(zv); |
1093 | 297 | if (Z_COLLECTABLE_P(entry)) { |
1094 | 201 | GC_SET_FROM_WEAKMAP(entry); |
1095 | 201 | ref = Z_COUNTED_P(entry); |
1096 | | /* Only DELREF if the contribution from the weakmap key |
1097 | | * has not been cancelled yet */ |
1098 | 201 | if (!GC_FROM_WEAKMAP_KEY(entry)) { |
1099 | 100 | GC_DELREF(ref); |
1100 | 100 | } |
1101 | 201 | if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1102 | 82 | GC_REF_SET_COLOR(ref, GC_GREY); |
1103 | 82 | GC_STACK_PUSH(ref); |
1104 | 82 | } |
1105 | 201 | } |
1106 | 297 | zv++; |
1107 | 297 | } |
1108 | 190 | goto next; |
1109 | 190 | } |
1110 | | |
1111 | 412k | ht = obj->handlers->get_gc(obj, &table, &len); |
1112 | 412k | n = len; |
1113 | 412k | zv = table; |
1114 | 412k | if (UNEXPECTED(ht)) { |
1115 | 379k | GC_DELREF(ht); |
1116 | 379k | if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) { |
1117 | 379k | GC_REF_SET_COLOR(ht, GC_GREY); |
1118 | 492k | for (; n != 0; n--) { |
1119 | 112k | if (Z_COLLECTABLE_P(zv)) { |
1120 | 108k | ref = Z_COUNTED_P(zv); |
1121 | 108k | GC_DELREF(ref); |
1122 | 108k | if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1123 | 106k | GC_REF_SET_COLOR(ref, GC_GREY); |
1124 | 106k | GC_STACK_PUSH(ref); |
1125 | 106k | } |
1126 | 108k | } |
1127 | 112k | zv++; |
1128 | 112k | } |
1129 | 379k | goto handle_ht; |
1130 | 379k | } |
1131 | 379k | } |
1132 | 128k | handle_zvals: |
1133 | 2.58M | for (; n != 0; n--) { |
1134 | 2.48M | if (Z_COLLECTABLE_P(zv)) { |
1135 | 66.9k | ref = Z_COUNTED_P(zv); |
1136 | 66.9k | GC_DELREF(ref); |
1137 | 66.9k | if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1138 | 30.4k | GC_REF_SET_COLOR(ref, GC_GREY); |
1139 | 30.4k | zv++; |
1140 | 108k | while (--n) { |
1141 | 78.0k | if (Z_COLLECTABLE_P(zv)) { |
1142 | 64.8k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1143 | 64.8k | GC_DELREF(ref); |
1144 | 64.8k | if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1145 | 60.1k | GC_REF_SET_COLOR(ref, GC_GREY); |
1146 | 60.1k | GC_STACK_PUSH(ref); |
1147 | 60.1k | } |
1148 | 64.8k | } |
1149 | 78.0k | zv++; |
1150 | 78.0k | } |
1151 | 30.4k | goto tail_call; |
1152 | 30.4k | } |
1153 | 66.9k | } |
1154 | 2.45M | zv++; |
1155 | 2.45M | } |
1156 | 128k | } |
1157 | 447k | } else if (GC_TYPE(ref) == IS_ARRAY) { |
1158 | 433k | ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table)); |
1159 | 433k | ht = (zend_array*)ref; |
1160 | 813k | handle_ht: |
1161 | 813k | n = ht->nNumUsed; |
1162 | 813k | if (HT_IS_PACKED(ht)) { |
1163 | 95.8k | zv = ht->arPacked; |
1164 | 95.8k | goto handle_zvals; |
1165 | 95.8k | } |
1166 | | |
1167 | 717k | p = ht->arData; |
1168 | 1.61M | for (; n != 0; n--) { |
1169 | 1.19M | zv = &p->val; |
1170 | 1.19M | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1171 | 614k | zv = Z_INDIRECT_P(zv); |
1172 | 614k | } |
1173 | 1.19M | if (Z_COLLECTABLE_P(zv)) { |
1174 | 382k | ref = Z_COUNTED_P(zv); |
1175 | 382k | GC_DELREF(ref); |
1176 | 382k | if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1177 | 298k | GC_REF_SET_COLOR(ref, GC_GREY); |
1178 | 298k | p++; |
1179 | 1.05M | while (--n) { |
1180 | 751k | zv = &p->val; |
1181 | 751k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1182 | 106k | zv = Z_INDIRECT_P(zv); |
1183 | 106k | } |
1184 | 751k | if (Z_COLLECTABLE_P(zv)) { |
1185 | 548k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1186 | 548k | GC_DELREF(ref); |
1187 | 548k | if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1188 | 305k | GC_REF_SET_COLOR(ref, GC_GREY); |
1189 | 305k | GC_STACK_PUSH(ref); |
1190 | 305k | } |
1191 | 548k | } |
1192 | 751k | p++; |
1193 | 751k | } |
1194 | 298k | goto tail_call; |
1195 | 298k | } |
1196 | 382k | } |
1197 | 893k | p++; |
1198 | 893k | } |
1199 | 717k | } else if (GC_TYPE(ref) == IS_REFERENCE) { |
1200 | 14.2k | if (Z_COLLECTABLE(((zend_reference*)ref)->val)) { |
1201 | 11.3k | ref = Z_COUNTED(((zend_reference*)ref)->val); |
1202 | 11.3k | GC_DELREF(ref); |
1203 | 11.3k | if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1204 | 2.12k | GC_REF_SET_COLOR(ref, GC_GREY); |
1205 | 2.12k | goto tail_call; |
1206 | 2.12k | } |
1207 | 11.3k | } |
1208 | 14.2k | } |
1209 | | |
1210 | 529k | next: |
1211 | 529k | ref = GC_STACK_POP(); |
1212 | 529k | if (ref) { |
1213 | 472k | goto tail_call; |
1214 | 472k | } |
1215 | 529k | } |
1216 | | |
1217 | | /* Two-Finger compaction algorithm */ |
1218 | | static void gc_compact(void) |
1219 | 726k | { |
1220 | 726k | if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) { |
1221 | 351k | if (GC_G(num_roots)) { |
1222 | 9.43k | gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT); |
1223 | 9.43k | gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1); |
1224 | 9.43k | gc_root_buffer *end = GC_IDX2PTR(GC_G(num_roots)); |
1225 | 9.43k | uint32_t idx; |
1226 | 9.43k | zend_refcounted *p; |
1227 | | |
1228 | 17.7k | while (free < scan) { |
1229 | 75.3k | while (!GC_IS_UNUSED(free->ref)) { |
1230 | 63.6k | free++; |
1231 | 63.6k | } |
1232 | 30.3k | while (GC_IS_UNUSED(scan->ref)) { |
1233 | 18.6k | scan--; |
1234 | 18.6k | } |
1235 | 11.7k | if (scan > free) { |
1236 | 5.91k | p = scan->ref; |
1237 | 5.91k | free->ref = p; |
1238 | 5.91k | p = GC_GET_PTR(p); |
1239 | 5.91k | idx = gc_compress(GC_PTR2IDX(free)); |
1240 | 5.91k | GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p)); |
1241 | 5.91k | free++; |
1242 | 5.91k | scan--; |
1243 | 5.91k | if (scan <= end) { |
1244 | 3.41k | break; |
1245 | 3.41k | } |
1246 | 5.91k | } |
1247 | 11.7k | } |
1248 | 9.43k | } |
1249 | 351k | GC_G(unused) = GC_INVALID; |
1250 | 351k | GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT; |
1251 | 351k | } |
1252 | 726k | } |
1253 | | |
1254 | | /* For all roots marked purple, traverse the graph, decrementing the reference |
1255 | | * count of their child nodes. Mark visited nodes grey so that they are not |
1256 | | * visited again. See MarkRoots() in Bacon & Rajan. */ |
1257 | | static void gc_mark_roots(gc_stack *stack) |
1258 | 20.9k | { |
1259 | 20.9k | gc_root_buffer *current, *last; |
1260 | | |
1261 | 20.9k | gc_compact(); |
1262 | | |
1263 | 20.9k | current = GC_IDX2PTR(GC_FIRST_ROOT); |
1264 | 20.9k | last = GC_IDX2PTR(GC_G(first_unused)); |
1265 | 175k | while (current != last) { |
1266 | 154k | if (GC_IS_ROOT(current->ref)) { |
1267 | 154k | if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) { |
1268 | 57.6k | GC_REF_SET_COLOR(current->ref, GC_GREY); |
1269 | 57.6k | gc_mark_grey(current->ref, stack); |
1270 | 57.6k | } |
1271 | 154k | } |
1272 | 154k | current++; |
1273 | 154k | } |
1274 | 20.9k | } |
1275 | | |
1276 | | /* Traverse the reference graph of ref. Evaluate grey nodes and mark them |
1277 | | * black (to keep) or white (to free). Note that nodes initially marked white |
1278 | | * may later become black if they are visited from a live node. |
1279 | | * See Scan() in Bacon & Rajan. */ |
1280 | | static void gc_scan(zend_refcounted *ref, gc_stack *stack) |
1281 | 57.7k | { |
1282 | 57.7k | HashTable *ht; |
1283 | 57.7k | Bucket *p; |
1284 | 57.7k | zval *zv; |
1285 | 57.7k | uint32_t n; |
1286 | 57.7k | GC_STACK_DCL(stack); |
1287 | | |
1288 | 1.09M | tail_call: |
1289 | 1.09M | if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1290 | 373 | goto next; |
1291 | 373 | } |
1292 | | |
1293 | 1.09M | if (GC_REFCOUNT(ref) > 0) { |
1294 | 31.9k | if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
1295 | 31.9k | GC_REF_SET_BLACK(ref); |
1296 | 31.9k | if (UNEXPECTED(!_stack->next)) { |
1297 | 17.6k | gc_stack_next(_stack); |
1298 | 17.6k | } |
1299 | | /* Split stack and reuse the tail */ |
1300 | 31.9k | _stack->next->prev = NULL; |
1301 | 31.9k | gc_scan_black(ref, _stack->next); |
1302 | 31.9k | _stack->next->prev = _stack; |
1303 | 31.9k | } |
1304 | 31.9k | goto next; |
1305 | 31.9k | } |
1306 | | |
1307 | 1.06M | if (GC_TYPE(ref) == IS_OBJECT) { |
1308 | 380k | zend_object *obj = (zend_object*)ref; |
1309 | 380k | if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) { |
1310 | 380k | zval *table; |
1311 | 380k | int len; |
1312 | | |
1313 | 380k | if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) { |
1314 | 292 | zend_weakmap_get_object_entry_gc(obj, &table, &len); |
1315 | 292 | n = len; |
1316 | 292 | zv = table; |
1317 | 451 | for (; n != 0; n--) { |
1318 | 159 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1319 | 159 | zval *entry = (zval*) Z_PTR_P(zv); |
1320 | 159 | if (Z_OPT_COLLECTABLE_P(entry)) { |
1321 | 104 | ref = Z_COUNTED_P(entry); |
1322 | 104 | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1323 | 29 | GC_REF_SET_COLOR(ref, GC_WHITE); |
1324 | 29 | GC_STACK_PUSH(ref); |
1325 | 29 | } |
1326 | 104 | } |
1327 | 159 | zv++; |
1328 | 159 | } |
1329 | 292 | } |
1330 | | |
1331 | 380k | ht = obj->handlers->get_gc(obj, &table, &len); |
1332 | 380k | n = len; |
1333 | 380k | zv = table; |
1334 | 380k | if (UNEXPECTED(ht)) { |
1335 | 369k | if (GC_REF_CHECK_COLOR(ht, GC_GREY)) { |
1336 | 369k | GC_REF_SET_COLOR(ht, GC_WHITE); |
1337 | 369k | GC_STACK_PUSH((zend_refcounted *) ht); |
1338 | 478k | for (; n != 0; n--) { |
1339 | 108k | if (Z_COLLECTABLE_P(zv)) { |
1340 | 107k | ref = Z_COUNTED_P(zv); |
1341 | 107k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1342 | 105k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1343 | 105k | GC_STACK_PUSH(ref); |
1344 | 105k | } |
1345 | 107k | } |
1346 | 108k | zv++; |
1347 | 108k | } |
1348 | 369k | goto handle_ht; |
1349 | 369k | } |
1350 | 369k | } |
1351 | | |
1352 | 54.5k | handle_zvals: |
1353 | 296k | for (; n != 0; n--) { |
1354 | 253k | if (Z_COLLECTABLE_P(zv)) { |
1355 | 71.6k | ref = Z_COUNTED_P(zv); |
1356 | 71.6k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1357 | 11.8k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1358 | 11.8k | zv++; |
1359 | 26.8k | while (--n) { |
1360 | 14.9k | if (Z_COLLECTABLE_P(zv)) { |
1361 | 9.57k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1362 | 9.57k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1363 | 6.62k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1364 | 6.62k | GC_STACK_PUSH(ref); |
1365 | 6.62k | } |
1366 | 9.57k | } |
1367 | 14.9k | zv++; |
1368 | 14.9k | } |
1369 | 11.8k | goto tail_call; |
1370 | 11.8k | } |
1371 | 71.6k | } |
1372 | 241k | zv++; |
1373 | 241k | } |
1374 | 54.5k | } |
1375 | 686k | } else if (GC_TYPE(ref) == IS_ARRAY) { |
1376 | 674k | ht = (HashTable *)ref; |
1377 | 674k | ZEND_ASSERT(ht != &EG(symbol_table)); |
1378 | | |
1379 | 1.04M | handle_ht: |
1380 | 1.04M | n = ht->nNumUsed; |
1381 | 1.04M | if (HT_IS_PACKED(ht)) { |
1382 | 43.8k | zv = ht->arPacked; |
1383 | 43.8k | goto handle_zvals; |
1384 | 43.8k | } |
1385 | | |
1386 | 1.00M | p = ht->arData; |
1387 | 3.11M | for (; n != 0; n--) { |
1388 | 2.35M | zv = &p->val; |
1389 | 2.35M | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1390 | 1.32M | zv = Z_INDIRECT_P(zv); |
1391 | 1.32M | } |
1392 | 2.35M | if (Z_COLLECTABLE_P(zv)) { |
1393 | 920k | ref = Z_COUNTED_P(zv); |
1394 | 920k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1395 | 241k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1396 | 241k | p++; |
1397 | 939k | while (--n) { |
1398 | 698k | zv = &p->val; |
1399 | 698k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1400 | 106k | zv = Z_INDIRECT_P(zv); |
1401 | 106k | } |
1402 | 698k | if (Z_COLLECTABLE_P(zv)) { |
1403 | 547k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1404 | 547k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1405 | 304k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1406 | 304k | GC_STACK_PUSH(ref); |
1407 | 304k | } |
1408 | 547k | } |
1409 | 698k | p++; |
1410 | 698k | } |
1411 | 241k | goto tail_call; |
1412 | 241k | } |
1413 | 920k | } |
1414 | 2.11M | p++; |
1415 | 2.11M | } |
1416 | 1.00M | } else if (GC_TYPE(ref) == IS_REFERENCE) { |
1417 | 11.6k | if (Z_COLLECTABLE(((zend_reference*)ref)->val)) { |
1418 | 10.3k | ref = Z_COUNTED(((zend_reference*)ref)->val); |
1419 | 10.3k | if (GC_REF_CHECK_COLOR(ref, GC_GREY)) { |
1420 | 1.57k | GC_REF_SET_COLOR(ref, GC_WHITE); |
1421 | 1.57k | goto tail_call; |
1422 | 1.57k | } |
1423 | 10.3k | } |
1424 | 11.6k | } |
1425 | | |
1426 | 844k | next: |
1427 | 844k | ref = GC_STACK_POP(); |
1428 | 844k | if (ref) { |
1429 | 786k | goto tail_call; |
1430 | 786k | } |
1431 | 844k | } |
1432 | | |
1433 | | /* Scan all roots, coloring grey nodes black or white */ |
1434 | | static void gc_scan_roots(gc_stack *stack) |
1435 | 20.9k | { |
1436 | 20.9k | uint32_t idx, end; |
1437 | 20.9k | gc_root_buffer *current; |
1438 | | |
1439 | | /* Root buffer might be reallocated during gc_scan, |
1440 | | * make sure to reload pointers. */ |
1441 | 20.9k | idx = GC_FIRST_ROOT; |
1442 | 20.9k | end = GC_G(first_unused); |
1443 | 175k | while (idx != end) { |
1444 | 154k | current = GC_IDX2PTR(idx); |
1445 | 154k | if (GC_IS_ROOT(current->ref)) { |
1446 | 154k | if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) { |
1447 | 57.6k | GC_REF_SET_COLOR(current->ref, GC_WHITE); |
1448 | 57.6k | gc_scan(current->ref, stack); |
1449 | 57.6k | } |
1450 | 154k | } |
1451 | 154k | idx++; |
1452 | 154k | } |
1453 | | |
1454 | | /* Scan extra roots added during gc_scan */ |
1455 | 21.0k | while (idx != GC_G(first_unused)) { |
1456 | 33 | current = GC_IDX2PTR(idx); |
1457 | 33 | if (GC_IS_ROOT(current->ref)) { |
1458 | 33 | if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) { |
1459 | 33 | GC_REF_SET_COLOR(current->ref, GC_WHITE); |
1460 | 33 | gc_scan(current->ref, stack); |
1461 | 33 | } |
1462 | 33 | } |
1463 | 33 | idx++; |
1464 | 33 | } |
1465 | 20.9k | } |
1466 | | |
1467 | | /* Add a node to the buffer with the garbage flag, so that it will be |
1468 | | * destroyed and freed when the scan is complete. */ |
1469 | | static void gc_add_garbage(zend_refcounted *ref) |
1470 | 559k | { |
1471 | 559k | uint32_t idx; |
1472 | 559k | gc_root_buffer *buf; |
1473 | | |
1474 | 559k | if (GC_HAS_UNUSED()) { |
1475 | 0 | idx = GC_FETCH_UNUSED(); |
1476 | 559k | } else if (GC_HAS_NEXT_UNUSED()) { |
1477 | 559k | idx = GC_FETCH_NEXT_UNUSED(); |
1478 | 559k | } else { |
1479 | 1 | gc_grow_root_buffer(); |
1480 | 1 | if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) { |
1481 | 0 | return; |
1482 | 0 | } |
1483 | 1 | idx = GC_FETCH_NEXT_UNUSED(); |
1484 | 1 | } |
1485 | | |
1486 | 559k | buf = GC_IDX2PTR(idx); |
1487 | 559k | buf->ref = GC_MAKE_GARBAGE(ref); |
1488 | | |
1489 | 559k | idx = gc_compress(idx); |
1490 | 559k | GC_REF_SET_INFO(ref, idx | GC_BLACK); |
1491 | 559k | GC_G(num_roots)++; |
1492 | 559k | } |
1493 | | |
1494 | | /* Traverse the reference graph from ref, marking any white nodes as garbage. */ |
1495 | | static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack) |
1496 | 20.5k | { |
1497 | 20.5k | int count = 0; |
1498 | 20.5k | HashTable *ht; |
1499 | 20.5k | Bucket *p; |
1500 | 20.5k | zval *zv; |
1501 | 20.5k | uint32_t n; |
1502 | 20.5k | GC_STACK_DCL(stack); |
1503 | | |
1504 | 682k | tail_call: |
1505 | | /* don't count references for compatibility ??? */ |
1506 | 682k | if (GC_TYPE(ref) != IS_REFERENCE) { |
1507 | 671k | count++; |
1508 | 671k | } |
1509 | | |
1510 | 682k | if (GC_TYPE(ref) == IS_OBJECT) { |
1511 | 378k | zend_object *obj = (zend_object*)ref; |
1512 | | |
1513 | 378k | if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) { |
1514 | 378k | int len; |
1515 | 378k | zval *table; |
1516 | | |
1517 | | /* optimization: color is GC_BLACK (0) */ |
1518 | 378k | if (!GC_INFO(ref)) { |
1519 | 271k | gc_add_garbage(ref); |
1520 | 271k | } |
1521 | 378k | if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED) |
1522 | 275k | && (obj->handlers->dtor_obj != zend_objects_destroy_object |
1523 | 275k | || obj->ce->destructor != NULL)) { |
1524 | 2.82k | *flags |= GC_HAS_DESTRUCTORS; |
1525 | 2.82k | } |
1526 | | |
1527 | 378k | if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) { |
1528 | 233 | zend_weakmap_get_object_entry_gc(obj, &table, &len); |
1529 | 233 | n = len; |
1530 | 233 | zv = table; |
1531 | 337 | for (; n != 0; n--) { |
1532 | 104 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1533 | 104 | zval *entry = (zval*) Z_PTR_P(zv); |
1534 | 104 | if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) { |
1535 | 67 | GC_UNSET_FROM_WEAKMAP_KEY(entry); |
1536 | 67 | GC_UNSET_FROM_WEAKMAP(entry); |
1537 | 67 | ref = Z_COUNTED_P(entry); |
1538 | 67 | GC_ADDREF(ref); |
1539 | 67 | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1540 | 18 | GC_REF_SET_BLACK(ref); |
1541 | 18 | GC_STACK_PUSH(ref); |
1542 | 18 | } |
1543 | 67 | } |
1544 | 104 | zv++; |
1545 | 104 | } |
1546 | 233 | } |
1547 | | |
1548 | 378k | if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) { |
1549 | 57 | zend_weakmap_get_entry_gc(obj, &table, &len); |
1550 | 57 | n = len; |
1551 | 57 | zv = table; |
1552 | 132 | for (; n != 0; n--) { |
1553 | 75 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1554 | 75 | zval *entry = (zval*) Z_PTR_P(zv); |
1555 | 75 | if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP(entry)) { |
1556 | 47 | GC_UNSET_FROM_WEAKMAP_KEY(entry); |
1557 | 47 | GC_UNSET_FROM_WEAKMAP(entry); |
1558 | 47 | ref = Z_COUNTED_P(entry); |
1559 | 47 | GC_ADDREF(ref); |
1560 | 47 | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1561 | 37 | GC_REF_SET_BLACK(ref); |
1562 | 37 | GC_STACK_PUSH(ref); |
1563 | 37 | } |
1564 | 47 | } |
1565 | 75 | zv++; |
1566 | 75 | } |
1567 | 57 | goto next; |
1568 | 57 | } |
1569 | | |
1570 | 378k | ht = obj->handlers->get_gc(obj, &table, &len); |
1571 | 378k | n = len; |
1572 | 378k | zv = table; |
1573 | 378k | if (UNEXPECTED(ht)) { |
1574 | 368k | GC_ADDREF(ht); |
1575 | 368k | if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) { |
1576 | 368k | GC_REF_SET_BLACK(ht); |
1577 | 476k | for (; n != 0; n--) { |
1578 | 108k | if (Z_COLLECTABLE_P(zv)) { |
1579 | 107k | ref = Z_COUNTED_P(zv); |
1580 | 107k | GC_ADDREF(ref); |
1581 | 107k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1582 | 105k | GC_REF_SET_BLACK(ref); |
1583 | 105k | GC_STACK_PUSH(ref); |
1584 | 105k | } |
1585 | 107k | } |
1586 | 108k | zv++; |
1587 | 108k | } |
1588 | 368k | goto handle_ht; |
1589 | 368k | } |
1590 | 368k | } |
1591 | | |
1592 | 28.0k | handle_zvals: |
1593 | 151k | for (; n != 0; n--) { |
1594 | 133k | if (Z_COLLECTABLE_P(zv)) { |
1595 | 40.1k | ref = Z_COUNTED_P(zv); |
1596 | 40.1k | GC_ADDREF(ref); |
1597 | 40.1k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1598 | 10.7k | GC_REF_SET_BLACK(ref); |
1599 | 10.7k | zv++; |
1600 | 17.9k | while (--n) { |
1601 | 7.20k | if (Z_COLLECTABLE_P(zv)) { |
1602 | 2.98k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1603 | 2.98k | GC_ADDREF(ref); |
1604 | 2.98k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1605 | 731 | GC_REF_SET_BLACK(ref); |
1606 | 731 | GC_STACK_PUSH(ref); |
1607 | 731 | } |
1608 | 2.98k | } |
1609 | 7.20k | zv++; |
1610 | 7.20k | } |
1611 | 10.7k | goto tail_call; |
1612 | 10.7k | } |
1613 | 40.1k | } |
1614 | 123k | zv++; |
1615 | 123k | } |
1616 | 28.0k | } |
1617 | 378k | } else if (GC_TYPE(ref) == IS_ARRAY) { |
1618 | | /* optimization: color is GC_BLACK (0) */ |
1619 | 292k | if (!GC_INFO(ref)) { |
1620 | 288k | gc_add_garbage(ref); |
1621 | 288k | } |
1622 | 292k | ht = (zend_array*)ref; |
1623 | | |
1624 | 661k | handle_ht: |
1625 | 661k | n = ht->nNumUsed; |
1626 | 661k | if (HT_IS_PACKED(ht)) { |
1627 | 18.1k | zv = ht->arPacked; |
1628 | 18.1k | goto handle_zvals; |
1629 | 18.1k | } |
1630 | | |
1631 | 643k | p = ht->arData; |
1632 | 1.39M | for (; n != 0; n--) { |
1633 | 989k | zv = &p->val; |
1634 | 989k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1635 | 609k | zv = Z_INDIRECT_P(zv); |
1636 | 609k | } |
1637 | 989k | if (Z_COLLECTABLE_P(zv)) { |
1638 | 322k | ref = Z_COUNTED_P(zv); |
1639 | 322k | GC_ADDREF(ref); |
1640 | 322k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1641 | 240k | GC_REF_SET_BLACK(ref); |
1642 | 240k | p++; |
1643 | 935k | while (--n) { |
1644 | 695k | zv = &p->val; |
1645 | 695k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1646 | 106k | zv = Z_INDIRECT_P(zv); |
1647 | 106k | } |
1648 | 695k | if (Z_COLLECTABLE_P(zv)) { |
1649 | 545k | zend_refcounted *ref = Z_COUNTED_P(zv); |
1650 | 545k | GC_ADDREF(ref); |
1651 | 545k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1652 | 303k | GC_REF_SET_BLACK(ref); |
1653 | 303k | GC_STACK_PUSH(ref); |
1654 | 303k | } |
1655 | 545k | } |
1656 | 695k | p++; |
1657 | 695k | } |
1658 | 240k | goto tail_call; |
1659 | 240k | } |
1660 | 322k | } |
1661 | 748k | p++; |
1662 | 748k | } |
1663 | 643k | } else if (GC_TYPE(ref) == IS_REFERENCE) { |
1664 | 11.2k | if (Z_COLLECTABLE(((zend_reference*)ref)->val)) { |
1665 | 10.1k | ref = Z_COUNTED(((zend_reference*)ref)->val); |
1666 | 10.1k | GC_ADDREF(ref); |
1667 | 10.1k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1668 | 1.29k | GC_REF_SET_BLACK(ref); |
1669 | 1.29k | goto tail_call; |
1670 | 1.29k | } |
1671 | 10.1k | } |
1672 | 11.2k | } |
1673 | | |
1674 | 430k | next: |
1675 | 430k | ref = GC_STACK_POP(); |
1676 | 430k | if (ref) { |
1677 | 409k | goto tail_call; |
1678 | 409k | } |
1679 | | |
1680 | 20.5k | return count; |
1681 | 430k | } |
1682 | | |
1683 | | /* Traverse the reference graph from all roots, marking white nodes as garbage. */ |
1684 | | static int gc_collect_roots(uint32_t *flags, gc_stack *stack) |
1685 | 20.9k | { |
1686 | 20.9k | uint32_t idx, end; |
1687 | 20.9k | zend_refcounted *ref; |
1688 | 20.9k | int count = 0; |
1689 | 20.9k | gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT); |
1690 | 20.9k | gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused)); |
1691 | | |
1692 | | /* remove non-garbage from the list */ |
1693 | 175k | while (current != last) { |
1694 | 154k | if (GC_IS_ROOT(current->ref)) { |
1695 | 154k | if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) { |
1696 | 42.6k | GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */ |
1697 | 42.6k | gc_remove_from_roots(current); |
1698 | 42.6k | } |
1699 | 154k | } |
1700 | 154k | current++; |
1701 | 154k | } |
1702 | | |
1703 | 20.9k | gc_compact(); |
1704 | | |
1705 | | /* Root buffer might be reallocated during gc_collect_white, |
1706 | | * make sure to reload pointers. */ |
1707 | 20.9k | idx = GC_FIRST_ROOT; |
1708 | 20.9k | end = GC_G(first_unused); |
1709 | 132k | while (idx != end) { |
1710 | 111k | current = GC_IDX2PTR(idx); |
1711 | 111k | ref = current->ref; |
1712 | 111k | ZEND_ASSERT(GC_IS_ROOT(ref)); |
1713 | 111k | current->ref = GC_MAKE_GARBAGE(ref); |
1714 | 111k | if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) { |
1715 | 20.5k | GC_REF_SET_BLACK(ref); |
1716 | 20.5k | count += gc_collect_white(ref, flags, stack); |
1717 | 20.5k | } |
1718 | 111k | idx++; |
1719 | 111k | } |
1720 | | |
1721 | 20.9k | return count; |
1722 | 20.9k | } |
1723 | | |
1724 | | static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack) |
1725 | 2.82k | { |
1726 | 2.82k | HashTable *ht; |
1727 | 2.82k | Bucket *p; |
1728 | 2.82k | zval *zv; |
1729 | 2.82k | uint32_t n; |
1730 | 2.82k | int count = 0; |
1731 | 2.82k | GC_STACK_DCL(stack); |
1732 | | |
1733 | 8.20k | tail_call: |
1734 | 8.20k | if (root) { |
1735 | 2.82k | root = NULL; |
1736 | 2.82k | count++; |
1737 | 5.37k | } else if (GC_REF_ADDRESS(ref) != 0 |
1738 | 4.47k | && GC_REF_CHECK_COLOR(ref, GC_BLACK)) { |
1739 | 1.63k | GC_TRACE_REF(ref, "removing from buffer"); |
1740 | 1.63k | GC_REMOVE_FROM_BUFFER(ref); |
1741 | 1.63k | count++; |
1742 | 3.74k | } else if (GC_TYPE(ref) == IS_REFERENCE) { |
1743 | 557 | if (Z_COLLECTABLE(((zend_reference*)ref)->val)) { |
1744 | 279 | ref = Z_COUNTED(((zend_reference*)ref)->val); |
1745 | 279 | goto tail_call; |
1746 | 279 | } |
1747 | 278 | goto next; |
1748 | 3.19k | } else { |
1749 | 3.19k | goto next; |
1750 | 3.19k | } |
1751 | | |
1752 | 4.45k | if (GC_TYPE(ref) == IS_OBJECT) { |
1753 | 4.14k | zend_object *obj = (zend_object*)ref; |
1754 | | |
1755 | 4.14k | if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) { |
1756 | 4.14k | int len; |
1757 | 4.14k | zval *table; |
1758 | | |
1759 | 4.14k | if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) { |
1760 | 72 | zend_weakmap_get_object_entry_gc(obj, &table, &len); |
1761 | 72 | n = len; |
1762 | 72 | zv = table; |
1763 | 77 | for (; n != 0; n--) { |
1764 | 5 | ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR); |
1765 | 5 | zval *entry = (zval*) Z_PTR_P(zv); |
1766 | 5 | if (Z_OPT_COLLECTABLE_P(entry)) { |
1767 | 5 | ref = Z_COUNTED_P(entry); |
1768 | 5 | GC_STACK_PUSH(ref); |
1769 | 5 | } |
1770 | 5 | zv++; |
1771 | 5 | } |
1772 | 72 | } |
1773 | | |
1774 | 4.14k | ht = obj->handlers->get_gc(obj, &table, &len); |
1775 | 4.14k | n = len; |
1776 | 4.14k | zv = table; |
1777 | 4.14k | if (UNEXPECTED(ht)) { |
1778 | 1.83k | for (; n != 0; n--) { |
1779 | 755 | if (Z_COLLECTABLE_P(zv)) { |
1780 | 513 | ref = Z_COUNTED_P(zv); |
1781 | 513 | GC_STACK_PUSH(ref); |
1782 | 513 | } |
1783 | 755 | zv++; |
1784 | 755 | } |
1785 | 1.07k | if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) { |
1786 | 5 | GC_TRACE_REF(ht, "removing from buffer"); |
1787 | 5 | GC_REMOVE_FROM_BUFFER(ht); |
1788 | 5 | } |
1789 | 1.07k | goto handle_ht; |
1790 | 1.07k | } |
1791 | | |
1792 | 3.34k | handle_zvals: |
1793 | 3.45k | for (; n != 0; n--) { |
1794 | 2.77k | if (Z_COLLECTABLE_P(zv)) { |
1795 | 2.66k | ref = Z_COUNTED_P(zv); |
1796 | 2.66k | zv++; |
1797 | 3.30k | while (--n) { |
1798 | 640 | if (Z_COLLECTABLE_P(zv)) { |
1799 | 591 | zend_refcounted *ref = Z_COUNTED_P(zv); |
1800 | 591 | GC_STACK_PUSH(ref); |
1801 | 591 | } |
1802 | 640 | zv++; |
1803 | 640 | } |
1804 | 2.66k | goto tail_call; |
1805 | 2.66k | } |
1806 | 115 | zv++; |
1807 | 115 | } |
1808 | 3.34k | } |
1809 | 4.14k | } else if (GC_TYPE(ref) == IS_ARRAY) { |
1810 | 308 | ht = (zend_array*)ref; |
1811 | | |
1812 | 1.38k | handle_ht: |
1813 | 1.38k | n = ht->nNumUsed; |
1814 | 1.38k | if (HT_IS_PACKED(ht)) { |
1815 | 274 | zv = ht->arPacked; |
1816 | 274 | goto handle_zvals; |
1817 | 274 | } |
1818 | | |
1819 | 1.11k | p = ht->arData; |
1820 | 1.30k | for (; n != 0; n--) { |
1821 | 1.26k | zv = &p->val; |
1822 | 1.26k | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1823 | 256 | zv = Z_INDIRECT_P(zv); |
1824 | 256 | } |
1825 | 1.26k | if (Z_COLLECTABLE_P(zv)) { |
1826 | 1.07k | ref = Z_COUNTED_P(zv); |
1827 | 1.07k | p++; |
1828 | 1.33k | while (--n) { |
1829 | 264 | zv = &p->val; |
1830 | 264 | if (Z_TYPE_P(zv) == IS_INDIRECT) { |
1831 | 23 | zv = Z_INDIRECT_P(zv); |
1832 | 23 | } |
1833 | 264 | if (Z_COLLECTABLE_P(zv)) { |
1834 | 254 | zend_refcounted *ref = Z_COUNTED_P(zv); |
1835 | 254 | GC_STACK_PUSH(ref); |
1836 | 254 | } |
1837 | 264 | p++; |
1838 | 264 | } |
1839 | 1.07k | goto tail_call; |
1840 | 1.07k | } |
1841 | 192 | p++; |
1842 | 192 | } |
1843 | 1.11k | } |
1844 | | |
1845 | 4.18k | next: |
1846 | 4.18k | ref = GC_STACK_POP(); |
1847 | 4.18k | if (ref) { |
1848 | 1.36k | goto tail_call; |
1849 | 1.36k | } |
1850 | | |
1851 | 2.82k | return count; |
1852 | 4.18k | } |
1853 | | |
1854 | | static void zend_get_gc_buffer_release(void); |
1855 | | static void zend_gc_check_root_tmpvars(void); |
1856 | | static void zend_gc_remove_root_tmpvars(void); |
1857 | | |
1858 | | static zend_internal_function gc_destructor_fiber; |
1859 | | |
1860 | | static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void) |
1861 | 0 | { |
1862 | 0 | zend_error_noreturn(E_ERROR, "Unable to create destructor fiber"); |
1863 | 0 | } |
1864 | | |
1865 | | static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void) |
1866 | 0 | { |
1867 | 0 | zend_error_noreturn(E_ERROR, "Unable to start destructor fiber"); |
1868 | 0 | } |
1869 | | |
1870 | | /* Call destructors for garbage in the buffer. */ |
1871 | | static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber) |
1872 | 684 | { |
1873 | 684 | gc_root_buffer *current; |
1874 | 684 | zend_refcounted *p; |
1875 | | |
1876 | | /* The root buffer might be reallocated during destructors calls, |
1877 | | * make sure to reload pointers as necessary. */ |
1878 | 5.10k | while (idx != end) { |
1879 | 4.47k | current = GC_IDX2PTR(idx); |
1880 | 4.47k | if (GC_IS_DTOR_GARBAGE(current->ref)) { |
1881 | 2.10k | p = GC_GET_PTR(current->ref); |
1882 | | /* Mark this is as a normal root for the next GC run */ |
1883 | 2.10k | current->ref = p; |
1884 | | /* Double check that the destructor hasn't been called yet. It |
1885 | | * could have already been invoked indirectly by some other |
1886 | | * destructor. */ |
1887 | 2.10k | if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) { |
1888 | 2.10k | if (fiber != NULL) { |
1889 | 166 | GC_G(dtor_idx) = idx; |
1890 | 166 | } |
1891 | 2.10k | zend_object *obj = (zend_object*)p; |
1892 | 2.10k | GC_TRACE_REF(obj, "calling destructor"); |
1893 | 2.10k | GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED); |
1894 | 2.10k | GC_ADDREF(obj); |
1895 | 2.10k | obj->handlers->dtor_obj(obj); |
1896 | 2.10k | GC_TRACE_REF(obj, "returned from destructor"); |
1897 | 2.10k | GC_DELREF(obj); |
1898 | 2.10k | if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) { |
1899 | | /* We resumed after suspension */ |
1900 | 50 | gc_check_possible_root((zend_refcounted*)&obj->gc); |
1901 | 50 | return FAILURE; |
1902 | 50 | } |
1903 | 2.10k | } |
1904 | 2.10k | } |
1905 | 4.42k | idx++; |
1906 | 4.42k | } |
1907 | | |
1908 | 634 | return SUCCESS; |
1909 | 684 | } |
1910 | | |
1911 | | static zend_fiber *gc_create_destructor_fiber(void) |
1912 | 116 | { |
1913 | 116 | zval zobj; |
1914 | 116 | zend_fiber *fiber; |
1915 | | |
1916 | 116 | GC_TRACE("starting destructor fiber"); |
1917 | | |
1918 | 116 | if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) { |
1919 | 0 | gc_create_destructor_fiber_error(); |
1920 | 0 | } |
1921 | | |
1922 | 116 | fiber = (zend_fiber *)Z_OBJ(zobj); |
1923 | 116 | fiber->fci.size = sizeof(fiber->fci); |
1924 | 116 | fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber; |
1925 | | |
1926 | 116 | GC_G(dtor_fiber) = fiber; |
1927 | | |
1928 | 116 | if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) { |
1929 | 0 | gc_start_destructor_fiber_error(); |
1930 | 0 | } |
1931 | | |
1932 | 116 | return fiber; |
1933 | 116 | } |
1934 | | |
1935 | | static zend_never_inline void gc_call_destructors_in_fiber(uint32_t end) |
1936 | 72 | { |
1937 | 72 | ZEND_ASSERT(!GC_G(dtor_fiber_running)); |
1938 | | |
1939 | 72 | zend_fiber *fiber = GC_G(dtor_fiber); |
1940 | | |
1941 | 72 | GC_G(dtor_idx) = GC_FIRST_ROOT; |
1942 | 72 | GC_G(dtor_end) = GC_G(first_unused); |
1943 | | |
1944 | 72 | if (UNEXPECTED(!fiber)) { |
1945 | 66 | fiber = gc_create_destructor_fiber(); |
1946 | 66 | } else { |
1947 | 6 | zend_fiber_resume(fiber, NULL, NULL); |
1948 | 6 | } |
1949 | | |
1950 | 122 | for (;;) { |
1951 | | /* At this point, fiber has executed until suspension */ |
1952 | 122 | GC_TRACE("resumed from destructor fiber"); |
1953 | | |
1954 | 122 | if (UNEXPECTED(GC_G(dtor_fiber_running))) { |
1955 | | /* Fiber was suspended by a destructor. Start a new one for the |
1956 | | * remaining destructors. */ |
1957 | 50 | GC_TRACE("destructor fiber suspended by destructor"); |
1958 | 50 | GC_G(dtor_fiber) = NULL; |
1959 | 50 | GC_G(dtor_idx)++; |
1960 | | /* We do not own the fiber anymore. It may be collected if the |
1961 | | * application does not reference it. */ |
1962 | 50 | zend_object_release(&fiber->std); |
1963 | 50 | fiber = gc_create_destructor_fiber(); |
1964 | 50 | continue; |
1965 | 72 | } else { |
1966 | | /* Fiber suspended itself after calling all destructors */ |
1967 | 72 | GC_TRACE("destructor fiber suspended itself"); |
1968 | 72 | break; |
1969 | 72 | } |
1970 | 122 | } |
1971 | 72 | } |
1972 | | |
1973 | | /* Perform a garbage collection run. The default implementation of gc_collect_cycles. */ |
1974 | | ZEND_API int zend_gc_collect_cycles(void) |
1975 | 701k | { |
1976 | 701k | int total_count = 0; |
1977 | 701k | bool should_rerun_gc = false; |
1978 | 701k | bool did_rerun_gc = false; |
1979 | | |
1980 | 701k | zend_hrtime_t start_time = zend_hrtime(); |
1981 | 701k | if (GC_G(num_roots) && !GC_G(gc_active)) { |
1982 | 20.5k | zend_gc_remove_root_tmpvars(); |
1983 | 20.5k | } |
1984 | | |
1985 | 702k | rerun_gc: |
1986 | 702k | if (GC_G(num_roots)) { |
1987 | 21.0k | int count; |
1988 | 21.0k | gc_root_buffer *current, *last; |
1989 | 21.0k | zend_refcounted *p; |
1990 | 21.0k | uint32_t gc_flags = 0; |
1991 | 21.0k | uint32_t idx, end; |
1992 | 21.0k | gc_stack stack; |
1993 | | |
1994 | 21.0k | stack.prev = NULL; |
1995 | 21.0k | stack.next = NULL; |
1996 | | |
1997 | 21.0k | if (GC_G(gc_active)) { |
1998 | 32 | GC_G(collector_time) += zend_hrtime() - start_time; |
1999 | 32 | return 0; |
2000 | 32 | } |
2001 | | |
2002 | 20.9k | GC_TRACE("Collecting cycles"); |
2003 | 20.9k | GC_G(gc_runs)++; |
2004 | 20.9k | GC_G(gc_active) = 1; |
2005 | | |
2006 | 20.9k | GC_TRACE("Marking roots"); |
2007 | 20.9k | gc_mark_roots(&stack); |
2008 | 20.9k | GC_TRACE("Scanning roots"); |
2009 | 20.9k | gc_scan_roots(&stack); |
2010 | | |
2011 | 20.9k | GC_TRACE("Collecting roots"); |
2012 | 20.9k | count = gc_collect_roots(&gc_flags, &stack); |
2013 | | |
2014 | 20.9k | if (!GC_G(num_roots)) { |
2015 | | /* nothing to free */ |
2016 | 16.9k | GC_TRACE("Nothing to free"); |
2017 | 16.9k | gc_stack_free(&stack); |
2018 | 16.9k | GC_G(gc_active) = 0; |
2019 | 16.9k | goto finish; |
2020 | 16.9k | } |
2021 | | |
2022 | 3.98k | end = GC_G(first_unused); |
2023 | | |
2024 | 3.98k | if (gc_flags & GC_HAS_DESTRUCTORS) { |
2025 | 634 | GC_TRACE("Calling destructors"); |
2026 | | |
2027 | | /* During a destructor call, new externally visible references to nested data may |
2028 | | * be introduced. These references can be introduced in a way that does not |
2029 | | * modify any refcounts, so we have no real way to detect this situation |
2030 | | * short of rerunning full GC tracing. What we do instead is to only run |
2031 | | * destructors at this point and automatically re-run GC afterwards. */ |
2032 | 634 | should_rerun_gc = true; |
2033 | | |
2034 | | /* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally |
2035 | | * color them purple. This serves a double purpose: First, they should be |
2036 | | * considered new potential roots for the next GC run. Second, it will prevent |
2037 | | * their removal from the root buffer by nested data removal. */ |
2038 | 634 | idx = GC_FIRST_ROOT; |
2039 | 634 | current = GC_IDX2PTR(GC_FIRST_ROOT); |
2040 | 6.14k | while (idx != end) { |
2041 | 5.51k | if (GC_IS_GARBAGE(current->ref)) { |
2042 | 5.51k | p = GC_GET_PTR(current->ref); |
2043 | 5.51k | if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) { |
2044 | 4.68k | zend_object *obj = (zend_object *) p; |
2045 | 4.68k | if (obj->handlers->dtor_obj != zend_objects_destroy_object |
2046 | 4.41k | || obj->ce->destructor) { |
2047 | 2.82k | current->ref = GC_MAKE_DTOR_GARBAGE(obj); |
2048 | 2.82k | GC_REF_SET_COLOR(obj, GC_PURPLE); |
2049 | 2.82k | } else { |
2050 | 1.85k | GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED); |
2051 | 1.85k | } |
2052 | 4.68k | } |
2053 | 5.51k | } |
2054 | 5.51k | current++; |
2055 | 5.51k | idx++; |
2056 | 5.51k | } |
2057 | | |
2058 | | /* Remove nested data for objects on which a destructor will be called. |
2059 | | * This will not remove the objects themselves, as they have been colored |
2060 | | * purple. */ |
2061 | 634 | idx = GC_FIRST_ROOT; |
2062 | 634 | current = GC_IDX2PTR(GC_FIRST_ROOT); |
2063 | 6.14k | while (idx != end) { |
2064 | 5.51k | if (GC_IS_DTOR_GARBAGE(current->ref)) { |
2065 | 2.82k | p = GC_GET_PTR(current->ref); |
2066 | 2.82k | count -= gc_remove_nested_data_from_buffer(p, current, &stack); |
2067 | 2.82k | } |
2068 | 5.51k | current++; |
2069 | 5.51k | idx++; |
2070 | 5.51k | } |
2071 | | |
2072 | | /* Actually call destructors. */ |
2073 | 634 | zend_hrtime_t dtor_start_time = zend_hrtime(); |
2074 | 634 | if (EXPECTED(!EG(active_fiber))) { |
2075 | 562 | gc_call_destructors(GC_FIRST_ROOT, end, NULL); |
2076 | 562 | } else { |
2077 | 72 | gc_call_destructors_in_fiber(end); |
2078 | 72 | } |
2079 | 634 | GC_G(dtor_time) += zend_hrtime() - dtor_start_time; |
2080 | | |
2081 | 634 | if (GC_G(gc_protected)) { |
2082 | | /* something went wrong */ |
2083 | 12 | zend_get_gc_buffer_release(); |
2084 | 12 | GC_G(collector_time) += zend_hrtime() - start_time; |
2085 | 12 | return 0; |
2086 | 12 | } |
2087 | 634 | } |
2088 | | |
2089 | 3.97k | gc_stack_free(&stack); |
2090 | | |
2091 | | /* Destroy zvals. The root buffer may be reallocated. */ |
2092 | 3.97k | GC_TRACE("Destroying zvals"); |
2093 | 3.97k | zend_hrtime_t free_start_time = zend_hrtime(); |
2094 | 3.97k | idx = GC_FIRST_ROOT; |
2095 | 671k | while (idx != end) { |
2096 | 667k | current = GC_IDX2PTR(idx); |
2097 | 667k | if (GC_IS_GARBAGE(current->ref)) { |
2098 | 29.4k | p = GC_GET_PTR(current->ref); |
2099 | 29.4k | GC_TRACE_REF(p, "destroying"); |
2100 | 29.4k | if (GC_TYPE(p) == IS_OBJECT) { |
2101 | 25.0k | zend_object *obj = (zend_object*)p; |
2102 | | |
2103 | 25.0k | EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj); |
2104 | 25.0k | GC_TYPE_INFO(obj) = GC_NULL | |
2105 | 25.0k | (GC_TYPE_INFO(obj) & ~GC_TYPE_MASK); |
2106 | | /* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */ |
2107 | 25.0k | current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset); |
2108 | 25.0k | if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) { |
2109 | 25.0k | GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED); |
2110 | 25.0k | GC_ADDREF(obj); |
2111 | 25.0k | obj->handlers->free_obj(obj); |
2112 | 25.0k | GC_DELREF(obj); |
2113 | 25.0k | } |
2114 | | |
2115 | 25.0k | ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle); |
2116 | 25.0k | } else if (GC_TYPE(p) == IS_ARRAY) { |
2117 | 4.46k | zend_array *arr = (zend_array*)p; |
2118 | | |
2119 | 4.46k | GC_TYPE_INFO(arr) = GC_NULL | |
2120 | 4.46k | (GC_TYPE_INFO(arr) & ~GC_TYPE_MASK); |
2121 | | |
2122 | | /* GC may destroy arrays with rc>1. This is valid and safe. */ |
2123 | 4.46k | HT_ALLOW_COW_VIOLATION(arr); |
2124 | | |
2125 | 4.46k | zend_hash_destroy(arr); |
2126 | 4.46k | } |
2127 | 29.4k | } |
2128 | 667k | idx++; |
2129 | 667k | } |
2130 | | |
2131 | | /* Free objects */ |
2132 | 3.97k | current = GC_IDX2PTR(GC_FIRST_ROOT); |
2133 | 3.97k | last = GC_IDX2PTR(end); |
2134 | 671k | while (current != last) { |
2135 | 667k | if (GC_IS_GARBAGE(current->ref)) { |
2136 | 29.4k | p = GC_GET_PTR(current->ref); |
2137 | 29.4k | GC_LINK_UNUSED(current); |
2138 | 29.4k | GC_G(num_roots)--; |
2139 | 29.4k | efree(p); |
2140 | 29.4k | } |
2141 | 667k | current++; |
2142 | 667k | } |
2143 | | |
2144 | 3.97k | GC_G(free_time) += zend_hrtime() - free_start_time; |
2145 | | |
2146 | 3.97k | GC_TRACE("Collection finished"); |
2147 | 3.97k | GC_G(collected) += count; |
2148 | 3.97k | total_count += count; |
2149 | 3.97k | GC_G(gc_active) = 0; |
2150 | 3.97k | } |
2151 | | |
2152 | 685k | gc_compact(); |
2153 | | |
2154 | | /* Objects with destructors were removed from this GC run. Rerun GC right away to clean them |
2155 | | * up. We do this only once: If we encounter more destructors on the second run, we'll not |
2156 | | * run GC another time. */ |
2157 | 685k | if (should_rerun_gc && !did_rerun_gc) { |
2158 | 485 | did_rerun_gc = true; |
2159 | 485 | goto rerun_gc; |
2160 | 485 | } |
2161 | | |
2162 | 701k | finish: |
2163 | 701k | zend_get_gc_buffer_release(); |
2164 | | |
2165 | | /* Prevent GC from running during zend_gc_check_root_tmpvars, before |
2166 | | * gc_threshold is adjusted, as this may result in unbounded recursion */ |
2167 | 701k | GC_G(gc_active) = 1; |
2168 | 701k | zend_gc_check_root_tmpvars(); |
2169 | 701k | GC_G(gc_active) = 0; |
2170 | | |
2171 | 701k | GC_G(collector_time) += zend_hrtime() - start_time; |
2172 | 701k | return total_count; |
2173 | 685k | } |
2174 | | |
2175 | | ZEND_API void zend_gc_get_status(zend_gc_status *status) |
2176 | 254 | { |
2177 | 254 | status->active = GC_G(gc_active); |
2178 | 254 | status->gc_protected = GC_G(gc_protected); |
2179 | 254 | status->full = GC_G(gc_full); |
2180 | 254 | status->runs = GC_G(gc_runs); |
2181 | 254 | status->collected = GC_G(collected); |
2182 | 254 | status->threshold = GC_G(gc_threshold); |
2183 | 254 | status->buf_size = GC_G(buf_size); |
2184 | 254 | status->num_roots = GC_G(num_roots); |
2185 | 254 | status->application_time = zend_hrtime() - GC_G(activated_at); |
2186 | 254 | status->collector_time = GC_G(collector_time); |
2187 | 254 | status->dtor_time = GC_G(dtor_time); |
2188 | 254 | status->free_time = GC_G(free_time); |
2189 | 254 | } |
2190 | | |
2191 | 67.3k | ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) { |
2192 | | /* There can only be one get_gc() call active at a time, |
2193 | | * so there only needs to be one buffer. */ |
2194 | 67.3k | zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer); |
2195 | 67.3k | gc_buffer->cur = gc_buffer->start; |
2196 | 67.3k | return gc_buffer; |
2197 | 67.3k | } |
2198 | | |
2199 | 2.02k | ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) { |
2200 | 2.02k | size_t old_capacity = gc_buffer->end - gc_buffer->start; |
2201 | 2.02k | size_t new_capacity = old_capacity == 0 ? 64 : old_capacity * 2; |
2202 | 2.02k | gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval)); |
2203 | 2.02k | gc_buffer->end = gc_buffer->start + new_capacity; |
2204 | 2.02k | gc_buffer->cur = gc_buffer->start + old_capacity; |
2205 | 2.02k | } |
2206 | | |
2207 | 701k | static void zend_get_gc_buffer_release(void) { |
2208 | 701k | zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer); |
2209 | 701k | efree(gc_buffer->start); |
2210 | 701k | gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL; |
2211 | 701k | } |
2212 | | |
2213 | | /* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain |
2214 | | * cycles. However, there are some rare exceptions where this is possible, in which case we rely |
2215 | | * on the producing code to root the value. If a GC run occurs between the rooting and consumption |
2216 | | * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */ |
2217 | 701k | static void zend_gc_check_root_tmpvars(void) { |
2218 | 701k | zend_execute_data *ex = EG(current_execute_data); |
2219 | 856k | for (; ex; ex = ex->prev_execute_data) { |
2220 | 154k | zend_function *func = ex->func; |
2221 | 154k | if (!func || !ZEND_USER_CODE(func->type)) { |
2222 | 152k | continue; |
2223 | 152k | } |
2224 | | |
2225 | 2.20k | uint32_t op_num = ex->opline - ex->func->op_array.opcodes; |
2226 | 8.65k | for (uint32_t i = 0; i < func->op_array.last_live_range; i++) { |
2227 | 6.77k | const zend_live_range *range = &func->op_array.live_range[i]; |
2228 | 6.77k | if (range->start > op_num) { |
2229 | 330 | break; |
2230 | 330 | } |
2231 | 6.44k | if (range->end <= op_num) { |
2232 | 6.18k | continue; |
2233 | 6.18k | } |
2234 | | |
2235 | 262 | uint32_t kind = range->var & ZEND_LIVE_MASK; |
2236 | 262 | if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) { |
2237 | 203 | uint32_t var_num = range->var & ~ZEND_LIVE_MASK; |
2238 | 203 | zval *var = ZEND_CALL_VAR(ex, var_num); |
2239 | 203 | if (Z_COLLECTABLE_P(var)) { |
2240 | 191 | gc_check_possible_root(Z_COUNTED_P(var)); |
2241 | 191 | } |
2242 | 203 | } |
2243 | 262 | } |
2244 | 2.20k | } |
2245 | 701k | } |
2246 | | |
2247 | 20.5k | static void zend_gc_remove_root_tmpvars(void) { |
2248 | 20.5k | zend_execute_data *ex = EG(current_execute_data); |
2249 | 25.5k | for (; ex; ex = ex->prev_execute_data) { |
2250 | 5.04k | zend_function *func = ex->func; |
2251 | 5.04k | if (!func || !ZEND_USER_CODE(func->type)) { |
2252 | 3.14k | continue; |
2253 | 3.14k | } |
2254 | | |
2255 | 1.90k | uint32_t op_num = ex->opline - ex->func->op_array.opcodes; |
2256 | 8.00k | for (uint32_t i = 0; i < func->op_array.last_live_range; i++) { |
2257 | 6.36k | const zend_live_range *range = &func->op_array.live_range[i]; |
2258 | 6.36k | if (range->start > op_num) { |
2259 | 261 | break; |
2260 | 261 | } |
2261 | 6.10k | if (range->end <= op_num) { |
2262 | 5.91k | continue; |
2263 | 5.91k | } |
2264 | | |
2265 | 191 | uint32_t kind = range->var & ZEND_LIVE_MASK; |
2266 | 191 | if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) { |
2267 | 191 | uint32_t var_num = range->var & ~ZEND_LIVE_MASK; |
2268 | 191 | zval *var = ZEND_CALL_VAR(ex, var_num); |
2269 | 191 | if (Z_COLLECTABLE_P(var)) { |
2270 | 191 | GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var)); |
2271 | 191 | } |
2272 | 191 | } |
2273 | 191 | } |
2274 | 1.90k | } |
2275 | 20.5k | } |
2276 | | |
2277 | | #if GC_BENCH |
2278 | | void gc_bench_print(void) |
2279 | | { |
2280 | | fprintf(stderr, "GC Statistics\n"); |
2281 | | fprintf(stderr, "-------------\n"); |
2282 | | fprintf(stderr, "Runs: %d\n", GC_G(gc_runs)); |
2283 | | fprintf(stderr, "Collected: %d\n", GC_G(collected)); |
2284 | | fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length)); |
2285 | | fprintf(stderr, "Root buffer peak: %d\n\n", GC_G(root_buf_peak)); |
2286 | | fprintf(stderr, " Possible Remove from Marked\n"); |
2287 | | fprintf(stderr, " Root Buffered buffer grey\n"); |
2288 | | fprintf(stderr, " -------- -------- ----------- ------\n"); |
2289 | | fprintf(stderr, "ZVAL %8d %8d %9d %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey)); |
2290 | | } |
2291 | | #endif |
2292 | | |
2293 | | #ifdef ZTS |
2294 | | size_t zend_gc_globals_size(void) |
2295 | | { |
2296 | | return sizeof(zend_gc_globals); |
2297 | | } |
2298 | | #endif |
2299 | | |
2300 | | static ZEND_FUNCTION(gc_destructor_fiber) |
2301 | 116 | { |
2302 | 116 | uint32_t idx, end; |
2303 | | |
2304 | 116 | zend_fiber *fiber = GC_G(dtor_fiber); |
2305 | 116 | ZEND_ASSERT(fiber != NULL); |
2306 | 116 | ZEND_ASSERT(fiber == EG(active_fiber)); |
2307 | | |
2308 | 122 | for (;;) { |
2309 | 122 | GC_G(dtor_fiber_running) = true; |
2310 | | |
2311 | 122 | idx = GC_G(dtor_idx); |
2312 | 122 | end = GC_G(dtor_end); |
2313 | 122 | if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) { |
2314 | | /* We resumed after being suspended by a destructor */ |
2315 | 50 | return; |
2316 | 50 | } |
2317 | | |
2318 | | /* We have called all destructors. Suspend fiber until the next GC run |
2319 | | */ |
2320 | 72 | GC_G(dtor_fiber_running) = false; |
2321 | 72 | zend_fiber_suspend(fiber, NULL, NULL); |
2322 | | |
2323 | 72 | if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) { |
2324 | | /* Fiber is being destroyed by shutdown sequence */ |
2325 | 66 | if (GC_G(dtor_fiber) == fiber) { |
2326 | 66 | GC_G(dtor_fiber) = NULL; |
2327 | 66 | } |
2328 | 66 | GC_DELREF(&fiber->std); |
2329 | 66 | gc_check_possible_root((zend_refcounted*)&fiber->std.gc); |
2330 | 66 | return; |
2331 | 66 | } |
2332 | 72 | } |
2333 | 116 | } |
2334 | | |
2335 | | static zend_internal_function gc_destructor_fiber = { |
2336 | | .type = ZEND_INTERNAL_FUNCTION, |
2337 | | .fn_flags = ZEND_ACC_PUBLIC, |
2338 | | .handler = ZEND_FN(gc_destructor_fiber), |
2339 | | }; |
2340 | | |
2341 | | void gc_init(void) |
2342 | 16 | { |
2343 | 16 | gc_destructor_fiber.function_name = zend_string_init_interned( |
2344 | 16 | "gc_destructor_fiber", |
2345 | 16 | strlen("gc_destructor_fiber"), |
2346 | | true); |
2347 | 16 | } |