Coverage Report

Created: 2025-12-14 06:09

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/php-src/Zend/zend_gc.c
Line
Count
Source
1
/*
2
   +----------------------------------------------------------------------+
3
   | Zend Engine                                                          |
4
   +----------------------------------------------------------------------+
5
   | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
6
   +----------------------------------------------------------------------+
7
   | This source file is subject to version 2.00 of the Zend license,     |
8
   | that is bundled with this package in the file LICENSE, and is        |
9
   | available through the world-wide-web at the following url:           |
10
   | http://www.zend.com/license/2_00.txt.                                |
11
   | If you did not receive a copy of the Zend license and are unable to  |
12
   | obtain it through the world-wide-web, please send a note to          |
13
   | license@zend.com so we can mail you a copy immediately.              |
14
   +----------------------------------------------------------------------+
15
   | Authors: David Wang <planetbeing@gmail.com>                          |
16
   |          Dmitry Stogov <dmitry@php.net>                              |
17
   +----------------------------------------------------------------------+
18
*/
19
20
/**
21
 * zend_gc_collect_cycles
22
 * ======================
23
 *
24
 * Colors and its meaning
25
 * ----------------------
26
 *
27
 * BLACK  (GC_BLACK)   - In use or free.
28
 * GREY   (GC_GREY)    - Possible member of cycle.
29
 * WHITE  (GC_WHITE)   - Member of garbage cycle.
30
 * PURPLE (GC_PURPLE)  - Possible root of cycle.
31
 *
32
 * Colors described in the paper but not used
33
 * ------------------------------------------
34
 *
35
 * GREEN - Acyclic
36
 * RED   - Candidate cycle undergoing
37
 * ORANGE - Candidate cycle awaiting epoch boundary.
38
 *
39
 *
40
 * Flow
41
 * =====
42
 *
43
 * The garbage collect cycle starts from 'gc_mark_roots', which traverses the
44
 * possible roots, and calls mark_grey for roots are marked purple with
45
 * depth-first traverse.
46
 *
47
 * After all possible roots are traversed and marked,
48
 * gc_scan_roots will be called, and each root will be called with
49
 * gc_scan(root->ref)
50
 *
51
 * gc_scan checks the colors of possible members.
52
 *
53
 * If the node is marked as grey and the refcount > 0
54
 *    gc_scan_black will be called on that node to scan it's subgraph.
55
 * otherwise (refcount == 0), it marks the node white.
56
 *
57
 * A node MAY be added to possible roots when ZEND_UNSET_VAR happens or
58
 * zend_assign_to_variable is called only when possible garbage node is
59
 * produced.
60
 * gc_possible_root() will be called to add the nodes to possible roots.
61
 *
62
 *
63
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64
 * get the object properties to scan.
65
 *
66
 *
67
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68
 */
69
#include "zend.h"
70
#include "zend_API.h"
71
#include "zend_compile.h"
72
#include "zend_errors.h"
73
#include "zend_fibers.h"
74
#include "zend_hrtime.h"
75
#include "zend_portability.h"
76
#include "zend_types.h"
77
#include "zend_weakrefs.h"
78
#include "zend_string.h"
79
80
#ifndef GC_BENCH
81
# define GC_BENCH 0
82
#endif
83
84
#ifndef ZEND_GC_DEBUG
85
# define ZEND_GC_DEBUG 0
86
#endif
87
88
/* GC_INFO layout */
89
465k
#define GC_ADDRESS  0x0fffffu
90
1.01M
#define GC_COLOR    0x300000u
91
92
#define GC_BLACK    0x000000u /* must be zero */
93
#define GC_WHITE    0x100000u
94
#define GC_GREY     0x200000u
95
#define GC_PURPLE   0x300000u
96
97
/* Debug tracing */
98
#if ZEND_GC_DEBUG > 1
99
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
100
# define GC_TRACE_REF(ref, format, ...) \
101
  do { \
102
    gc_trace_ref((zend_refcounted *) ref); \
103
    fprintf(stderr, format "\n", ##__VA_ARGS__); \
104
  } while (0)
105
# define GC_TRACE_SET_COLOR(ref, color) \
106
  GC_TRACE_REF(ref, "->%s", gc_color_name(color))
107
#else
108
# define GC_TRACE_REF(ref, format, ...)
109
# define GC_TRACE_SET_COLOR(ref, new_color)
110
# define GC_TRACE(str)
111
#endif
112
113
/* GC_INFO access */
114
#define GC_REF_ADDRESS(ref) \
115
465k
  (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
116
117
#define GC_REF_COLOR(ref) \
118
  (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
119
120
#define GC_REF_CHECK_COLOR(ref, color) \
121
779k
  ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
122
123
970k
#define GC_REF_SET_INFO(ref, info) do { \
124
970k
    GC_TYPE_INFO(ref) = \
125
970k
      (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
126
970k
      ((info) << GC_INFO_SHIFT); \
127
970k
  } while (0)
128
129
131k
#define GC_REF_SET_COLOR(ref, c) do { \
130
131k
    GC_TRACE_SET_COLOR(ref, c); \
131
131k
    GC_TYPE_INFO(ref) = \
132
131k
      (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
133
131k
      ((c) << GC_INFO_SHIFT); \
134
131k
  } while (0)
135
136
102k
#define GC_REF_SET_BLACK(ref) do { \
137
102k
    GC_TRACE_SET_COLOR(ref, GC_BLACK); \
138
102k
    GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
139
102k
  } while (0)
140
141
#define GC_REF_SET_PURPLE(ref) do { \
142
    GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
143
    GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
144
  } while (0)
145
146
/* bit stealing tags for gc_root_buffer.ref */
147
122k
#define GC_BITS    0x3
148
149
70.1k
#define GC_ROOT    0x0 /* possible root of circular garbage     */
150
507k
#define GC_UNUSED  0x1 /* part of linked list of unused buffers */
151
19.8k
#define GC_GARBAGE 0x2 /* garbage to delete                     */
152
5.63k
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */
153
154
#define GC_GET_PTR(ptr) \
155
13.0k
  ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
156
157
#define GC_IS_ROOT(ptr) \
158
70.1k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
159
#define GC_IS_UNUSED(ptr) \
160
22.4k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
161
#define GC_IS_GARBAGE(ptr) \
162
12.2k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
163
#define GC_IS_DTOR_GARBAGE(ptr) \
164
4.50k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)
165
166
#define GC_MAKE_GARBAGE(ptr) \
167
7.58k
  ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
168
#define GC_MAKE_DTOR_GARBAGE(ptr) \
169
1.12k
  ((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
170
171
/* GC address conversion */
172
1.28M
#define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
173
487k
#define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))
174
175
/* Get the value to be placed in an unused buffer entry with the specified next unused list index */
176
484k
#define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
177
/* Get the index of the next item in the unused list from the given root buffer entry. */
178
242k
#define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
179
180
/* GC buffers */
181
139k
#define GC_INVALID           0
182
396k
#define GC_FIRST_ROOT        1
183
184
2
#define GC_DEFAULT_BUF_SIZE  (16 * 1024)
185
0
#define GC_BUF_GROW_STEP     (128 * 1024)
186
187
0
#define GC_MAX_UNCOMPRESSED  (512 * 1024)
188
0
#define GC_MAX_BUF_SIZE      0x40000000
189
190
2
#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
191
0
#define GC_THRESHOLD_STEP    10000
192
0
#define GC_THRESHOLD_MAX     1000000000
193
0
#define GC_THRESHOLD_TRIGGER 100
194
195
/* GC flags */
196
2.33k
#define GC_HAS_DESTRUCTORS  (1<<0)
197
198
/* Weak maps */
199
777
#define Z_FROM_WEAKMAP_KEY    (1<<0)
200
762
#define Z_FROM_WEAKMAP      (1<<1)
201
202
/* The WeakMap entry zv is reachable from roots by following the virtual
203
 * reference from the a WeakMap key to the entry */
204
#define GC_FROM_WEAKMAP_KEY(zv) \
205
192
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT))
206
207
288
#define GC_SET_FROM_WEAKMAP_KEY(zv) do {                    \
208
288
  zval *_z = (zv);                               \
209
288
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
210
288
} while (0)
211
212
297
#define GC_UNSET_FROM_WEAKMAP_KEY(zv) do {                    \
213
297
  zval *_z = (zv);                               \
214
297
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
215
297
} while (0)
216
217
/* The WeakMap entry zv is reachable from roots by following the reference from
218
 * the WeakMap */
219
#define GC_FROM_WEAKMAP(zv) \
220
540
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT))
221
222
99
#define GC_SET_FROM_WEAKMAP(zv) do {                        \
223
99
  zval *_z = (zv);                               \
224
99
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
225
99
} while (0)
226
227
123
#define GC_UNSET_FROM_WEAKMAP(zv) do {                      \
228
123
  zval *_z = (zv);                               \
229
123
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
230
123
} while (0)
231
232
/* unused buffers */
233
234
/* Are there any unused root buffer entries? */
235
#define GC_HAS_UNUSED() \
236
1.56k
  (GC_G(unused) != GC_INVALID)
237
238
/* Get the next unused entry and remove it from the list */
239
#define GC_FETCH_UNUSED() \
240
242k
  gc_fetch_unused()
241
242
/* Add a root buffer entry to the unused list */
243
#define GC_LINK_UNUSED(root) \
244
484k
  gc_link_unused(root)
245
246
#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
247
  (GC_G(first_unused) < GC_G(gc_threshold))
248
#define GC_HAS_NEXT_UNUSED() \
249
1.56k
  (GC_G(first_unused) != GC_G(buf_size))
250
#define GC_FETCH_NEXT_UNUSED() \
251
242k
  gc_fetch_next_unused()
252
253
ZEND_API int (*gc_collect_cycles)(void);
254
255
/* The type of a root buffer entry.
256
 *
257
 * The lower two bits are used for flags and need to be masked out to
258
 * reconstruct a pointer.
259
 *
260
 * When a node in the root buffer is removed, the non-flag bits of the
261
 * unused entry are used to store the index of the next entry in the unused
262
 * list. */
263
typedef struct _gc_root_buffer {
264
  zend_refcounted  *ref;
265
} gc_root_buffer;
266
267
typedef struct _zend_gc_globals {
268
  /* The root buffer, which stores possible roots of reference cycles. It is
269
   * also used to store garbage to be collected at the end of a run.
270
   * A single array which is reallocated as necessary. */
271
  gc_root_buffer   *buf;
272
273
  bool         gc_enabled;
274
  bool         gc_active;        /* GC currently running, forbid nested GC */
275
  bool         gc_protected;     /* GC protected, forbid root additions */
276
  bool         gc_full;
277
278
  uint32_t          unused;     /* linked list of unused buffers    */
279
  uint32_t          first_unused;   /* first unused buffer              */
280
  uint32_t          gc_threshold;     /* GC collection threshold          */
281
  uint32_t          buf_size;     /* size of the GC buffer            */
282
  uint32_t          num_roots;    /* number of roots in GC buffer     */
283
284
  uint32_t gc_runs;         /* number of GC runs since reset */
285
  uint32_t collected;         /* number of collected nodes since reset */
286
287
  zend_hrtime_t activated_at;     /* the timestamp of the last reset */
288
  zend_hrtime_t collector_time;   /* time spent running GC (ns) */
289
  zend_hrtime_t dtor_time;      /* time spent calling destructors (ns) */
290
  zend_hrtime_t free_time;      /* time spent destroying nodes and freeing memory (ns) */
291
292
  uint32_t dtor_idx;      /* root buffer index */
293
  uint32_t dtor_end;
294
  zend_fiber *dtor_fiber;
295
  bool dtor_fiber_running;
296
297
#if GC_BENCH
298
  uint32_t root_buf_length;
299
  uint32_t root_buf_peak;
300
  uint32_t zval_possible_root;
301
  uint32_t zval_buffered;
302
  uint32_t zval_remove_from_buffer;
303
  uint32_t zval_marked_grey;
304
#endif
305
} zend_gc_globals;
306
307
#ifdef ZTS
308
static int gc_globals_id;
309
static size_t gc_globals_offset;
310
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
311
#else
312
7.38M
#define GC_G(v) (gc_globals.v)
313
static zend_gc_globals gc_globals;
314
#endif
315
316
#if GC_BENCH
317
# define GC_BENCH_INC(counter) GC_G(counter)++
318
# define GC_BENCH_DEC(counter) GC_G(counter)--
319
# define GC_BENCH_PEAK(peak, counter) do {    \
320
    if (GC_G(counter) > GC_G(peak)) {   \
321
      GC_G(peak) = GC_G(counter);     \
322
    }                   \
323
  } while (0)
324
#else
325
# define GC_BENCH_INC(counter)
326
# define GC_BENCH_DEC(counter)
327
# define GC_BENCH_PEAK(peak, counter)
328
#endif
329
330
331
0
#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
332
333
typedef struct _gc_stack gc_stack;
334
335
/* The stack used for graph traversal is stored as a linked list of segments */
336
struct _gc_stack {
337
  gc_stack        *prev;
338
  gc_stack        *next;
339
  zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
340
};
341
342
#define GC_STACK_DCL(init) \
343
63.9k
  gc_stack *_stack = init; \
344
63.9k
  size_t    _top = 0;
345
346
#define GC_STACK_PUSH(ref) \
347
63.1k
  gc_stack_push(&_stack, &_top, ref);
348
349
#define GC_STACK_POP() \
350
127k
  gc_stack_pop(&_stack, &_top)
351
352
static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
353
9.89k
{
354
9.89k
  if (UNEXPECTED(!stack->next)) {
355
9.89k
    gc_stack *segment = emalloc(sizeof(gc_stack));
356
9.89k
    segment->prev = stack;
357
9.89k
    segment->next = NULL;
358
9.89k
    stack->next = segment;
359
9.89k
  }
360
9.89k
  return stack->next;
361
9.89k
}
362
363
static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
364
63.1k
{
365
63.1k
  if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
366
0
    (*stack) = gc_stack_next(*stack);
367
0
    (*top) = 0;
368
0
  }
369
63.1k
  (*stack)->data[(*top)++] = ref;
370
63.1k
}
371
372
static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
373
127k
{
374
127k
  if (UNEXPECTED((*top) == 0)) {
375
63.9k
    if (!(*stack)->prev) {
376
63.9k
      return NULL;
377
63.9k
    } else {
378
0
      (*stack) = (*stack)->prev;
379
0
      (*top) = GC_STACK_SEGMENT_SIZE - 1;
380
0
      return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
381
0
    }
382
63.9k
  } else {
383
63.1k
    return (*stack)->data[--(*top)];
384
63.1k
  }
385
127k
}
386
387
static void gc_stack_free(gc_stack *stack)
388
10.6k
{
389
10.6k
  gc_stack *p = stack->next;
390
391
20.5k
  while (p) {
392
9.87k
    stack = p->next;
393
9.87k
    efree(p);
394
9.87k
    p = stack;
395
9.87k
  }
396
10.6k
}
397
398
/* Map a full index to a compressed index.
399
 *
400
 * The root buffer can have up to 2^30 entries, but we only have 20 bits to
401
 * store the index. So we use the 1<<19 bit as a compression flag and use the
402
 * other 19 bits to store the index modulo 2^19. */
403
static zend_always_inline uint32_t gc_compress(uint32_t idx)
404
488k
{
405
488k
  if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
406
488k
    return idx;
407
488k
  }
408
0
  return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
409
488k
}
410
411
/* Find the root buffer entry given a pointer and a compressed index.
412
 * Iterate through the root buffer in steps of 2^19 until the pointer
413
 * matches. */
414
static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
415
0
{
416
0
  gc_root_buffer *root = GC_IDX2PTR(idx);
417
418
0
  if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
419
0
    return root;
420
0
  }
421
422
0
  while (1) {
423
0
    idx += GC_MAX_UNCOMPRESSED;
424
0
    ZEND_ASSERT(idx < GC_G(first_unused));
425
0
    root = GC_IDX2PTR(idx);
426
0
    if (GC_GET_PTR(root->ref) == ref) {
427
0
      return root;
428
0
    }
429
0
  }
430
0
}
431
432
/* Get the index of the next unused root buffer entry, and remove it from the
433
 * unused list. GC_HAS_UNUSED() must be true before calling this. */
434
static zend_always_inline uint32_t gc_fetch_unused(void)
435
242k
{
436
242k
  uint32_t idx;
437
242k
  gc_root_buffer *root;
438
439
242k
  ZEND_ASSERT(GC_HAS_UNUSED());
440
242k
  idx = GC_G(unused);
441
242k
  root = GC_IDX2PTR(idx);
442
242k
  ZEND_ASSERT(GC_IS_UNUSED(root->ref));
443
242k
  GC_G(unused) = GC_LIST2IDX(root->ref);
444
242k
  return idx;
445
242k
}
446
447
/* Add a root buffer entry to the unused list */
448
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
449
484k
{
450
484k
  root->ref = GC_IDX2LIST(GC_G(unused));
451
484k
  GC_G(unused) = GC_PTR2IDX(root);
452
484k
}
453
454
static zend_always_inline uint32_t gc_fetch_next_unused(void)
455
242k
{
456
242k
  uint32_t idx;
457
458
242k
  ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
459
242k
  idx = GC_G(first_unused);
460
242k
  GC_G(first_unused) = GC_G(first_unused) + 1;
461
242k
  return idx;
462
242k
}
463
464
#if ZEND_GC_DEBUG > 1
465
static const char *gc_color_name(uint32_t color) {
466
  switch (color) {
467
    case GC_BLACK: return "black";
468
    case GC_WHITE: return "white";
469
    case GC_GREY: return "grey";
470
    case GC_PURPLE: return "purple";
471
    default: return "unknown";
472
  }
473
}
474
static void gc_trace_ref(zend_refcounted *ref) {
475
  if (GC_TYPE(ref) == IS_OBJECT) {
476
    zend_object *obj = (zend_object *) ref;
477
    fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
478
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
479
      gc_color_name(GC_REF_COLOR(ref)),
480
      obj->ce->name->val, obj->handle);
481
  } else if (GC_TYPE(ref) == IS_ARRAY) {
482
    zend_array *arr = (zend_array *) ref;
483
    fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
484
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
485
      gc_color_name(GC_REF_COLOR(ref)),
486
      zend_hash_num_elements(arr));
487
  } else {
488
    fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
489
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
490
      gc_color_name(GC_REF_COLOR(ref)),
491
      GC_TYPE(ref) == IS_REFERENCE
492
        ? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
493
  }
494
}
495
#endif
496
497
/* Mark a root buffer entry unused */
498
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
499
481k
{
500
481k
  GC_LINK_UNUSED(root);
501
481k
  GC_G(num_roots)--;
502
481k
  GC_BENCH_DEC(root_buf_length);
503
481k
}
504
505
static void root_buffer_dtor(zend_gc_globals *gc_globals)
506
0
{
507
0
  if (gc_globals->buf) {
508
0
    free(gc_globals->buf);
509
0
    gc_globals->buf = NULL;
510
0
  }
511
0
}
512
513
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
514
2
{
515
2
  gc_globals->gc_enabled = false;
516
2
  gc_globals->gc_active = false;
517
2
  gc_globals->gc_protected = true;
518
2
  gc_globals->gc_full = false;
519
520
2
  gc_globals->buf = NULL;
521
2
  gc_globals->unused = GC_INVALID;
522
2
  gc_globals->first_unused = GC_INVALID;
523
2
  gc_globals->gc_threshold = GC_INVALID;
524
2
  gc_globals->buf_size = GC_INVALID;
525
2
  gc_globals->num_roots = 0;
526
527
2
  gc_globals->gc_runs = 0;
528
2
  gc_globals->collected = 0;
529
2
  gc_globals->collector_time = 0;
530
2
  gc_globals->dtor_time = 0;
531
2
  gc_globals->free_time = 0;
532
2
  gc_globals->activated_at = 0;
533
534
2
  gc_globals->dtor_idx = GC_FIRST_ROOT;
535
2
  gc_globals->dtor_end = 0;
536
2
  gc_globals->dtor_fiber = NULL;
537
2
  gc_globals->dtor_fiber_running = false;
538
539
#if GC_BENCH
540
  gc_globals->root_buf_length = 0;
541
  gc_globals->root_buf_peak = 0;
542
  gc_globals->zval_possible_root = 0;
543
  gc_globals->zval_buffered = 0;
544
  gc_globals->zval_remove_from_buffer = 0;
545
  gc_globals->zval_marked_grey = 0;
546
#endif
547
2
}
548
549
void gc_globals_ctor(void)
550
2
{
551
#ifdef ZTS
552
  ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
553
#else
554
2
  gc_globals_ctor_ex(&gc_globals);
555
2
#endif
556
2
}
557
558
void gc_globals_dtor(void)
559
0
{
560
0
#ifndef ZTS
561
0
  root_buffer_dtor(&gc_globals);
562
0
#endif
563
0
}
564
565
void gc_reset(void)
566
57.8k
{
567
57.8k
  if (GC_G(buf)) {
568
57.8k
    GC_G(gc_active) = 0;
569
57.8k
    GC_G(gc_protected) = 0;
570
57.8k
    GC_G(gc_full) = 0;
571
57.8k
    GC_G(unused) = GC_INVALID;
572
57.8k
    GC_G(first_unused) = GC_FIRST_ROOT;
573
57.8k
    GC_G(num_roots) = 0;
574
575
57.8k
    GC_G(gc_runs) = 0;
576
57.8k
    GC_G(collected) = 0;
577
578
57.8k
    GC_G(collector_time) = 0;
579
57.8k
    GC_G(dtor_time) = 0;
580
57.8k
    GC_G(free_time) = 0;
581
582
57.8k
    GC_G(dtor_idx) = GC_FIRST_ROOT;
583
57.8k
    GC_G(dtor_end) = 0;
584
57.8k
    GC_G(dtor_fiber) = NULL;
585
57.8k
    GC_G(dtor_fiber_running) = false;
586
587
#if GC_BENCH
588
    GC_G(root_buf_length) = 0;
589
    GC_G(root_buf_peak) = 0;
590
    GC_G(zval_possible_root) = 0;
591
    GC_G(zval_buffered) = 0;
592
    GC_G(zval_remove_from_buffer) = 0;
593
    GC_G(zval_marked_grey) = 0;
594
#endif
595
57.8k
  }
596
597
57.8k
  GC_G(activated_at) = zend_hrtime();
598
57.8k
}
599
600
/* Enable/disable the garbage collector.
601
 * Initialize globals if necessary. */
602
ZEND_API bool gc_enable(bool enable)
603
62.1k
{
604
62.1k
  bool old_enabled = GC_G(gc_enabled);
605
62.1k
  GC_G(gc_enabled) = enable;
606
62.1k
  if (enable && !old_enabled && GC_G(buf) == NULL) {
607
2
    GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
608
2
    GC_G(buf)[0].ref = NULL;
609
2
    GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
610
2
    GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
611
2
    gc_reset();
612
2
  }
613
62.1k
  return old_enabled;
614
62.1k
}
615
616
ZEND_API bool gc_enabled(void)
617
50
{
618
50
  return GC_G(gc_enabled);
619
50
}
620
621
/* Protect the GC root buffer (prevent additions) */
622
ZEND_API bool gc_protect(bool protect)
623
12.6k
{
624
12.6k
  bool old_protected = GC_G(gc_protected);
625
12.6k
  GC_G(gc_protected) = protect;
626
12.6k
  return old_protected;
627
12.6k
}
628
629
ZEND_API bool gc_protected(void)
630
0
{
631
0
  return GC_G(gc_protected);
632
0
}
633
634
static void gc_grow_root_buffer(void)
635
0
{
636
0
  size_t new_size;
637
638
0
  if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
639
0
    if (!GC_G(gc_full)) {
640
0
      zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
641
0
      GC_G(gc_active) = 1;
642
0
      GC_G(gc_protected) = 1;
643
0
      GC_G(gc_full) = 1;
644
0
      return;
645
0
    }
646
0
  }
647
0
  if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
648
0
    new_size = GC_G(buf_size) * 2;
649
0
  } else {
650
0
    new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
651
0
  }
652
0
  if (new_size > GC_MAX_BUF_SIZE) {
653
0
    new_size = GC_MAX_BUF_SIZE;
654
0
  }
655
0
  GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
656
0
  GC_G(buf_size) = new_size;
657
0
}
658
659
/* Adjust the GC activation threshold given the number of nodes collected by the last run */
660
static void gc_adjust_threshold(int count)
661
0
{
662
0
  uint32_t new_threshold;
663
664
  /* TODO Very simple heuristic for dynamic GC buffer resizing:
665
   * If there are "too few" collections, increase the collection threshold
666
   * by a fixed step */
667
0
  if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
668
    /* increase */
669
0
    if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
670
0
      new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
671
0
      if (new_threshold > GC_THRESHOLD_MAX) {
672
0
        new_threshold = GC_THRESHOLD_MAX;
673
0
      }
674
0
      if (new_threshold > GC_G(buf_size)) {
675
0
        gc_grow_root_buffer();
676
0
      }
677
0
      if (new_threshold <= GC_G(buf_size)) {
678
0
        GC_G(gc_threshold) = new_threshold;
679
0
      }
680
0
    }
681
0
  } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
682
0
    new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
683
0
    if (new_threshold < GC_THRESHOLD_DEFAULT) {
684
0
      new_threshold = GC_THRESHOLD_DEFAULT;
685
0
    }
686
0
    GC_G(gc_threshold) = new_threshold;
687
0
  }
688
0
}
689
690
/* Perform a GC run and then add a node as a possible root. */
691
static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
692
0
{
693
0
  uint32_t idx;
694
0
  gc_root_buffer *newRoot;
695
696
0
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
697
0
  ZEND_ASSERT(GC_INFO(ref) == 0);
698
699
0
  if (GC_G(gc_enabled) && !GC_G(gc_active)) {
700
0
    GC_ADDREF(ref);
701
0
    gc_adjust_threshold(gc_collect_cycles());
702
0
    if (UNEXPECTED(GC_DELREF(ref) == 0)) {
703
0
      rc_dtor_func(ref);
704
0
      return;
705
0
    } else if (UNEXPECTED(GC_INFO(ref))) {
706
0
      return;
707
0
    }
708
0
  }
709
710
0
  if (GC_HAS_UNUSED()) {
711
0
    idx = GC_FETCH_UNUSED();
712
0
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
713
0
    idx = GC_FETCH_NEXT_UNUSED();
714
0
  } else {
715
0
    gc_grow_root_buffer();
716
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
717
0
      return;
718
0
    }
719
0
    idx = GC_FETCH_NEXT_UNUSED();
720
0
  }
721
722
0
  newRoot = GC_IDX2PTR(idx);
723
0
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
724
0
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
725
726
0
  idx = gc_compress(idx);
727
0
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
728
0
  GC_G(num_roots)++;
729
730
0
  GC_BENCH_INC(zval_buffered);
731
0
  GC_BENCH_INC(root_buf_length);
732
0
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
733
0
}
734
735
/* Add a possible root node to the buffer.
736
 * Maybe perform a GC run. */
737
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
738
531k
{
739
531k
  uint32_t idx;
740
531k
  gc_root_buffer *newRoot;
741
742
531k
  if (UNEXPECTED(GC_G(gc_protected))) {
743
47.6k
    return;
744
47.6k
  }
745
746
483k
  GC_BENCH_INC(zval_possible_root);
747
748
483k
  if (EXPECTED(GC_HAS_UNUSED())) {
749
242k
    idx = GC_FETCH_UNUSED();
750
242k
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
751
241k
    idx = GC_FETCH_NEXT_UNUSED();
752
241k
  } else {
753
0
    gc_possible_root_when_full(ref);
754
0
    return;
755
0
  }
756
757
483k
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
758
483k
  ZEND_ASSERT(GC_INFO(ref) == 0);
759
760
483k
  newRoot = GC_IDX2PTR(idx);
761
483k
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
762
483k
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
763
764
483k
  idx = gc_compress(idx);
765
483k
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
766
483k
  GC_G(num_roots)++;
767
768
483k
  GC_BENCH_INC(zval_buffered);
769
483k
  GC_BENCH_INC(root_buf_length);
770
483k
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
771
483k
}
772
773
/* Add an extra root during a GC run */
774
static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref)
775
15
{
776
15
  uint32_t idx;
777
15
  gc_root_buffer *newRoot;
778
779
15
  if (EXPECTED(GC_HAS_UNUSED())) {
780
0
    idx = GC_FETCH_UNUSED();
781
15
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
782
15
    idx = GC_FETCH_NEXT_UNUSED();
783
15
  } else {
784
0
    gc_grow_root_buffer();
785
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
786
      /* TODO: can this really happen? */
787
0
      return;
788
0
    }
789
0
    idx = GC_FETCH_NEXT_UNUSED();
790
0
  }
791
792
15
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
793
15
  ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0);
794
795
15
  newRoot = GC_IDX2PTR(idx);
796
15
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
797
798
15
  idx = gc_compress(idx);
799
15
  GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref));
800
15
  GC_G(num_roots)++;
801
802
15
  GC_BENCH_INC(zval_buffered);
803
15
  GC_BENCH_INC(root_buf_length);
804
15
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
805
15
}
806
807
/* Remove a node from the root buffer given its compressed index */
808
static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
809
0
{
810
0
  gc_root_buffer *root = gc_decompress(ref, idx);
811
0
  gc_remove_from_roots(root);
812
0
}
813
814
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
815
462k
{
816
462k
  gc_root_buffer *root;
817
462k
  uint32_t idx = GC_REF_ADDRESS(ref);
818
819
462k
  GC_BENCH_INC(zval_remove_from_buffer);
820
821
462k
  if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
822
461k
    GC_TRACE_SET_COLOR(ref, GC_BLACK);
823
461k
  }
824
462k
  GC_REF_SET_INFO(ref, 0);
825
826
  /* Perform decompression only in case of large buffers */
827
462k
  if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
828
0
    gc_remove_compressed(ref, idx);
829
0
    return;
830
0
  }
831
832
462k
  ZEND_ASSERT(idx);
833
462k
  root = GC_IDX2PTR(idx);
834
462k
  gc_remove_from_roots(root);
835
462k
}
836
837
/* Mark all nodes reachable from ref as black (live). Restore the reference
838
 * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan.
839
 * To implement a depth-first search, discovered nodes are added to a stack
840
 * which is processed iteratively. */
841
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
842
16.6k
{
843
16.6k
  HashTable *ht;
844
16.6k
  Bucket *p;
845
16.6k
  zval *zv;
846
16.6k
  uint32_t n;
847
16.6k
  GC_STACK_DCL(stack);
848
849
87.0k
tail_call:
850
87.0k
  if (GC_TYPE(ref) == IS_OBJECT) {
851
18.2k
    zend_object *obj = (zend_object*)ref;
852
853
18.2k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
854
18.2k
      zval *table;
855
18.2k
      int len;
856
857
18.2k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
858
330
        zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
859
330
        n = len;
860
330
        zv = table;
861
621
        for (; n != 0; n-=2) {
862
291
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
863
291
          zval *entry = (zval*) Z_PTR_P(zv);
864
291
          zval *weakmap = zv+1;
865
291
          ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
866
291
          if (Z_OPT_COLLECTABLE_P(entry)) {
867
234
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
868
234
            if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) {
869
              /* Weakmap was scanned in gc_mark_roots, we must
870
               * ensure that it's eventually scanned in
871
               * gc_scan_roots as well. */
872
21
              if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) {
873
12
                gc_extra_root(Z_COUNTED_P(weakmap));
874
12
              }
875
213
            } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) {
876
              /* Both the entry weakmap and key are BLACK, so we
877
               * can mark the entry BLACK as well.
878
               * !GC_FROM_WEAKMAP(entry) means that the weakmap
879
               * was already scanned black (or will not be
880
               * scanned), so it's our responsibility to mark the
881
               * entry */
882
204
              ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK));
883
204
              ref = Z_COUNTED_P(entry);
884
204
              GC_ADDREF(ref);
885
204
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
886
18
                GC_REF_SET_BLACK(ref);
887
18
                GC_STACK_PUSH(ref);
888
18
              }
889
204
            }
890
234
          }
891
291
          zv+=2;
892
291
        }
893
330
      }
894
895
18.2k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
896
66
        zend_weakmap_get_key_entry_gc(obj, &table, &len);
897
66
        n = len;
898
66
        zv = table;
899
174
        for (; n != 0; n-=2) {
900
108
          ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR);
901
108
          zval *key = zv;
902
108
          zval *entry = (zval*) Z_PTR_P(zv+1);
903
108
          if (Z_OPT_COLLECTABLE_P(entry)) {
904
60
            GC_UNSET_FROM_WEAKMAP(entry);
905
60
            if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) {
906
              /* Key was scanned in gc_mark_roots, we must
907
               * ensure that it's eventually scanned in
908
               * gc_scan_roots as well. */
909
21
              if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) {
910
3
                gc_extra_root(Z_COUNTED_P(key));
911
3
              }
912
39
            } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) {
913
              /* Both the entry weakmap and key are BLACK, so we
914
               * can mark the entry BLACK as well.
915
               * !GC_FROM_WEAKMAP_KEY(entry) means that the key
916
               * was already scanned black (or will not be
917
               * scanned), so it's our responsibility to mark the
918
               * entry */
919
33
              ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK));
920
33
              ref = Z_COUNTED_P(entry);
921
33
              GC_ADDREF(ref);
922
33
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
923
30
                GC_REF_SET_BLACK(ref);
924
30
                GC_STACK_PUSH(ref);
925
30
              }
926
33
            }
927
60
          }
928
108
          zv += 2;
929
108
        }
930
66
        goto next;
931
66
      }
932
933
18.1k
      ht = obj->handlers->get_gc(obj, &table, &len);
934
18.1k
      n = len;
935
18.1k
      zv = table;
936
18.1k
      if (UNEXPECTED(ht)) {
937
6.11k
        GC_ADDREF(ht);
938
6.11k
        if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
939
6.11k
          GC_REF_SET_BLACK(ht);
940
8.23k
          for (; n != 0; n--) {
941
2.12k
            if (Z_COLLECTABLE_P(zv)) {
942
327
              ref = Z_COUNTED_P(zv);
943
327
              GC_ADDREF(ref);
944
327
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
945
265
                GC_REF_SET_BLACK(ref);
946
265
                GC_STACK_PUSH(ref);
947
265
              }
948
327
            }
949
2.12k
            zv++;
950
2.12k
          }
951
6.11k
          goto handle_ht;
952
6.11k
        }
953
6.11k
      }
954
955
46.1k
handle_zvals:
956
176k
      for (; n != 0; n--) {
957
140k
        if (Z_COLLECTABLE_P(zv)) {
958
11.2k
          ref = Z_COUNTED_P(zv);
959
11.2k
          GC_ADDREF(ref);
960
11.2k
          if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
961
10.0k
            GC_REF_SET_BLACK(ref);
962
10.0k
            zv++;
963
43.2k
            while (--n) {
964
33.2k
              if (Z_COLLECTABLE_P(zv)) {
965
28.9k
                zend_refcounted *ref = Z_COUNTED_P(zv);
966
28.9k
                GC_ADDREF(ref);
967
28.9k
                if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
968
28.1k
                  GC_REF_SET_BLACK(ref);
969
28.1k
                  GC_STACK_PUSH(ref);
970
28.1k
                }
971
28.9k
              }
972
33.2k
              zv++;
973
33.2k
            }
974
10.0k
            goto tail_call;
975
10.0k
          }
976
11.2k
        }
977
130k
        zv++;
978
130k
      }
979
46.1k
    }
980
68.8k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
981
67.0k
    ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
982
67.0k
    ht = (zend_array*)ref;
983
73.1k
handle_ht:
984
73.1k
    n = ht->nNumUsed;
985
73.1k
    zv = ht->arPacked;
986
73.1k
    if (HT_IS_PACKED(ht)) {
987
34.1k
      goto handle_zvals;
988
34.1k
    }
989
990
39.0k
    p = (Bucket*)zv;
991
114k
    for (; n != 0; n--) {
992
105k
      zv = &p->val;
993
105k
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
994
3.05k
        zv = Z_INDIRECT_P(zv);
995
3.05k
      }
996
105k
      if (Z_COLLECTABLE_P(zv)) {
997
31.5k
        ref = Z_COUNTED_P(zv);
998
31.5k
        GC_ADDREF(ref);
999
31.5k
        if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1000
30.7k
          GC_REF_SET_BLACK(ref);
1001
30.7k
          p++;
1002
59.3k
          while (--n) {
1003
28.5k
            zv = &p->val;
1004
28.5k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1005
225
              zv = Z_INDIRECT_P(zv);
1006
225
            }
1007
28.5k
            if (Z_COLLECTABLE_P(zv)) {
1008
988
              zend_refcounted *ref = Z_COUNTED_P(zv);
1009
988
              GC_ADDREF(ref);
1010
988
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1011
514
                GC_REF_SET_BLACK(ref);
1012
514
                GC_STACK_PUSH(ref);
1013
514
              }
1014
988
            }
1015
28.5k
            p++;
1016
28.5k
          }
1017
30.7k
          goto tail_call;
1018
30.7k
        }
1019
31.5k
      }
1020
75.2k
      p++;
1021
75.2k
    }
1022
39.0k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1023
1.78k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1024
712
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1025
712
      GC_ADDREF(ref);
1026
712
      if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1027
667
        GC_REF_SET_BLACK(ref);
1028
667
        goto tail_call;
1029
667
      }
1030
712
    }
1031
1.78k
  }
1032
1033
45.6k
next:
1034
45.6k
  ref = GC_STACK_POP();
1035
45.6k
  if (ref) {
1036
28.9k
    goto tail_call;
1037
28.9k
  }
1038
45.6k
}
1039
1040
/* Traverse the graph of nodes referred to by ref. Decrement the reference
1041
 * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan. */
1042
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
1043
21.0k
{
1044
21.0k
  HashTable *ht;
1045
21.0k
  Bucket *p;
1046
21.0k
  zval *zv;
1047
21.0k
  uint32_t n;
1048
21.0k
  GC_STACK_DCL(stack);
1049
1050
94.6k
tail_call:
1051
94.6k
  GC_BENCH_INC(zval_marked_grey);
1052
1053
94.6k
  if (GC_TYPE(ref) == IS_OBJECT) {
1054
22.2k
    zend_object *obj = (zend_object*)ref;
1055
1056
22.2k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1057
22.2k
      zval *table;
1058
22.2k
      int len;
1059
1060
22.2k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1061
462
        zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
1062
462
        n = len;
1063
462
        zv = table;
1064
810
        for (; n != 0; n-=2) {
1065
348
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1066
348
          zval *entry = (zval*) Z_PTR_P(zv);
1067
348
          zval *weakmap = zv+1;
1068
348
          ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
1069
348
          if (Z_COLLECTABLE_P(entry)) {
1070
288
            GC_SET_FROM_WEAKMAP_KEY(entry);
1071
288
            ref = Z_COUNTED_P(entry);
1072
            /* Only DELREF if the contribution from the weakmap has
1073
             * not been cancelled yet */
1074
288
            if (!GC_FROM_WEAKMAP(entry)) {
1075
249
              GC_DELREF(ref);
1076
249
            }
1077
288
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1078
39
              GC_REF_SET_COLOR(ref, GC_GREY);
1079
39
              GC_STACK_PUSH(ref);
1080
39
            }
1081
288
          }
1082
348
          zv+=2;
1083
348
        }
1084
462
      }
1085
1086
22.2k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1087
96
        zend_weakmap_get_entry_gc(obj, &table, &len);
1088
96
        n = len;
1089
96
        zv = table;
1090
243
        for (; n != 0; n--) {
1091
147
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1092
147
          zval *entry = (zval*) Z_PTR_P(zv);
1093
147
          if (Z_COLLECTABLE_P(entry)) {
1094
99
            GC_SET_FROM_WEAKMAP(entry);
1095
99
            ref = Z_COUNTED_P(entry);
1096
            /* Only DELREF if the contribution from the weakmap key
1097
             * has not been cancelled yet */
1098
99
            if (!GC_FROM_WEAKMAP_KEY(entry)) {
1099
51
              GC_DELREF(ref);
1100
51
            }
1101
99
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1102
39
              GC_REF_SET_COLOR(ref, GC_GREY);
1103
39
              GC_STACK_PUSH(ref);
1104
39
            }
1105
99
          }
1106
147
          zv++;
1107
147
        }
1108
96
        goto next;
1109
96
      }
1110
1111
22.1k
      ht = obj->handlers->get_gc(obj, &table, &len);
1112
22.1k
      n = len;
1113
22.1k
      zv = table;
1114
22.1k
      if (UNEXPECTED(ht)) {
1115
7.75k
        GC_DELREF(ht);
1116
7.75k
        if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1117
7.74k
          GC_REF_SET_COLOR(ht, GC_GREY);
1118
10.6k
          for (; n != 0; n--) {
1119
2.86k
            if (Z_COLLECTABLE_P(zv)) {
1120
733
              ref = Z_COUNTED_P(zv);
1121
733
              GC_DELREF(ref);
1122
733
              if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1123
379
                GC_REF_SET_COLOR(ref, GC_GREY);
1124
379
                GC_STACK_PUSH(ref);
1125
379
              }
1126
733
            }
1127
2.86k
            zv++;
1128
2.86k
          }
1129
7.74k
          goto handle_ht;
1130
7.74k
        }
1131
7.75k
      }
1132
49.9k
handle_zvals:
1133
182k
      for (; n != 0; n--) {
1134
144k
        if (Z_COLLECTABLE_P(zv)) {
1135
14.8k
          ref = Z_COUNTED_P(zv);
1136
14.8k
          GC_DELREF(ref);
1137
14.8k
          if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1138
11.7k
            GC_REF_SET_COLOR(ref, GC_GREY);
1139
11.7k
            zv++;
1140
45.4k
            while (--n) {
1141
33.6k
              if (Z_COLLECTABLE_P(zv)) {
1142
29.3k
                zend_refcounted *ref = Z_COUNTED_P(zv);
1143
29.3k
                GC_DELREF(ref);
1144
29.3k
                if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1145
28.3k
                  GC_REF_SET_COLOR(ref, GC_GREY);
1146
28.3k
                  GC_STACK_PUSH(ref);
1147
28.3k
                }
1148
29.3k
              }
1149
33.6k
              zv++;
1150
33.6k
            }
1151
11.7k
            goto tail_call;
1152
11.7k
          }
1153
14.8k
        }
1154
132k
        zv++;
1155
132k
      }
1156
49.9k
    }
1157
72.3k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1158
69.0k
    ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
1159
69.0k
    ht = (zend_array*)ref;
1160
76.7k
handle_ht:
1161
76.7k
    n = ht->nNumUsed;
1162
76.7k
    if (HT_IS_PACKED(ht)) {
1163
35.4k
            zv = ht->arPacked;
1164
35.4k
            goto handle_zvals;
1165
35.4k
    }
1166
1167
41.2k
    p = ht->arData;
1168
119k
    for (; n != 0; n--) {
1169
109k
      zv = &p->val;
1170
109k
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1171
3.28k
        zv = Z_INDIRECT_P(zv);
1172
3.28k
      }
1173
109k
      if (Z_COLLECTABLE_P(zv)) {
1174
33.7k
        ref = Z_COUNTED_P(zv);
1175
33.7k
        GC_DELREF(ref);
1176
33.7k
        if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1177
31.4k
          GC_REF_SET_COLOR(ref, GC_GREY);
1178
31.4k
          p++;
1179
60.9k
          while (--n) {
1180
29.4k
            zv = &p->val;
1181
29.4k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1182
270
              zv = Z_INDIRECT_P(zv);
1183
270
            }
1184
29.4k
            if (Z_COLLECTABLE_P(zv)) {
1185
1.50k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1186
1.50k
              GC_DELREF(ref);
1187
1.50k
              if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1188
865
                GC_REF_SET_COLOR(ref, GC_GREY);
1189
865
                GC_STACK_PUSH(ref);
1190
865
              }
1191
1.50k
            }
1192
29.4k
            p++;
1193
29.4k
          }
1194
31.4k
          goto tail_call;
1195
31.4k
        }
1196
33.7k
      }
1197
78.3k
      p++;
1198
78.3k
    }
1199
41.2k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1200
3.38k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1201
2.23k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1202
2.23k
      GC_DELREF(ref);
1203
2.23k
      if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1204
679
        GC_REF_SET_COLOR(ref, GC_GREY);
1205
679
        goto tail_call;
1206
679
      }
1207
2.23k
    }
1208
3.38k
  }
1209
1210
50.7k
next:
1211
50.7k
  ref = GC_STACK_POP();
1212
50.7k
  if (ref) {
1213
29.6k
    goto tail_call;
1214
29.6k
  }
1215
50.7k
}
1216
1217
/* Two-Finger compaction algorithm */
1218
static void gc_compact(void)
1219
177k
{
1220
177k
  if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
1221
79.7k
    if (GC_G(num_roots)) {
1222
4.81k
      gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
1223
4.81k
      gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
1224
4.81k
      gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
1225
4.81k
      uint32_t idx;
1226
4.81k
      zend_refcounted *p;
1227
1228
8.51k
      while (free < scan) {
1229
12.3k
        while (!GC_IS_UNUSED(free->ref)) {
1230
6.86k
          free++;
1231
6.86k
        }
1232
10.0k
        while (GC_IS_UNUSED(scan->ref)) {
1233
4.55k
          scan--;
1234
4.55k
        }
1235
5.52k
        if (scan > free) {
1236
2.63k
          p = scan->ref;
1237
2.63k
          free->ref = p;
1238
2.63k
          p = GC_GET_PTR(p);
1239
2.63k
          idx = gc_compress(GC_PTR2IDX(free));
1240
2.63k
          GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
1241
2.63k
          free++;
1242
2.63k
          scan--;
1243
2.63k
          if (scan <= end) {
1244
1.82k
            break;
1245
1.82k
          }
1246
2.63k
        }
1247
5.52k
      }
1248
4.81k
    }
1249
79.7k
    GC_G(unused) = GC_INVALID;
1250
79.7k
    GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
1251
79.7k
  }
1252
177k
}
1253
1254
/* For all roots marked purple, traverse the graph, decrementing the reference
1255
 * count of their child nodes. Mark visited nodes grey so that they are not
1256
 * visited again. See MarkRoots() in Bacon & Rajan. */
1257
static void gc_mark_roots(gc_stack *stack)
1258
10.6k
{
1259
10.6k
  gc_root_buffer *current, *last;
1260
1261
10.6k
  gc_compact();
1262
1263
10.6k
  current = GC_IDX2PTR(GC_FIRST_ROOT);
1264
10.6k
  last = GC_IDX2PTR(GC_G(first_unused));
1265
34.0k
  while (current != last) {
1266
23.3k
    if (GC_IS_ROOT(current->ref)) {
1267
23.3k
      if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
1268
21.0k
        GC_REF_SET_COLOR(current->ref, GC_GREY);
1269
21.0k
        gc_mark_grey(current->ref, stack);
1270
21.0k
      }
1271
23.3k
    }
1272
23.3k
    current++;
1273
23.3k
  }
1274
10.6k
}
1275
1276
/* Traverse the reference graph of ref. Evaluate grey nodes and mark them
1277
 * black (to keep) or white (to free). Note that nodes initially marked white
1278
 * may later become black if they are visited from a live node.
1279
 * See Scan() in Bacon & Rajan. */
1280
static void gc_scan(zend_refcounted *ref, gc_stack *stack)
1281
21.1k
{
1282
21.1k
  HashTable *ht;
1283
21.1k
  Bucket *p;
1284
21.1k
  zval *zv;
1285
21.1k
  uint32_t n;
1286
21.1k
  GC_STACK_DCL(stack);
1287
1288
28.0k
tail_call:
1289
28.0k
  if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1290
255
    goto next;
1291
255
  }
1292
1293
27.8k
  if (GC_REFCOUNT(ref) > 0) {
1294
16.6k
    if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1295
16.6k
      GC_REF_SET_BLACK(ref);
1296
16.6k
      if (UNEXPECTED(!_stack->next)) {
1297
9.89k
        gc_stack_next(_stack);
1298
9.89k
      }
1299
      /* Split stack and reuse the tail */
1300
16.6k
      _stack->next->prev = NULL;
1301
16.6k
      gc_scan_black(ref, _stack->next);
1302
16.6k
      _stack->next->prev = _stack;
1303
16.6k
    }
1304
16.6k
    goto next;
1305
16.6k
  }
1306
1307
11.1k
  if (GC_TYPE(ref) == IS_OBJECT) {
1308
4.81k
    zend_object *obj = (zend_object*)ref;
1309
4.81k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1310
4.81k
      zval *table;
1311
4.81k
      int len;
1312
1313
4.81k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1314
165
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1315
165
        n = len;
1316
165
        zv = table;
1317
255
        for (; n != 0; n--) {
1318
90
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1319
90
          zval *entry = (zval*) Z_PTR_P(zv);
1320
90
          if (Z_OPT_COLLECTABLE_P(entry)) {
1321
57
            ref = Z_COUNTED_P(entry);
1322
57
            if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1323
18
              GC_REF_SET_COLOR(ref, GC_WHITE);
1324
18
              GC_STACK_PUSH(ref);
1325
18
            }
1326
57
          }
1327
90
          zv++;
1328
90
        }
1329
165
      }
1330
1331
4.81k
      ht = obj->handlers->get_gc(obj, &table, &len);
1332
4.81k
      n = len;
1333
4.81k
      zv = table;
1334
4.81k
      if (UNEXPECTED(ht)) {
1335
2.07k
        if (GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1336
2.07k
          GC_REF_SET_COLOR(ht, GC_WHITE);
1337
2.07k
          GC_STACK_PUSH((zend_refcounted *) ht);
1338
2.90k
          for (; n != 0; n--) {
1339
838
            if (Z_COLLECTABLE_P(zv)) {
1340
424
              ref = Z_COUNTED_P(zv);
1341
424
              if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1342
144
                GC_REF_SET_COLOR(ref, GC_WHITE);
1343
144
                GC_STACK_PUSH(ref);
1344
144
              }
1345
424
            }
1346
838
            zv++;
1347
838
          }
1348
2.07k
          goto handle_ht;
1349
2.07k
        }
1350
2.07k
      }
1351
1352
4.30k
handle_zvals:
1353
6.74k
      for (; n != 0; n--) {
1354
4.84k
        if (Z_COLLECTABLE_P(zv)) {
1355
3.86k
          ref = Z_COUNTED_P(zv);
1356
3.86k
          if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1357
2.40k
            GC_REF_SET_COLOR(ref, GC_WHITE);
1358
2.40k
            zv++;
1359
3.22k
            while (--n) {
1360
821
              if (Z_COLLECTABLE_P(zv)) {
1361
616
                zend_refcounted *ref = Z_COUNTED_P(zv);
1362
616
                if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1363
426
                  GC_REF_SET_COLOR(ref, GC_WHITE);
1364
426
                  GC_STACK_PUSH(ref);
1365
426
                }
1366
616
              }
1367
821
              zv++;
1368
821
            }
1369
2.40k
            goto tail_call;
1370
2.40k
          }
1371
3.86k
        }
1372
2.44k
        zv++;
1373
2.44k
      }
1374
4.30k
    }
1375
6.33k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1376
4.57k
    ht = (HashTable *)ref;
1377
4.57k
    ZEND_ASSERT(ht != &EG(symbol_table));
1378
1379
6.64k
handle_ht:
1380
6.64k
    n = ht->nNumUsed;
1381
6.64k
    if (HT_IS_PACKED(ht)) {
1382
1.55k
            zv = ht->arPacked;
1383
1.55k
            goto handle_zvals;
1384
1.55k
    }
1385
1386
5.09k
    p = ht->arData;
1387
11.6k
    for (; n != 0; n--) {
1388
7.65k
      zv = &p->val;
1389
7.65k
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1390
978
        zv = Z_INDIRECT_P(zv);
1391
978
      }
1392
7.65k
      if (Z_COLLECTABLE_P(zv)) {
1393
4.45k
        ref = Z_COUNTED_P(zv);
1394
4.45k
        if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1395
1.06k
          GC_REF_SET_COLOR(ref, GC_WHITE);
1396
1.06k
          p++;
1397
2.45k
          while (--n) {
1398
1.38k
            zv = &p->val;
1399
1.38k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1400
96
              zv = Z_INDIRECT_P(zv);
1401
96
            }
1402
1.38k
            if (Z_COLLECTABLE_P(zv)) {
1403
799
              zend_refcounted *ref = Z_COUNTED_P(zv);
1404
799
              if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1405
532
                GC_REF_SET_COLOR(ref, GC_WHITE);
1406
532
                GC_STACK_PUSH(ref);
1407
532
              }
1408
799
            }
1409
1.38k
            p++;
1410
1.38k
          }
1411
1.06k
          goto tail_call;
1412
1.06k
        }
1413
4.45k
      }
1414
6.58k
      p++;
1415
6.58k
    }
1416
5.09k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1417
1.76k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1418
1.63k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1419
1.63k
      if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1420
298
        GC_REF_SET_COLOR(ref, GC_WHITE);
1421
298
        goto tail_call;
1422
298
      }
1423
1.63k
    }
1424
1.76k
  }
1425
1426
24.3k
next:
1427
24.3k
  ref = GC_STACK_POP();
1428
24.3k
  if (ref) {
1429
3.19k
    goto tail_call;
1430
3.19k
  }
1431
24.3k
}
1432
1433
/* Scan all roots, coloring grey nodes black or white */
1434
static void gc_scan_roots(gc_stack *stack)
1435
10.6k
{
1436
10.6k
  uint32_t idx, end;
1437
10.6k
  gc_root_buffer *current;
1438
1439
  /* Root buffer might be reallocated during gc_scan,
1440
   * make sure to reload pointers. */
1441
10.6k
  idx = GC_FIRST_ROOT;
1442
10.6k
  end = GC_G(first_unused);
1443
34.0k
  while (idx != end) {
1444
23.3k
    current = GC_IDX2PTR(idx);
1445
23.3k
    if (GC_IS_ROOT(current->ref)) {
1446
23.3k
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1447
21.1k
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1448
21.1k
        gc_scan(current->ref, stack);
1449
21.1k
      }
1450
23.3k
    }
1451
23.3k
    idx++;
1452
23.3k
  }
1453
1454
  /* Scan extra roots added during gc_scan */
1455
10.7k
  while (idx != GC_G(first_unused)) {
1456
15
    current = GC_IDX2PTR(idx);
1457
15
    if (GC_IS_ROOT(current->ref)) {
1458
15
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1459
15
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1460
15
        gc_scan(current->ref, stack);
1461
15
      }
1462
15
    }
1463
15
    idx++;
1464
15
  }
1465
10.6k
}
1466
1467
/* Add a node to the buffer with the garbage flag, so that it will be
1468
 * destroyed and freed when the scan is complete. */
1469
static void gc_add_garbage(zend_refcounted *ref)
1470
1.56k
{
1471
1.56k
  uint32_t idx;
1472
1.56k
  gc_root_buffer *buf;
1473
1474
1.56k
  if (GC_HAS_UNUSED()) {
1475
0
    idx = GC_FETCH_UNUSED();
1476
1.56k
  } else if (GC_HAS_NEXT_UNUSED()) {
1477
1.56k
    idx = GC_FETCH_NEXT_UNUSED();
1478
1.56k
  } else {
1479
0
    gc_grow_root_buffer();
1480
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
1481
0
      return;
1482
0
    }
1483
0
    idx = GC_FETCH_NEXT_UNUSED();
1484
0
  }
1485
1486
1.56k
  buf = GC_IDX2PTR(idx);
1487
1.56k
  buf->ref = GC_MAKE_GARBAGE(ref);
1488
1489
1.56k
  idx = gc_compress(idx);
1490
1.56k
  GC_REF_SET_INFO(ref, idx | GC_BLACK);
1491
1.56k
  GC_G(num_roots)++;
1492
1.56k
}
1493
1494
/* Traverse the reference graph from ref, marking any white nodes as garbage. */
1495
static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
1496
3.91k
{
1497
3.91k
  int count = 0;
1498
3.91k
  HashTable *ht;
1499
3.91k
  Bucket *p;
1500
3.91k
  zval *zv;
1501
3.91k
  uint32_t n;
1502
3.91k
  GC_STACK_DCL(stack);
1503
1504
7.59k
tail_call:
1505
  /* don't count references for compatibility ??? */
1506
7.59k
  if (GC_TYPE(ref) != IS_REFERENCE) {
1507
5.99k
    count++;
1508
5.99k
  }
1509
1510
7.59k
  if (GC_TYPE(ref) == IS_OBJECT) {
1511
4.02k
    zend_object *obj = (zend_object*)ref;
1512
1513
4.02k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1514
4.02k
      int len;
1515
4.02k
      zval *table;
1516
1517
      /* optimization: color is GC_BLACK (0) */
1518
4.02k
      if (!GC_INFO(ref)) {
1519
919
        gc_add_garbage(ref);
1520
919
      }
1521
4.02k
      if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
1522
2.93k
       && (obj->handlers->dtor_obj != zend_objects_destroy_object
1523
2.77k
        || obj->ce->destructor != NULL)) {
1524
1.12k
        *flags |= GC_HAS_DESTRUCTORS;
1525
1.12k
      }
1526
1527
4.02k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1528
132
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1529
132
        n = len;
1530
132
        zv = table;
1531
189
        for (; n != 0; n--) {
1532
57
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1533
57
          zval *entry = (zval*) Z_PTR_P(zv);
1534
57
          if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) {
1535
42
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
1536
42
            GC_UNSET_FROM_WEAKMAP(entry);
1537
42
            ref = Z_COUNTED_P(entry);
1538
42
            GC_ADDREF(ref);
1539
42
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1540
12
              GC_REF_SET_BLACK(ref);
1541
12
              GC_STACK_PUSH(ref);
1542
12
            }
1543
42
          }
1544
57
          zv++;
1545
57
        }
1546
132
      }
1547
1548
4.02k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1549
30
        zend_weakmap_get_entry_gc(obj, &table, &len);
1550
30
        n = len;
1551
30
        zv = table;
1552
69
        for (; n != 0; n--) {
1553
39
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1554
39
          zval *entry = (zval*) Z_PTR_P(zv);
1555
39
          if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP(entry)) {
1556
21
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
1557
21
            GC_UNSET_FROM_WEAKMAP(entry);
1558
21
            ref = Z_COUNTED_P(entry);
1559
21
            GC_ADDREF(ref);
1560
21
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1561
15
              GC_REF_SET_BLACK(ref);
1562
15
              GC_STACK_PUSH(ref);
1563
15
            }
1564
21
          }
1565
39
          zv++;
1566
39
        }
1567
30
        goto next;
1568
30
      }
1569
1570
3.99k
      ht = obj->handlers->get_gc(obj, &table, &len);
1571
3.99k
      n = len;
1572
3.99k
      zv = table;
1573
3.99k
      if (UNEXPECTED(ht)) {
1574
1.63k
        GC_ADDREF(ht);
1575
1.63k
        if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) {
1576
1.63k
          GC_REF_SET_BLACK(ht);
1577
2.38k
          for (; n != 0; n--) {
1578
748
            if (Z_COLLECTABLE_P(zv)) {
1579
406
              ref = Z_COUNTED_P(zv);
1580
406
              GC_ADDREF(ref);
1581
406
              if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1582
129
                GC_REF_SET_BLACK(ref);
1583
129
                GC_STACK_PUSH(ref);
1584
129
              }
1585
406
            }
1586
748
            zv++;
1587
748
          }
1588
1.63k
          goto handle_ht;
1589
1.63k
        }
1590
1.63k
      }
1591
1592
3.72k
handle_zvals:
1593
5.49k
      for (; n != 0; n--) {
1594
3.86k
        if (Z_COLLECTABLE_P(zv)) {
1595
3.54k
          ref = Z_COUNTED_P(zv);
1596
3.54k
          GC_ADDREF(ref);
1597
3.54k
          if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1598
2.09k
            GC_REF_SET_BLACK(ref);
1599
2.09k
            zv++;
1600
2.59k
            while (--n) {
1601
499
              if (Z_COLLECTABLE_P(zv)) {
1602
386
                zend_refcounted *ref = Z_COUNTED_P(zv);
1603
386
                GC_ADDREF(ref);
1604
386
                if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1605
242
                  GC_REF_SET_BLACK(ref);
1606
242
                  GC_STACK_PUSH(ref);
1607
242
                }
1608
386
              }
1609
499
              zv++;
1610
499
            }
1611
2.09k
            goto tail_call;
1612
2.09k
          }
1613
3.54k
        }
1614
1.76k
        zv++;
1615
1.76k
      }
1616
3.72k
    }
1617
4.02k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1618
    /* optimization: color is GC_BLACK (0) */
1619
1.97k
    if (!GC_INFO(ref)) {
1620
644
      gc_add_garbage(ref);
1621
644
    }
1622
1.97k
    ht = (zend_array*)ref;
1623
1624
3.60k
handle_ht:
1625
3.60k
    n = ht->nNumUsed;
1626
3.60k
    if (HT_IS_PACKED(ht)) {
1627
1.35k
      zv = ht->arPacked;
1628
1.35k
      goto handle_zvals;
1629
1.35k
    }
1630
1631
2.24k
    p = ht->arData;
1632
5.44k
    for (; n != 0; n--) {
1633
3.89k
      zv = &p->val;
1634
3.89k
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1635
228
        zv = Z_INDIRECT_P(zv);
1636
228
      }
1637
3.89k
      if (Z_COLLECTABLE_P(zv)) {
1638
2.32k
        ref = Z_COUNTED_P(zv);
1639
2.32k
        GC_ADDREF(ref);
1640
2.32k
        if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1641
694
          GC_REF_SET_BLACK(ref);
1642
694
          p++;
1643
1.50k
          while (--n) {
1644
807
            zv = &p->val;
1645
807
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1646
48
              zv = Z_INDIRECT_P(zv);
1647
48
            }
1648
807
            if (Z_COLLECTABLE_P(zv)) {
1649
435
              zend_refcounted *ref = Z_COUNTED_P(zv);
1650
435
              GC_ADDREF(ref);
1651
435
              if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1652
267
                GC_REF_SET_BLACK(ref);
1653
267
                GC_STACK_PUSH(ref);
1654
267
              }
1655
435
            }
1656
807
            p++;
1657
807
          }
1658
694
          goto tail_call;
1659
694
        }
1660
2.32k
      }
1661
3.20k
      p++;
1662
3.20k
    }
1663
2.24k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1664
1.59k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1665
1.52k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1666
1.52k
      GC_ADDREF(ref);
1667
1.52k
      if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1668
223
        GC_REF_SET_BLACK(ref);
1669
223
        goto tail_call;
1670
223
      }
1671
1.52k
    }
1672
1.59k
  }
1673
1674
4.58k
next:
1675
4.58k
  ref = GC_STACK_POP();
1676
4.58k
  if (ref) {
1677
665
    goto tail_call;
1678
665
  }
1679
1680
3.91k
  return count;
1681
4.58k
}
1682
1683
/* Traverse the reference graph from all roots, marking white nodes as garbage. */
1684
static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
1685
10.6k
{
1686
10.6k
  uint32_t idx, end;
1687
10.6k
  zend_refcounted *ref;
1688
10.6k
  int count = 0;
1689
10.6k
  gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1690
10.6k
  gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1691
1692
  /* remove non-garbage from the list */
1693
34.0k
  while (current != last) {
1694
23.3k
    if (GC_IS_ROOT(current->ref)) {
1695
23.3k
      if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
1696
18.9k
        GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
1697
18.9k
        gc_remove_from_roots(current);
1698
18.9k
      }
1699
23.3k
    }
1700
23.3k
    current++;
1701
23.3k
  }
1702
1703
10.6k
  gc_compact();
1704
1705
  /* Root buffer might be reallocated during gc_collect_white,
1706
   * make sure to reload pointers. */
1707
10.6k
  idx = GC_FIRST_ROOT;
1708
10.6k
  end = GC_G(first_unused);
1709
15.1k
  while (idx != end) {
1710
4.44k
    current = GC_IDX2PTR(idx);
1711
4.44k
    ref = current->ref;
1712
4.44k
    ZEND_ASSERT(GC_IS_ROOT(ref));
1713
4.44k
    current->ref = GC_MAKE_GARBAGE(ref);
1714
4.44k
    if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1715
3.91k
      GC_REF_SET_BLACK(ref);
1716
3.91k
      count += gc_collect_white(ref, flags, stack);
1717
3.91k
    }
1718
4.44k
    idx++;
1719
4.44k
  }
1720
1721
10.6k
  return count;
1722
10.6k
}
1723
1724
static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
1725
1.12k
{
1726
1.12k
  HashTable *ht;
1727
1.12k
  Bucket *p;
1728
1.12k
  zval *zv;
1729
1.12k
  uint32_t n;
1730
1.12k
  int count = 0;
1731
1.12k
  GC_STACK_DCL(stack);
1732
1733
3.62k
tail_call:
1734
3.62k
  if (root) {
1735
1.12k
    root = NULL;
1736
1.12k
    count++;
1737
2.49k
  } else if (GC_REF_ADDRESS(ref) != 0
1738
1.95k
   && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1739
738
    GC_TRACE_REF(ref, "removing from buffer");
1740
738
    GC_REMOVE_FROM_BUFFER(ref);
1741
738
    count++;
1742
1.76k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1743
339
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1744
159
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1745
159
      goto tail_call;
1746
159
    }
1747
180
    goto next;
1748
1.42k
  } else {
1749
1.42k
    goto next;
1750
1.42k
  }
1751
1752
1.86k
  if (GC_TYPE(ref) == IS_OBJECT) {
1753
1.69k
    zend_object *obj = (zend_object*)ref;
1754
1755
1.69k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1756
1.69k
      int len;
1757
1.69k
      zval *table;
1758
1759
1.69k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1760
42
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1761
42
        n = len;
1762
42
        zv = table;
1763
45
        for (; n != 0; n--) {
1764
3
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1765
3
          zval *entry = (zval*) Z_PTR_P(zv);
1766
3
          if (Z_OPT_COLLECTABLE_P(entry)) {
1767
3
            ref = Z_COUNTED_P(entry);
1768
3
            GC_STACK_PUSH(ref);
1769
3
          }
1770
3
          zv++;
1771
3
        }
1772
42
      }
1773
1774
1.69k
      ht = obj->handlers->get_gc(obj, &table, &len);
1775
1.69k
      n = len;
1776
1.69k
      zv = table;
1777
1.69k
      if (UNEXPECTED(ht)) {
1778
802
        for (; n != 0; n--) {
1779
338
          if (Z_COLLECTABLE_P(zv)) {
1780
185
            ref = Z_COUNTED_P(zv);
1781
185
            GC_STACK_PUSH(ref);
1782
185
          }
1783
338
          zv++;
1784
338
        }
1785
464
        if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
1786
3
          GC_TRACE_REF(ht, "removing from buffer");
1787
3
          GC_REMOVE_FROM_BUFFER(ht);
1788
3
        }
1789
464
        goto handle_ht;
1790
464
      }
1791
1792
1.37k
handle_zvals:
1793
1.45k
      for (; n != 0; n--) {
1794
1.25k
        if (Z_COLLECTABLE_P(zv)) {
1795
1.17k
          ref = Z_COUNTED_P(zv);
1796
1.17k
          zv++;
1797
1.55k
          while (--n) {
1798
380
            if (Z_COLLECTABLE_P(zv)) {
1799
353
              zend_refcounted *ref = Z_COUNTED_P(zv);
1800
353
              GC_STACK_PUSH(ref);
1801
353
            }
1802
380
            zv++;
1803
380
          }
1804
1.17k
          goto tail_call;
1805
1.17k
        }
1806
81
        zv++;
1807
81
      }
1808
1.37k
    }
1809
1.69k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1810
166
    ht = (zend_array*)ref;
1811
1812
630
handle_ht:
1813
630
    n = ht->nNumUsed;
1814
630
    if (HT_IS_PACKED(ht)) {
1815
142
      zv = ht->arPacked;
1816
142
      goto handle_zvals;
1817
142
    }
1818
1819
488
    p = ht->arData;
1820
536
    for (; n != 0; n--) {
1821
506
      zv = &p->val;
1822
506
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1823
93
        zv = Z_INDIRECT_P(zv);
1824
93
      }
1825
506
      if (Z_COLLECTABLE_P(zv)) {
1826
458
        ref = Z_COUNTED_P(zv);
1827
458
        p++;
1828
632
        while (--n) {
1829
174
          zv = &p->val;
1830
174
          if (Z_TYPE_P(zv) == IS_INDIRECT) {
1831
15
            zv = Z_INDIRECT_P(zv);
1832
15
          }
1833
174
          if (Z_COLLECTABLE_P(zv)) {
1834
168
            zend_refcounted *ref = Z_COUNTED_P(zv);
1835
168
            GC_STACK_PUSH(ref);
1836
168
          }
1837
174
          p++;
1838
174
        }
1839
458
        goto tail_call;
1840
458
      }
1841
48
      p++;
1842
48
    }
1843
488
  }
1844
1845
1.83k
next:
1846
1.83k
  ref = GC_STACK_POP();
1847
1.83k
  if (ref) {
1848
709
    goto tail_call;
1849
709
  }
1850
1851
1.12k
  return count;
1852
1.83k
}
1853
1854
static void zend_get_gc_buffer_release(void);
1855
static void zend_gc_check_root_tmpvars(void);
1856
static void zend_gc_remove_root_tmpvars(void);
1857
1858
static zend_internal_function gc_destructor_fiber;
1859
1860
static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void)
1861
0
{
1862
0
  zend_error_noreturn(E_ERROR, "Unable to create destructor fiber");
1863
0
}
1864
1865
static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void)
1866
0
{
1867
0
  zend_error_noreturn(E_ERROR, "Unable to start destructor fiber");
1868
0
}
1869
1870
/* Call destructors for garbage in the buffer. */
1871
static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber)
1872
386
{
1873
386
  gc_root_buffer *current;
1874
386
  zend_refcounted *p;
1875
1876
  /* The root buffer might be reallocated during destructors calls,
1877
   * make sure to reload pointers as necessary. */
1878
2.59k
  while (idx != end) {
1879
2.23k
    current = GC_IDX2PTR(idx);
1880
2.23k
    if (GC_IS_DTOR_GARBAGE(current->ref)) {
1881
1.01k
      p = GC_GET_PTR(current->ref);
1882
      /* Mark this is as a normal root for the next GC run */
1883
1.01k
      current->ref = p;
1884
      /* Double check that the destructor hasn't been called yet. It
1885
       * could have already been invoked indirectly by some other
1886
       * destructor. */
1887
1.01k
      if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1888
1.01k
        if (fiber != NULL) {
1889
90
          GC_G(dtor_idx) = idx;
1890
90
        }
1891
1.01k
        zend_object *obj = (zend_object*)p;
1892
1.01k
        GC_TRACE_REF(obj, "calling destructor");
1893
1.01k
        GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
1894
1.01k
        GC_ADDREF(obj);
1895
1.01k
        obj->handlers->dtor_obj(obj);
1896
1.01k
        GC_TRACE_REF(obj, "returned from destructor");
1897
1.01k
        GC_DELREF(obj);
1898
1.01k
        if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) {
1899
          /* We resumed after suspension */
1900
24
          gc_check_possible_root((zend_refcounted*)&obj->gc);
1901
24
          return FAILURE;
1902
24
        }
1903
1.01k
      }
1904
1.01k
    }
1905
2.20k
    idx++;
1906
2.20k
  }
1907
1908
362
  return SUCCESS;
1909
386
}
1910
1911
static zend_fiber *gc_create_destructor_fiber(void)
1912
60
{
1913
60
  zval zobj;
1914
60
  zend_fiber *fiber;
1915
1916
60
  GC_TRACE("starting destructor fiber");
1917
1918
60
  if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) {
1919
0
    gc_create_destructor_fiber_error();
1920
0
  }
1921
1922
60
  fiber = (zend_fiber *)Z_OBJ(zobj);
1923
60
  fiber->fci.size = sizeof(fiber->fci);
1924
60
  fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber;
1925
1926
60
  GC_G(dtor_fiber) = fiber;
1927
1928
60
  if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) {
1929
0
    gc_start_destructor_fiber_error();
1930
0
  }
1931
1932
60
  return fiber;
1933
60
}
1934
1935
static zend_never_inline void gc_call_destructors_in_fiber(uint32_t end)
1936
36
{
1937
36
  ZEND_ASSERT(!GC_G(dtor_fiber_running));
1938
1939
36
  zend_fiber *fiber = GC_G(dtor_fiber);
1940
1941
36
  GC_G(dtor_idx) = GC_FIRST_ROOT;
1942
36
  GC_G(dtor_end) = GC_G(first_unused);
1943
1944
36
  if (UNEXPECTED(!fiber)) {
1945
36
    fiber = gc_create_destructor_fiber();
1946
36
  } else {
1947
0
    zend_fiber_resume(fiber, NULL, NULL);
1948
0
  }
1949
1950
60
  for (;;) {
1951
    /* At this point, fiber has executed until suspension */
1952
60
    GC_TRACE("resumed from destructor fiber");
1953
1954
60
    if (UNEXPECTED(GC_G(dtor_fiber_running))) {
1955
      /* Fiber was suspended by a destructor. Start a new one for the
1956
       * remaining destructors. */
1957
24
      GC_TRACE("destructor fiber suspended by destructor");
1958
24
      GC_G(dtor_fiber) = NULL;
1959
24
      GC_G(dtor_idx)++;
1960
      /* We do not own the fiber anymore. It may be collected if the
1961
       * application does not reference it. */
1962
24
      zend_object_release(&fiber->std);
1963
24
      fiber = gc_create_destructor_fiber();
1964
24
      continue;
1965
36
    } else {
1966
      /* Fiber suspended itself after calling all destructors */
1967
36
      GC_TRACE("destructor fiber suspended itself");
1968
36
      break;
1969
36
    }
1970
60
  }
1971
36
}
1972
1973
/* Perform a garbage collection run. The default implementation of gc_collect_cycles. */
1974
ZEND_API int zend_gc_collect_cycles(void)
1975
165k
{
1976
165k
  int total_count = 0;
1977
165k
  bool should_rerun_gc = false;
1978
165k
  bool did_rerun_gc = false;
1979
1980
165k
  zend_hrtime_t start_time = zend_hrtime();
1981
165k
  if (GC_G(num_roots) && !GC_G(gc_active)) {
1982
10.4k
    zend_gc_remove_root_tmpvars();
1983
10.4k
  }
1984
1985
165k
rerun_gc:
1986
165k
  if (GC_G(num_roots)) {
1987
10.7k
    int count;
1988
10.7k
    gc_root_buffer *current, *last;
1989
10.7k
    zend_refcounted *p;
1990
10.7k
    uint32_t gc_flags = 0;
1991
10.7k
    uint32_t idx, end;
1992
10.7k
    gc_stack stack;
1993
1994
10.7k
    stack.prev = NULL;
1995
10.7k
    stack.next = NULL;
1996
1997
10.7k
    if (GC_G(gc_active)) {
1998
17
      GC_G(collector_time) += zend_hrtime() - start_time;
1999
17
      return 0;
2000
17
    }
2001
2002
10.6k
    GC_TRACE("Collecting cycles");
2003
10.6k
    GC_G(gc_runs)++;
2004
10.6k
    GC_G(gc_active) = 1;
2005
2006
10.6k
    GC_TRACE("Marking roots");
2007
10.6k
    gc_mark_roots(&stack);
2008
10.6k
    GC_TRACE("Scanning roots");
2009
10.6k
    gc_scan_roots(&stack);
2010
2011
10.6k
    GC_TRACE("Collecting roots");
2012
10.6k
    count = gc_collect_roots(&gc_flags, &stack);
2013
2014
10.6k
    if (!GC_G(num_roots)) {
2015
      /* nothing to free */
2016
9.48k
      GC_TRACE("Nothing to free");
2017
9.48k
      gc_stack_free(&stack);
2018
9.48k
      GC_G(gc_active) = 0;
2019
9.48k
      goto finish;
2020
9.48k
    }
2021
2022
1.20k
    end = GC_G(first_unused);
2023
2024
1.20k
    if (gc_flags & GC_HAS_DESTRUCTORS) {
2025
362
      GC_TRACE("Calling destructors");
2026
2027
      /* During a destructor call, new externally visible references to nested data may
2028
       * be introduced. These references can be introduced in a way that does not
2029
       * modify any refcounts, so we have no real way to detect this situation
2030
       * short of rerunning full GC tracing. What we do instead is to only run
2031
       * destructors at this point and automatically re-run GC afterwards. */
2032
362
      should_rerun_gc = true;
2033
2034
      /* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
2035
       * color them purple. This serves a double purpose: First, they should be
2036
       * considered new potential roots for the next GC run. Second, it will prevent
2037
       * their removal from the root buffer by nested data removal. */
2038
362
      idx = GC_FIRST_ROOT;
2039
362
      current = GC_IDX2PTR(GC_FIRST_ROOT);
2040
2.63k
      while (idx != end) {
2041
2.27k
        if (GC_IS_GARBAGE(current->ref)) {
2042
2.27k
          p = GC_GET_PTR(current->ref);
2043
2.27k
          if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
2044
1.81k
            zend_object *obj = (zend_object *) p;
2045
1.81k
            if (obj->handlers->dtor_obj != zend_objects_destroy_object
2046
1.66k
              || obj->ce->destructor) {
2047
1.12k
              current->ref = GC_MAKE_DTOR_GARBAGE(obj);
2048
1.12k
              GC_REF_SET_COLOR(obj, GC_PURPLE);
2049
1.12k
            } else {
2050
687
              GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
2051
687
            }
2052
1.81k
          }
2053
2.27k
        }
2054
2.27k
        current++;
2055
2.27k
        idx++;
2056
2.27k
      }
2057
2058
      /* Remove nested data for objects on which a destructor will be called.
2059
       * This will not remove the objects themselves, as they have been colored
2060
       * purple. */
2061
362
      idx = GC_FIRST_ROOT;
2062
362
      current = GC_IDX2PTR(GC_FIRST_ROOT);
2063
2.63k
      while (idx != end) {
2064
2.27k
        if (GC_IS_DTOR_GARBAGE(current->ref)) {
2065
1.12k
          p = GC_GET_PTR(current->ref);
2066
1.12k
          count -= gc_remove_nested_data_from_buffer(p, current, &stack);
2067
1.12k
        }
2068
2.27k
        current++;
2069
2.27k
        idx++;
2070
2.27k
      }
2071
2072
      /* Actually call destructors. */
2073
362
      zend_hrtime_t dtor_start_time = zend_hrtime();
2074
362
      if (EXPECTED(!EG(active_fiber))) {
2075
326
        gc_call_destructors(GC_FIRST_ROOT, end, NULL);
2076
326
      } else {
2077
36
        gc_call_destructors_in_fiber(end);
2078
36
      }
2079
362
      GC_G(dtor_time) += zend_hrtime() - dtor_start_time;
2080
2081
362
      if (GC_G(gc_protected)) {
2082
        /* something went wrong */
2083
5
        zend_get_gc_buffer_release();
2084
5
        GC_G(collector_time) += zend_hrtime() - start_time;
2085
5
        return 0;
2086
5
      }
2087
362
    }
2088
2089
1.20k
    gc_stack_free(&stack);
2090
2091
    /* Destroy zvals. The root buffer may be reallocated. */
2092
1.20k
    GC_TRACE("Destroying zvals");
2093
1.20k
    zend_hrtime_t free_start_time = zend_hrtime();
2094
1.20k
    idx = GC_FIRST_ROOT;
2095
6.20k
    while (idx != end) {
2096
5.00k
      current = GC_IDX2PTR(idx);
2097
5.00k
      if (GC_IS_GARBAGE(current->ref)) {
2098
2.99k
        p = GC_GET_PTR(current->ref);
2099
2.99k
        GC_TRACE_REF(p, "destroying");
2100
2.99k
        if (GC_TYPE(p) == IS_OBJECT) {
2101
1.57k
          zend_object *obj = (zend_object*)p;
2102
2103
1.57k
          EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
2104
1.57k
          GC_TYPE_INFO(obj) = GC_NULL |
2105
1.57k
            (GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
2106
          /* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
2107
1.57k
          current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
2108
1.57k
          if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
2109
1.57k
            GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
2110
1.57k
            GC_ADDREF(obj);
2111
1.57k
            obj->handlers->free_obj(obj);
2112
1.57k
            GC_DELREF(obj);
2113
1.57k
          }
2114
2115
1.57k
          ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
2116
1.57k
        } else if (GC_TYPE(p) == IS_ARRAY) {
2117
1.41k
          zend_array *arr = (zend_array*)p;
2118
2119
1.41k
          GC_TYPE_INFO(arr) = GC_NULL |
2120
1.41k
            (GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);
2121
2122
          /* GC may destroy arrays with rc>1. This is valid and safe. */
2123
1.41k
          HT_ALLOW_COW_VIOLATION(arr);
2124
2125
1.41k
          zend_hash_destroy(arr);
2126
1.41k
        }
2127
2.99k
      }
2128
5.00k
      idx++;
2129
5.00k
    }
2130
2131
    /* Free objects */
2132
1.20k
    current = GC_IDX2PTR(GC_FIRST_ROOT);
2133
1.20k
    last = GC_IDX2PTR(end);
2134
6.20k
    while (current != last) {
2135
5.00k
      if (GC_IS_GARBAGE(current->ref)) {
2136
2.99k
        p = GC_GET_PTR(current->ref);
2137
2.99k
        GC_LINK_UNUSED(current);
2138
2.99k
        GC_G(num_roots)--;
2139
2.99k
        efree(p);
2140
2.99k
      }
2141
5.00k
      current++;
2142
5.00k
    }
2143
2144
1.20k
    GC_G(free_time) += zend_hrtime() - free_start_time;
2145
2146
1.20k
    GC_TRACE("Collection finished");
2147
1.20k
    GC_G(collected) += count;
2148
1.20k
    total_count += count;
2149
1.20k
    GC_G(gc_active) = 0;
2150
1.20k
  }
2151
2152
155k
  gc_compact();
2153
2154
  /* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
2155
   * up. We do this only once: If we encounter more destructors on the second run, we'll not
2156
   * run GC another time. */
2157
155k
  if (should_rerun_gc && !did_rerun_gc) {
2158
283
    did_rerun_gc = true;
2159
283
    goto rerun_gc;
2160
283
  }
2161
2162
165k
finish:
2163
165k
  zend_get_gc_buffer_release();
2164
2165
  /* Prevent GC from running during zend_gc_check_root_tmpvars, before
2166
   * gc_threshold is adjusted, as this may result in unbounded recursion */
2167
165k
  GC_G(gc_active) = 1;
2168
165k
  zend_gc_check_root_tmpvars();
2169
165k
  GC_G(gc_active) = 0;
2170
2171
165k
  GC_G(collector_time) += zend_hrtime() - start_time;
2172
165k
  return total_count;
2173
155k
}
2174
2175
ZEND_API void zend_gc_get_status(zend_gc_status *status)
2176
144
{
2177
144
  status->active = GC_G(gc_active);
2178
144
  status->gc_protected = GC_G(gc_protected);
2179
144
  status->full = GC_G(gc_full);
2180
144
  status->runs = GC_G(gc_runs);
2181
144
  status->collected = GC_G(collected);
2182
144
  status->threshold = GC_G(gc_threshold);
2183
144
  status->buf_size = GC_G(buf_size);
2184
144
  status->num_roots = GC_G(num_roots);
2185
144
  status->application_time = zend_hrtime() - GC_G(activated_at);
2186
144
  status->collector_time = GC_G(collector_time);
2187
144
  status->dtor_time = GC_G(dtor_time);
2188
144
  status->free_time = GC_G(free_time);
2189
144
}
2190
2191
5.19k
ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
2192
  /* There can only be one get_gc() call active at a time,
2193
   * so there only needs to be one buffer. */
2194
5.19k
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2195
5.19k
  gc_buffer->cur = gc_buffer->start;
2196
5.19k
  return gc_buffer;
2197
5.19k
}
2198
2199
1.12k
ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
2200
1.12k
  size_t old_capacity = gc_buffer->end - gc_buffer->start;
2201
1.12k
  size_t new_capacity = old_capacity == 0 ? 64 : old_capacity * 2;
2202
1.12k
  gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval));
2203
1.12k
  gc_buffer->end = gc_buffer->start + new_capacity;
2204
1.12k
  gc_buffer->cur = gc_buffer->start + old_capacity;
2205
1.12k
}
2206
2207
165k
static void zend_get_gc_buffer_release(void) {
2208
165k
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2209
165k
  efree(gc_buffer->start);
2210
165k
  gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
2211
165k
}
2212
2213
/* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
2214
 * cycles. However, there are some rare exceptions where this is possible, in which case we rely
2215
 * on the producing code to root the value. If a GC run occurs between the rooting and consumption
2216
 * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
2217
165k
static void zend_gc_check_root_tmpvars(void) {
2218
165k
  zend_execute_data *ex = EG(current_execute_data);
2219
166k
  for (; ex; ex = ex->prev_execute_data) {
2220
1.73k
    zend_function *func = ex->func;
2221
1.73k
    if (!func || !ZEND_USER_CODE(func->type)) {
2222
825
      continue;
2223
825
    }
2224
2225
914
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2226
2.30k
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2227
1.53k
      const zend_live_range *range = &func->op_array.live_range[i];
2228
1.53k
      if (range->start > op_num) {
2229
144
        break;
2230
144
      }
2231
1.39k
      if (range->end <= op_num) {
2232
1.26k
        continue;
2233
1.26k
      }
2234
2235
128
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2236
128
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2237
69
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2238
69
        zval *var = ZEND_CALL_VAR(ex, var_num);
2239
69
        if (Z_COLLECTABLE_P(var)) {
2240
63
          gc_check_possible_root(Z_COUNTED_P(var));
2241
63
        }
2242
69
      }
2243
128
    }
2244
914
  }
2245
165k
}
2246
2247
10.4k
static void zend_gc_remove_root_tmpvars(void) {
2248
10.4k
  zend_execute_data *ex = EG(current_execute_data);
2249
11.8k
  for (; ex; ex = ex->prev_execute_data) {
2250
1.42k
    zend_function *func = ex->func;
2251
1.42k
    if (!func || !ZEND_USER_CODE(func->type)) {
2252
699
      continue;
2253
699
    }
2254
2255
723
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2256
1.95k
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2257
1.36k
      const zend_live_range *range = &func->op_array.live_range[i];
2258
1.36k
      if (range->start > op_num) {
2259
132
        break;
2260
132
      }
2261
1.23k
      if (range->end <= op_num) {
2262
1.16k
        continue;
2263
1.16k
      }
2264
2265
63
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2266
63
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2267
63
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2268
63
        zval *var = ZEND_CALL_VAR(ex, var_num);
2269
63
        if (Z_COLLECTABLE_P(var)) {
2270
63
          GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var));
2271
63
        }
2272
63
      }
2273
63
    }
2274
723
  }
2275
10.4k
}
2276
2277
#if GC_BENCH
2278
void gc_bench_print(void)
2279
{
2280
  fprintf(stderr, "GC Statistics\n");
2281
  fprintf(stderr, "-------------\n");
2282
  fprintf(stderr, "Runs:               %d\n", GC_G(gc_runs));
2283
  fprintf(stderr, "Collected:          %d\n", GC_G(collected));
2284
  fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length));
2285
  fprintf(stderr, "Root buffer peak:   %d\n\n", GC_G(root_buf_peak));
2286
  fprintf(stderr, "      Possible            Remove from  Marked\n");
2287
  fprintf(stderr, "        Root    Buffered     buffer     grey\n");
2288
  fprintf(stderr, "      --------  --------  -----------  ------\n");
2289
  fprintf(stderr, "ZVAL  %8d  %8d  %9d  %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey));
2290
}
2291
#endif
2292
2293
#ifdef ZTS
2294
size_t zend_gc_globals_size(void)
2295
{
2296
  return sizeof(zend_gc_globals);
2297
}
2298
#endif
2299
2300
static ZEND_FUNCTION(gc_destructor_fiber)
2301
60
{
2302
60
  uint32_t idx, end;
2303
2304
60
  zend_fiber *fiber = GC_G(dtor_fiber);
2305
60
  ZEND_ASSERT(fiber != NULL);
2306
60
  ZEND_ASSERT(fiber == EG(active_fiber));
2307
2308
60
  for (;;) {
2309
60
    GC_G(dtor_fiber_running) = true;
2310
2311
60
    idx = GC_G(dtor_idx);
2312
60
    end = GC_G(dtor_end);
2313
60
    if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) {
2314
      /* We resumed after being suspended by a destructor */
2315
24
      return;
2316
24
    }
2317
2318
    /* We have called all destructors. Suspend fiber until the next GC run
2319
     */
2320
36
    GC_G(dtor_fiber_running) = false;
2321
36
    zend_fiber_suspend(fiber, NULL, NULL);
2322
2323
36
    if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) {
2324
      /* Fiber is being destroyed by shutdown sequence */
2325
36
      if (GC_G(dtor_fiber) == fiber) {
2326
36
        GC_G(dtor_fiber) = NULL;
2327
36
      }
2328
36
      GC_DELREF(&fiber->std);
2329
36
      gc_check_possible_root((zend_refcounted*)&fiber->std.gc);
2330
36
      return;
2331
36
    }
2332
36
  }
2333
60
}
2334
2335
static zend_internal_function gc_destructor_fiber = {
2336
  .type = ZEND_INTERNAL_FUNCTION,
2337
  .fn_flags = ZEND_ACC_PUBLIC,
2338
  .handler = ZEND_FN(gc_destructor_fiber),
2339
};
2340
2341
void gc_init(void)
2342
2
{
2343
2
  gc_destructor_fiber.function_name = zend_string_init_interned(
2344
2
      "gc_destructor_fiber",
2345
2
      strlen("gc_destructor_fiber"),
2346
      true);
2347
2
}