Coverage Report

Created: 2026-04-01 06:49

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/php-src/Zend/zend_gc.c
Line
Count
Source
1
/*
2
   +----------------------------------------------------------------------+
3
   | Zend Engine                                                          |
4
   +----------------------------------------------------------------------+
5
   | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
6
   +----------------------------------------------------------------------+
7
   | This source file is subject to version 2.00 of the Zend license,     |
8
   | that is bundled with this package in the file LICENSE, and is        |
9
   | available through the world-wide-web at the following url:           |
10
   | http://www.zend.com/license/2_00.txt.                                |
11
   | If you did not receive a copy of the Zend license and are unable to  |
12
   | obtain it through the world-wide-web, please send a note to          |
13
   | license@zend.com so we can mail you a copy immediately.              |
14
   +----------------------------------------------------------------------+
15
   | Authors: David Wang <planetbeing@gmail.com>                          |
16
   |          Dmitry Stogov <dmitry@php.net>                              |
17
   +----------------------------------------------------------------------+
18
*/
19
20
/**
21
 * zend_gc_collect_cycles
22
 * ======================
23
 *
24
 * Colors and its meaning
25
 * ----------------------
26
 *
27
 * BLACK  (GC_BLACK)   - In use or free.
28
 * GREY   (GC_GREY)    - Possible member of cycle.
29
 * WHITE  (GC_WHITE)   - Member of garbage cycle.
30
 * PURPLE (GC_PURPLE)  - Possible root of cycle.
31
 *
32
 * Colors described in the paper but not used
33
 * ------------------------------------------
34
 *
35
 * GREEN - Acyclic
36
 * RED   - Candidate cycle undergoing
37
 * ORANGE - Candidate cycle awaiting epoch boundary.
38
 *
39
 *
40
 * Flow
41
 * =====
42
 *
43
 * The garbage collect cycle starts from 'gc_mark_roots', which traverses the
44
 * possible roots, and calls mark_grey for roots are marked purple with
45
 * depth-first traverse.
46
 *
47
 * After all possible roots are traversed and marked,
48
 * gc_scan_roots will be called, and each root will be called with
49
 * gc_scan(root->ref)
50
 *
51
 * gc_scan checks the colors of possible members.
52
 *
53
 * If the node is marked as grey and the refcount > 0
54
 *    gc_scan_black will be called on that node to scan it's subgraph.
55
 * otherwise (refcount == 0), it marks the node white.
56
 *
57
 * A node MAY be added to possible roots when ZEND_UNSET_VAR happens or
58
 * zend_assign_to_variable is called only when possible garbage node is
59
 * produced.
60
 * gc_possible_root() will be called to add the nodes to possible roots.
61
 *
62
 *
63
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64
 * get the object properties to scan.
65
 *
66
 *
67
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68
 */
69
#include "zend.h"
70
#include "zend_API.h"
71
#include "zend_compile.h"
72
#include "zend_errors.h"
73
#include "zend_fibers.h"
74
#include "zend_hrtime.h"
75
#include "zend_portability.h"
76
#include "zend_types.h"
77
#include "zend_weakrefs.h"
78
#include "zend_string.h"
79
#include "zend_exceptions.h"
80
81
#ifndef GC_BENCH
82
# define GC_BENCH 0
83
#endif
84
85
#ifndef ZEND_GC_DEBUG
86
# define ZEND_GC_DEBUG 0
87
#endif
88
89
/* GC_INFO layout */
90
2.90M
#define GC_ADDRESS  0x0fffffu
91
18.0M
#define GC_COLOR    0x300000u
92
93
#define GC_BLACK    0x000000u /* must be zero */
94
#define GC_WHITE    0x100000u
95
#define GC_GREY     0x200000u
96
#define GC_PURPLE   0x300000u
97
98
/* Debug tracing */
99
#if ZEND_GC_DEBUG > 1
100
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
101
# define GC_TRACE_REF(ref, format, ...) \
102
  do { \
103
    gc_trace_ref((zend_refcounted *) ref); \
104
    fprintf(stderr, format "\n", ##__VA_ARGS__); \
105
  } while (0)
106
# define GC_TRACE_SET_COLOR(ref, color) \
107
  GC_TRACE_REF(ref, "->%s", gc_color_name(color))
108
#else
109
# define GC_TRACE_REF(ref, format, ...)
110
# define GC_TRACE_SET_COLOR(ref, new_color)
111
# define GC_TRACE(str)
112
#endif
113
114
/* GC_INFO access */
115
#define GC_REF_ADDRESS(ref) \
116
2.90M
  (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
117
118
#define GC_REF_COLOR(ref) \
119
  (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
120
121
#define GC_REF_CHECK_COLOR(ref, color) \
122
12.5M
  ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
123
124
5.94M
#define GC_REF_SET_INFO(ref, info) do { \
125
5.94M
    GC_TYPE_INFO(ref) = \
126
5.94M
      (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
127
5.94M
      ((info) << GC_INFO_SHIFT); \
128
5.94M
  } while (0)
129
130
3.62M
#define GC_REF_SET_COLOR(ref, c) do { \
131
3.62M
    GC_TRACE_SET_COLOR(ref, c); \
132
3.62M
    GC_TYPE_INFO(ref) = \
133
3.62M
      (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
134
3.62M
      ((c) << GC_INFO_SHIFT); \
135
3.62M
  } while (0)
136
137
1.89M
#define GC_REF_SET_BLACK(ref) do { \
138
1.89M
    GC_TRACE_SET_COLOR(ref, GC_BLACK); \
139
1.89M
    GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
140
1.89M
  } while (0)
141
142
#define GC_REF_SET_PURPLE(ref) do { \
143
    GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
144
    GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
145
  } while (0)
146
147
/* bit stealing tags for gc_root_buffer.ref */
148
3.15M
#define GC_BITS    0x3
149
150
736k
#define GC_ROOT    0x0 /* possible root of circular garbage     */
151
3.13M
#define GC_UNUSED  0x1 /* part of linked list of unused buffers */
152
3.27M
#define GC_GARBAGE 0x2 /* garbage to delete                     */
153
15.3k
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */
154
155
#define GC_GET_PTR(ptr) \
156
124k
  ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
157
158
#define GC_IS_ROOT(ptr) \
159
736k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
160
#define GC_IS_UNUSED(ptr) \
161
138k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
162
#define GC_IS_GARBAGE(ptr) \
163
2.14M
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
164
#define GC_IS_DTOR_GARBAGE(ptr) \
165
11.9k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)
166
167
#define GC_MAKE_GARBAGE(ptr) \
168
1.12M
  ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
169
#define GC_MAKE_DTOR_GARBAGE(ptr) \
170
3.35k
  ((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
171
172
/* GC address conversion */
173
8.38M
#define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
174
3.00M
#define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))
175
176
/* Get the value to be placed in an unused buffer entry with the specified next unused list index */
177
2.99M
#define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
178
/* Get the index of the next item in the unused list from the given root buffer entry. */
179
832k
#define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
180
181
/* GC buffers */
182
1.41M
#define GC_INVALID           0
183
1.48M
#define GC_FIRST_ROOT        1
184
185
16
#define GC_DEFAULT_BUF_SIZE  (16 * 1024)
186
1
#define GC_BUF_GROW_STEP     (128 * 1024)
187
188
0
#define GC_MAX_UNCOMPRESSED  (512 * 1024)
189
2
#define GC_MAX_BUF_SIZE      0x40000000
190
191
16
#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
192
0
#define GC_THRESHOLD_STEP    10000
193
0
#define GC_THRESHOLD_MAX     1000000000
194
0
#define GC_THRESHOLD_TRIGGER 100
195
196
/* GC flags */
197
7.65k
#define GC_HAS_DESTRUCTORS  (1<<0)
198
199
/* Weak maps */
200
1.97k
#define Z_FROM_WEAKMAP_KEY    (1<<0)
201
1.92k
#define Z_FROM_WEAKMAP      (1<<1)
202
203
/* The WeakMap entry zv is reachable from roots by following the virtual
204
 * reference from the a WeakMap key to the entry */
205
#define GC_FROM_WEAKMAP_KEY(zv) \
206
416
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT))
207
208
775
#define GC_SET_FROM_WEAKMAP_KEY(zv) do {                    \
209
775
  zval *_z = (zv);                               \
210
775
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
211
775
} while (0)
212
213
787
#define GC_UNSET_FROM_WEAKMAP_KEY(zv) do {                    \
214
787
  zval *_z = (zv);                               \
215
787
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
216
787
} while (0)
217
218
/* The WeakMap entry zv is reachable from roots by following the reference from
219
 * the WeakMap */
220
#define GC_FROM_WEAKMAP(zv) \
221
1.46k
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT))
222
223
207
#define GC_SET_FROM_WEAKMAP(zv) do {                        \
224
207
  zval *_z = (zv);                               \
225
207
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
226
207
} while (0)
227
228
255
#define GC_UNSET_FROM_WEAKMAP(zv) do {                      \
229
255
  zval *_z = (zv);                               \
230
255
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
231
255
} while (0)
232
233
/* unused buffers */
234
235
/* Are there any unused root buffer entries? */
236
#define GC_HAS_UNUSED() \
237
873k
  (GC_G(unused) != GC_INVALID)
238
239
/* Get the next unused entry and remove it from the list */
240
#define GC_FETCH_UNUSED() \
241
832k
  gc_fetch_unused()
242
243
/* Add a root buffer entry to the unused list */
244
#define GC_LINK_UNUSED(root) \
245
2.99M
  gc_link_unused(root)
246
247
#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
248
  (GC_G(first_unused) < GC_G(gc_threshold))
249
#define GC_HAS_NEXT_UNUSED() \
250
873k
  (GC_G(first_unused) != GC_G(buf_size))
251
#define GC_FETCH_NEXT_UNUSED() \
252
2.16M
  gc_fetch_next_unused()
253
254
ZEND_API int (*gc_collect_cycles)(void);
255
256
/* The type of a root buffer entry.
257
 *
258
 * The lower two bits are used for flags and need to be masked out to
259
 * reconstruct a pointer.
260
 *
261
 * When a node in the root buffer is removed, the non-flag bits of the
262
 * unused entry are used to store the index of the next entry in the unused
263
 * list. */
264
typedef struct _gc_root_buffer {
265
  zend_refcounted  *ref;
266
} gc_root_buffer;
267
268
typedef struct _zend_gc_globals {
269
  /* The root buffer, which stores possible roots of reference cycles. It is
270
   * also used to store garbage to be collected at the end of a run.
271
   * A single array which is reallocated as necessary. */
272
  gc_root_buffer   *buf;
273
274
  bool         gc_enabled;
275
  bool         gc_active;        /* GC currently running, forbid nested GC */
276
  bool         gc_protected;     /* GC protected, forbid root additions */
277
  bool         gc_full;
278
279
  uint32_t          unused;     /* linked list of unused buffers    */
280
  uint32_t          first_unused;   /* first unused buffer              */
281
  uint32_t          gc_threshold;     /* GC collection threshold          */
282
  uint32_t          buf_size;     /* size of the GC buffer            */
283
  uint32_t          num_roots;    /* number of roots in GC buffer     */
284
285
  uint32_t gc_runs;         /* number of GC runs since reset */
286
  uint32_t collected;         /* number of collected nodes since reset */
287
288
  zend_hrtime_t activated_at;     /* the timestamp of the last reset */
289
  zend_hrtime_t collector_time;   /* time spent running GC (ns) */
290
  zend_hrtime_t dtor_time;      /* time spent calling destructors (ns) */
291
  zend_hrtime_t free_time;      /* time spent destroying nodes and freeing memory (ns) */
292
293
  uint32_t dtor_idx;      /* root buffer index */
294
  uint32_t dtor_end;
295
  zend_fiber *dtor_fiber;
296
  bool dtor_fiber_running;
297
298
#if GC_BENCH
299
  uint32_t root_buf_length;
300
  uint32_t root_buf_peak;
301
  uint32_t zval_possible_root;
302
  uint32_t zval_buffered;
303
  uint32_t zval_remove_from_buffer;
304
  uint32_t zval_marked_grey;
305
#endif
306
} zend_gc_globals;
307
308
#ifdef ZTS
309
static int gc_globals_id;
310
static size_t gc_globals_offset;
311
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
312
#else
313
41.9M
#define GC_G(v) (gc_globals.v)
314
static zend_gc_globals gc_globals;
315
#endif
316
317
#if GC_BENCH
318
# define GC_BENCH_INC(counter) GC_G(counter)++
319
# define GC_BENCH_DEC(counter) GC_G(counter)--
320
# define GC_BENCH_PEAK(peak, counter) do {    \
321
    if (GC_G(counter) > GC_G(peak)) {   \
322
      GC_G(peak) = GC_G(counter);     \
323
    }                   \
324
  } while (0)
325
#else
326
# define GC_BENCH_INC(counter)
327
# define GC_BENCH_DEC(counter)
328
# define GC_BENCH_PEAK(peak, counter)
329
#endif
330
331
332
4.77k
#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
333
334
typedef struct _gc_stack gc_stack;
335
336
/* The stack used for graph traversal is stored as a linked list of segments */
337
struct _gc_stack {
338
  gc_stack        *prev;
339
  gc_stack        *next;
340
  zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
341
};
342
343
#define GC_STACK_DCL(init) \
344
204k
  gc_stack *_stack = init; \
345
204k
  size_t    _top = 0;
346
347
#define GC_STACK_PUSH(ref) \
348
2.96M
  gc_stack_push(&_stack, &_top, ref);
349
350
#define GC_STACK_POP() \
351
3.16M
  gc_stack_pop(&_stack, &_top)
352
353
static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
354
21.8k
{
355
21.8k
  if (UNEXPECTED(!stack->next)) {
356
20.1k
    gc_stack *segment = emalloc(sizeof(gc_stack));
357
20.1k
    segment->prev = stack;
358
20.1k
    segment->next = NULL;
359
20.1k
    stack->next = segment;
360
20.1k
  }
361
21.8k
  return stack->next;
362
21.8k
}
363
364
static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
365
2.96M
{
366
2.96M
  if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
367
2.38k
    (*stack) = gc_stack_next(*stack);
368
2.38k
    (*top) = 0;
369
2.38k
  }
370
2.96M
  (*stack)->data[(*top)++] = ref;
371
2.96M
}
372
373
static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
374
3.16M
{
375
3.16M
  if (UNEXPECTED((*top) == 0)) {
376
206k
    if (!(*stack)->prev) {
377
204k
      return NULL;
378
204k
    } else {
379
2.38k
      (*stack) = (*stack)->prev;
380
2.38k
      (*top) = GC_STACK_SEGMENT_SIZE - 1;
381
2.38k
      return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
382
2.38k
    }
383
2.95M
  } else {
384
2.95M
    return (*stack)->data[--(*top)];
385
2.95M
  }
386
3.16M
}
387
388
static void gc_stack_free(gc_stack *stack)
389
22.9k
{
390
22.9k
  gc_stack *p = stack->next;
391
392
43.1k
  while (p) {
393
20.1k
    stack = p->next;
394
20.1k
    efree(p);
395
20.1k
    p = stack;
396
20.1k
  }
397
22.9k
}
398
399
/* Map a full index to a compressed index.
400
 *
401
 * The root buffer can have up to 2^30 entries, but we only have 20 bits to
402
 * store the index. So we use the 1<<19 bit as a compression flag and use the
403
 * other 19 bits to store the index modulo 2^19. */
404
static zend_always_inline uint32_t gc_compress(uint32_t idx)
405
3.00M
{
406
3.00M
  if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
407
3.00M
    return idx;
408
3.00M
  }
409
0
  return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
410
3.00M
}
411
412
/* Find the root buffer entry given a pointer and a compressed index.
413
 * Iterate through the root buffer in steps of 2^19 until the pointer
414
 * matches. */
415
static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
416
0
{
417
0
  gc_root_buffer *root = GC_IDX2PTR(idx);
418
419
0
  if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
420
0
    return root;
421
0
  }
422
423
0
  while (1) {
424
0
    idx += GC_MAX_UNCOMPRESSED;
425
0
    ZEND_ASSERT(idx < GC_G(first_unused));
426
0
    root = GC_IDX2PTR(idx);
427
0
    if (GC_GET_PTR(root->ref) == ref) {
428
0
      return root;
429
0
    }
430
0
  }
431
0
}
432
433
/* Get the index of the next unused root buffer entry, and remove it from the
434
 * unused list. GC_HAS_UNUSED() must be true before calling this. */
435
static zend_always_inline uint32_t gc_fetch_unused(void)
436
832k
{
437
832k
  uint32_t idx;
438
832k
  gc_root_buffer *root;
439
440
832k
  ZEND_ASSERT(GC_HAS_UNUSED());
441
832k
  idx = GC_G(unused);
442
832k
  root = GC_IDX2PTR(idx);
443
832k
  ZEND_ASSERT(GC_IS_UNUSED(root->ref));
444
832k
  GC_G(unused) = GC_LIST2IDX(root->ref);
445
832k
  return idx;
446
832k
}
447
448
/* Add a root buffer entry to the unused list */
449
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
450
2.99M
{
451
2.99M
  root->ref = GC_IDX2LIST(GC_G(unused));
452
2.99M
  GC_G(unused) = GC_PTR2IDX(root);
453
2.99M
}
454
455
static zend_always_inline uint32_t gc_fetch_next_unused(void)
456
2.16M
{
457
2.16M
  uint32_t idx;
458
459
2.16M
  ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
460
2.16M
  idx = GC_G(first_unused);
461
2.16M
  GC_G(first_unused) = GC_G(first_unused) + 1;
462
2.16M
  return idx;
463
2.16M
}
464
465
#if ZEND_GC_DEBUG > 1
466
static const char *gc_color_name(uint32_t color) {
467
  switch (color) {
468
    case GC_BLACK: return "black";
469
    case GC_WHITE: return "white";
470
    case GC_GREY: return "grey";
471
    case GC_PURPLE: return "purple";
472
    default: return "unknown";
473
  }
474
}
475
static void gc_trace_ref(zend_refcounted *ref) {
476
  if (GC_TYPE(ref) == IS_OBJECT) {
477
    zend_object *obj = (zend_object *) ref;
478
    fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
479
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
480
      gc_color_name(GC_REF_COLOR(ref)),
481
      obj->ce->name->val, obj->handle);
482
  } else if (GC_TYPE(ref) == IS_ARRAY) {
483
    zend_array *arr = (zend_array *) ref;
484
    fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
485
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
486
      gc_color_name(GC_REF_COLOR(ref)),
487
      zend_hash_num_elements(arr));
488
  } else {
489
    fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
490
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
491
      gc_color_name(GC_REF_COLOR(ref)),
492
      GC_TYPE(ref) == IS_REFERENCE
493
        ? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
494
  }
495
}
496
#endif
497
498
/* Mark a root buffer entry unused */
499
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
500
2.94M
{
501
2.94M
  GC_LINK_UNUSED(root);
502
2.94M
  GC_G(num_roots)--;
503
2.94M
  GC_BENCH_DEC(root_buf_length);
504
2.94M
}
505
506
static void root_buffer_dtor(zend_gc_globals *gc_globals)
507
0
{
508
0
  if (gc_globals->buf) {
509
0
    free(gc_globals->buf);
510
0
    gc_globals->buf = NULL;
511
0
  }
512
0
}
513
514
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
515
16
{
516
16
  gc_globals->gc_enabled = false;
517
16
  gc_globals->gc_active = false;
518
16
  gc_globals->gc_protected = true;
519
16
  gc_globals->gc_full = false;
520
521
16
  gc_globals->buf = NULL;
522
16
  gc_globals->unused = GC_INVALID;
523
16
  gc_globals->first_unused = GC_INVALID;
524
16
  gc_globals->gc_threshold = GC_INVALID;
525
16
  gc_globals->buf_size = GC_INVALID;
526
16
  gc_globals->num_roots = 0;
527
528
16
  gc_globals->gc_runs = 0;
529
16
  gc_globals->collected = 0;
530
16
  gc_globals->collector_time = 0;
531
16
  gc_globals->dtor_time = 0;
532
16
  gc_globals->free_time = 0;
533
16
  gc_globals->activated_at = 0;
534
535
16
  gc_globals->dtor_idx = GC_FIRST_ROOT;
536
16
  gc_globals->dtor_end = 0;
537
16
  gc_globals->dtor_fiber = NULL;
538
16
  gc_globals->dtor_fiber_running = false;
539
540
#if GC_BENCH
541
  gc_globals->root_buf_length = 0;
542
  gc_globals->root_buf_peak = 0;
543
  gc_globals->zval_possible_root = 0;
544
  gc_globals->zval_buffered = 0;
545
  gc_globals->zval_remove_from_buffer = 0;
546
  gc_globals->zval_marked_grey = 0;
547
#endif
548
16
}
549
550
void gc_globals_ctor(void)
551
16
{
552
#ifdef ZTS
553
  ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
554
#else
555
16
  gc_globals_ctor_ex(&gc_globals);
556
16
#endif
557
16
}
558
559
void gc_globals_dtor(void)
560
0
{
561
0
#ifndef ZTS
562
0
  root_buffer_dtor(&gc_globals);
563
0
#endif
564
0
}
565
566
void gc_reset(void)
567
224k
{
568
224k
  if (GC_G(buf)) {
569
224k
    GC_G(gc_active) = 0;
570
224k
    GC_G(gc_protected) = 0;
571
224k
    GC_G(gc_full) = 0;
572
224k
    GC_G(unused) = GC_INVALID;
573
224k
    GC_G(first_unused) = GC_FIRST_ROOT;
574
224k
    GC_G(num_roots) = 0;
575
576
224k
    GC_G(gc_runs) = 0;
577
224k
    GC_G(collected) = 0;
578
579
224k
    GC_G(collector_time) = 0;
580
224k
    GC_G(dtor_time) = 0;
581
224k
    GC_G(free_time) = 0;
582
583
224k
    GC_G(dtor_idx) = GC_FIRST_ROOT;
584
224k
    GC_G(dtor_end) = 0;
585
224k
    GC_G(dtor_fiber) = NULL;
586
224k
    GC_G(dtor_fiber_running) = false;
587
588
#if GC_BENCH
589
    GC_G(root_buf_length) = 0;
590
    GC_G(root_buf_peak) = 0;
591
    GC_G(zval_possible_root) = 0;
592
    GC_G(zval_buffered) = 0;
593
    GC_G(zval_remove_from_buffer) = 0;
594
    GC_G(zval_marked_grey) = 0;
595
#endif
596
224k
  }
597
598
224k
  GC_G(activated_at) = zend_hrtime();
599
224k
}
600
601
/* Enable/disable the garbage collector.
602
 * Initialize globals if necessary. */
603
ZEND_API bool gc_enable(bool enable)
604
97.7k
{
605
97.7k
  bool old_enabled = GC_G(gc_enabled);
606
97.7k
  GC_G(gc_enabled) = enable;
607
97.7k
  if (enable && !old_enabled && GC_G(buf) == NULL) {
608
16
    GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
609
16
    GC_G(buf)[0].ref = NULL;
610
16
    GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
611
16
    GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
612
16
    gc_reset();
613
16
  }
614
97.7k
  return old_enabled;
615
97.7k
}
616
617
ZEND_API bool gc_enabled(void)
618
121
{
619
121
  return GC_G(gc_enabled);
620
121
}
621
622
/* Protect the GC root buffer (prevent additions) */
623
ZEND_API bool gc_protect(bool protect)
624
26.0k
{
625
26.0k
  bool old_protected = GC_G(gc_protected);
626
26.0k
  GC_G(gc_protected) = protect;
627
26.0k
  return old_protected;
628
26.0k
}
629
630
ZEND_API bool gc_protected(void)
631
0
{
632
0
  return GC_G(gc_protected);
633
0
}
634
635
static void gc_grow_root_buffer(void)
636
1
{
637
1
  size_t new_size;
638
639
1
  if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
640
0
    if (!GC_G(gc_full)) {
641
0
      zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
642
0
      GC_G(gc_active) = 1;
643
0
      GC_G(gc_protected) = 1;
644
0
      GC_G(gc_full) = 1;
645
0
      return;
646
0
    }
647
0
  }
648
1
  if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
649
1
    new_size = GC_G(buf_size) * 2;
650
1
  } else {
651
0
    new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
652
0
  }
653
1
  if (new_size > GC_MAX_BUF_SIZE) {
654
0
    new_size = GC_MAX_BUF_SIZE;
655
0
  }
656
1
  GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
657
1
  GC_G(buf_size) = new_size;
658
1
}
659
660
/* Adjust the GC activation threshold given the number of nodes collected by the last run */
661
static void gc_adjust_threshold(int count)
662
0
{
663
0
  uint32_t new_threshold;
664
665
  /* TODO Very simple heuristic for dynamic GC buffer resizing:
666
   * If there are "too few" collections, increase the collection threshold
667
   * by a fixed step */
668
0
  if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
669
    /* increase */
670
0
    if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
671
0
      new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
672
0
      if (new_threshold > GC_THRESHOLD_MAX) {
673
0
        new_threshold = GC_THRESHOLD_MAX;
674
0
      }
675
0
      if (new_threshold > GC_G(buf_size)) {
676
0
        gc_grow_root_buffer();
677
0
      }
678
0
      if (new_threshold <= GC_G(buf_size)) {
679
0
        GC_G(gc_threshold) = new_threshold;
680
0
      }
681
0
    }
682
0
  } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
683
0
    new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
684
0
    if (new_threshold < GC_THRESHOLD_DEFAULT) {
685
0
      new_threshold = GC_THRESHOLD_DEFAULT;
686
0
    }
687
0
    GC_G(gc_threshold) = new_threshold;
688
0
  }
689
0
}
690
691
/* Perform a GC run and then add a node as a possible root. */
692
static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
693
0
{
694
0
  uint32_t idx;
695
0
  gc_root_buffer *newRoot;
696
697
0
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
698
0
  ZEND_ASSERT(GC_INFO(ref) == 0);
699
700
0
  if (GC_G(gc_enabled) && !GC_G(gc_active)) {
701
0
    GC_ADDREF(ref);
702
0
    gc_adjust_threshold(gc_collect_cycles());
703
0
    if (UNEXPECTED(GC_DELREF(ref) == 0)) {
704
0
      rc_dtor_func(ref);
705
0
      return;
706
0
    } else if (UNEXPECTED(GC_INFO(ref))) {
707
0
      return;
708
0
    }
709
0
  }
710
711
0
  if (GC_HAS_UNUSED()) {
712
0
    idx = GC_FETCH_UNUSED();
713
0
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
714
0
    idx = GC_FETCH_NEXT_UNUSED();
715
0
  } else {
716
0
    gc_grow_root_buffer();
717
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
718
0
      return;
719
0
    }
720
0
    idx = GC_FETCH_NEXT_UNUSED();
721
0
  }
722
723
0
  newRoot = GC_IDX2PTR(idx);
724
0
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
725
0
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
726
727
0
  idx = gc_compress(idx);
728
0
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
729
0
  GC_G(num_roots)++;
730
731
0
  GC_BENCH_INC(zval_buffered);
732
0
  GC_BENCH_INC(root_buf_length);
733
0
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
734
0
}
735
736
/* Add a possible root node to the buffer.
737
 * Maybe perform a GC run. */
738
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
739
2.23M
{
740
2.23M
  uint32_t idx;
741
2.23M
  gc_root_buffer *newRoot;
742
743
2.23M
  if (UNEXPECTED(GC_G(gc_protected))) {
744
109k
    return;
745
109k
  }
746
747
2.12M
  GC_BENCH_INC(zval_possible_root);
748
749
2.12M
  if (EXPECTED(GC_HAS_UNUSED())) {
750
832k
    idx = GC_FETCH_UNUSED();
751
1.29M
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
752
1.29M
    idx = GC_FETCH_NEXT_UNUSED();
753
1.29M
  } else {
754
0
    gc_possible_root_when_full(ref);
755
0
    return;
756
0
  }
757
758
2.12M
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
759
2.12M
  ZEND_ASSERT(GC_INFO(ref) == 0);
760
761
2.12M
  newRoot = GC_IDX2PTR(idx);
762
2.12M
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
763
2.12M
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
764
765
2.12M
  idx = gc_compress(idx);
766
2.12M
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
767
2.12M
  GC_G(num_roots)++;
768
769
2.12M
  GC_BENCH_INC(zval_buffered);
770
2.12M
  GC_BENCH_INC(root_buf_length);
771
2.12M
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
772
2.12M
}
773
774
/* Add an extra root during a GC run */
775
static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref)
776
37
{
777
37
  uint32_t idx;
778
37
  gc_root_buffer *newRoot;
779
780
37
  if (EXPECTED(GC_HAS_UNUSED())) {
781
0
    idx = GC_FETCH_UNUSED();
782
37
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
783
37
    idx = GC_FETCH_NEXT_UNUSED();
784
37
  } else {
785
0
    gc_grow_root_buffer();
786
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
787
      /* TODO: can this really happen? */
788
0
      return;
789
0
    }
790
0
    idx = GC_FETCH_NEXT_UNUSED();
791
0
  }
792
793
37
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
794
37
  ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0);
795
796
37
  newRoot = GC_IDX2PTR(idx);
797
37
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
798
799
37
  idx = gc_compress(idx);
800
37
  GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref));
801
37
  GC_G(num_roots)++;
802
803
37
  GC_BENCH_INC(zval_buffered);
804
37
  GC_BENCH_INC(root_buf_length);
805
37
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
806
37
}
807
808
/* Remove a node from the root buffer given its compressed index */
809
static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
810
0
{
811
0
  gc_root_buffer *root = gc_decompress(ref, idx);
812
0
  gc_remove_from_roots(root);
813
0
}
814
815
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
816
2.89M
{
817
2.89M
  gc_root_buffer *root;
818
2.89M
  uint32_t idx = GC_REF_ADDRESS(ref);
819
820
2.89M
  GC_BENCH_INC(zval_remove_from_buffer);
821
822
2.89M
  if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
823
1.87M
    GC_TRACE_SET_COLOR(ref, GC_BLACK);
824
1.87M
  }
825
2.89M
  GC_REF_SET_INFO(ref, 0);
826
827
  /* Perform decompression only in case of large buffers */
828
2.89M
  if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
829
0
    gc_remove_compressed(ref, idx);
830
0
    return;
831
0
  }
832
833
2.89M
  ZEND_ASSERT(idx);
834
2.89M
  root = GC_IDX2PTR(idx);
835
2.89M
  gc_remove_from_roots(root);
836
2.89M
}
837
838
/* Mark all nodes reachable from ref as black (live). Restore the reference
839
 * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan.
840
 * To implement a depth-first search, discovered nodes are added to a stack
841
 * which is processed iteratively. */
842
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
843
35.1k
{
844
35.1k
  HashTable *ht;
845
35.1k
  Bucket *p;
846
35.1k
  zval *zv;
847
35.1k
  uint32_t n;
848
35.1k
  GC_STACK_DCL(stack);
849
850
194k
tail_call:
851
194k
  if (GC_TYPE(ref) == IS_OBJECT) {
852
37.8k
    zend_object *obj = (zend_object*)ref;
853
854
37.8k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
855
37.8k
      zval *table;
856
37.8k
      int len;
857
858
37.8k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
859
862
        zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
860
862
        n = len;
861
862
        zv = table;
862
1.61k
        for (; n != 0; n-=2) {
863
754
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
864
754
          zval *entry = (zval*) Z_PTR_P(zv);
865
754
          zval *weakmap = zv+1;
866
754
          ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
867
754
          if (Z_OPT_COLLECTABLE_P(entry)) {
868
663
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
869
663
            if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) {
870
              /* Weakmap was scanned in gc_mark_roots, we must
871
               * ensure that it's eventually scanned in
872
               * gc_scan_roots as well. */
873
50
              if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) {
874
29
                gc_extra_root(Z_COUNTED_P(weakmap));
875
29
              }
876
613
            } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) {
877
              /* Both the entry weakmap and key are BLACK, so we
878
               * can mark the entry BLACK as well.
879
               * !GC_FROM_WEAKMAP(entry) means that the weakmap
880
               * was already scanned black (or will not be
881
               * scanned), so it's our responsibility to mark the
882
               * entry */
883
591
              ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK));
884
591
              ref = Z_COUNTED_P(entry);
885
591
              GC_ADDREF(ref);
886
591
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
887
47
                GC_REF_SET_BLACK(ref);
888
47
                GC_STACK_PUSH(ref);
889
47
              }
890
591
            }
891
663
          }
892
754
          zv+=2;
893
754
        }
894
862
      }
895
896
37.8k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
897
130
        zend_weakmap_get_key_entry_gc(obj, &table, &len);
898
130
        n = len;
899
130
        zv = table;
900
334
        for (; n != 0; n-=2) {
901
204
          ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR);
902
204
          zval *key = zv;
903
204
          zval *entry = (zval*) Z_PTR_P(zv+1);
904
204
          if (Z_OPT_COLLECTABLE_P(entry)) {
905
131
            GC_UNSET_FROM_WEAKMAP(entry);
906
131
            if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) {
907
              /* Key was scanned in gc_mark_roots, we must
908
               * ensure that it's eventually scanned in
909
               * gc_scan_roots as well. */
910
34
              if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) {
911
8
                gc_extra_root(Z_COUNTED_P(key));
912
8
              }
913
97
            } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) {
914
              /* Both the entry weakmap and key are BLACK, so we
915
               * can mark the entry BLACK as well.
916
               * !GC_FROM_WEAKMAP_KEY(entry) means that the key
917
               * was already scanned black (or will not be
918
               * scanned), so it's our responsibility to mark the
919
               * entry */
920
73
              ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK));
921
73
              ref = Z_COUNTED_P(entry);
922
73
              GC_ADDREF(ref);
923
73
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
924
66
                GC_REF_SET_BLACK(ref);
925
66
                GC_STACK_PUSH(ref);
926
66
              }
927
73
            }
928
131
          }
929
204
          zv += 2;
930
204
        }
931
130
        goto next;
932
130
      }
933
934
37.6k
      ht = obj->handlers->get_gc(obj, &table, &len);
935
37.6k
      n = len;
936
37.6k
      zv = table;
937
37.6k
      if (UNEXPECTED(ht)) {
938
12.8k
        GC_ADDREF(ht);
939
12.8k
        if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
940
12.8k
          GC_REF_SET_BLACK(ht);
941
17.1k
          for (; n != 0; n--) {
942
4.28k
            if (Z_COLLECTABLE_P(zv)) {
943
856
              ref = Z_COUNTED_P(zv);
944
856
              GC_ADDREF(ref);
945
856
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
946
681
                GC_REF_SET_BLACK(ref);
947
681
                GC_STACK_PUSH(ref);
948
681
              }
949
856
            }
950
4.28k
            zv++;
951
4.28k
          }
952
12.8k
          goto handle_ht;
953
12.8k
        }
954
12.8k
      }
955
956
105k
handle_zvals:
957
2.48M
      for (; n != 0; n--) {
958
2.40M
        if (Z_COLLECTABLE_P(zv)) {
959
26.0k
          ref = Z_COUNTED_P(zv);
960
26.0k
          GC_ADDREF(ref);
961
26.0k
          if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
962
22.3k
            GC_REF_SET_BLACK(ref);
963
22.3k
            zv++;
964
98.6k
            while (--n) {
965
76.3k
              if (Z_COLLECTABLE_P(zv)) {
966
66.7k
                zend_refcounted *ref = Z_COUNTED_P(zv);
967
66.7k
                GC_ADDREF(ref);
968
66.7k
                if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
969
65.0k
                  GC_REF_SET_BLACK(ref);
970
65.0k
                  GC_STACK_PUSH(ref);
971
65.0k
                }
972
66.7k
              }
973
76.3k
              zv++;
974
76.3k
            }
975
22.3k
            goto tail_call;
976
22.3k
          }
977
26.0k
        }
978
2.37M
        zv++;
979
2.37M
      }
980
105k
    }
981
156k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
982
152k
    ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
983
152k
    ht = (zend_array*)ref;
984
165k
handle_ht:
985
165k
    n = ht->nNumUsed;
986
165k
    zv = ht->arPacked;
987
165k
    if (HT_IS_PACKED(ht)) {
988
80.9k
      goto handle_zvals;
989
80.9k
    }
990
991
84.7k
    p = (Bucket*)zv;
992
249k
    for (; n != 0; n--) {
993
231k
      zv = &p->val;
994
231k
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
995
6.23k
        zv = Z_INDIRECT_P(zv);
996
6.23k
      }
997
231k
      if (Z_COLLECTABLE_P(zv)) {
998
68.6k
        ref = Z_COUNTED_P(zv);
999
68.6k
        GC_ADDREF(ref);
1000
68.6k
        if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1001
66.9k
          GC_REF_SET_BLACK(ref);
1002
66.9k
          p++;
1003
132k
          while (--n) {
1004
65.0k
            zv = &p->val;
1005
65.0k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1006
396
              zv = Z_INDIRECT_P(zv);
1007
396
            }
1008
65.0k
            if (Z_COLLECTABLE_P(zv)) {
1009
3.73k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1010
3.73k
              GC_ADDREF(ref);
1011
3.73k
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1012
2.69k
                GC_REF_SET_BLACK(ref);
1013
2.69k
                GC_STACK_PUSH(ref);
1014
2.69k
              }
1015
3.73k
            }
1016
65.0k
            p++;
1017
65.0k
          }
1018
66.9k
          goto tail_call;
1019
66.9k
        }
1020
68.6k
      }
1021
164k
      p++;
1022
164k
    }
1023
84.7k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1024
3.40k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1025
1.40k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1026
1.40k
      GC_ADDREF(ref);
1027
1.40k
      if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1028
1.24k
        GC_REF_SET_BLACK(ref);
1029
1.24k
        goto tail_call;
1030
1.24k
      }
1031
1.40k
    }
1032
3.40k
  }
1033
1034
103k
next:
1035
103k
  ref = GC_STACK_POP();
1036
103k
  if (ref) {
1037
68.5k
    goto tail_call;
1038
68.5k
  }
1039
103k
}
1040
1041
/* Traverse the graph of nodes referred to by ref. Decrement the reference
1042
 * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan. */
1043
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
1044
68.1k
{
1045
68.1k
  HashTable *ht;
1046
68.1k
  Bucket *p;
1047
68.1k
  zval *zv;
1048
68.1k
  uint32_t n;
1049
68.1k
  GC_STACK_DCL(stack);
1050
1051
1.28M
tail_call:
1052
1.28M
  GC_BENCH_INC(zval_marked_grey);
1053
1054
1.28M
  if (GC_TYPE(ref) == IS_OBJECT) {
1055
647k
    zend_object *obj = (zend_object*)ref;
1056
1057
647k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1058
647k
      zval *table;
1059
647k
      int len;
1060
1061
647k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1062
1.19k
        zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
1063
1.19k
        n = len;
1064
1.19k
        zv = table;
1065
2.06k
        for (; n != 0; n-=2) {
1066
871
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1067
871
          zval *entry = (zval*) Z_PTR_P(zv);
1068
871
          zval *weakmap = zv+1;
1069
871
          ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
1070
871
          if (Z_COLLECTABLE_P(entry)) {
1071
775
            GC_SET_FROM_WEAKMAP_KEY(entry);
1072
775
            ref = Z_COUNTED_P(entry);
1073
            /* Only DELREF if the contribution from the weakmap has
1074
             * not been cancelled yet */
1075
775
            if (!GC_FROM_WEAKMAP(entry)) {
1076
681
              GC_DELREF(ref);
1077
681
            }
1078
775
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1079
77
              GC_REF_SET_COLOR(ref, GC_GREY);
1080
77
              GC_STACK_PUSH(ref);
1081
77
            }
1082
775
          }
1083
871
          zv+=2;
1084
871
        }
1085
1.19k
      }
1086
1087
647k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1088
191
        zend_weakmap_get_entry_gc(obj, &table, &len);
1089
191
        n = len;
1090
191
        zv = table;
1091
471
        for (; n != 0; n--) {
1092
280
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1093
280
          zval *entry = (zval*) Z_PTR_P(zv);
1094
280
          if (Z_COLLECTABLE_P(entry)) {
1095
207
            GC_SET_FROM_WEAKMAP(entry);
1096
207
            ref = Z_COUNTED_P(entry);
1097
            /* Only DELREF if the contribution from the weakmap key
1098
             * has not been cancelled yet */
1099
207
            if (!GC_FROM_WEAKMAP_KEY(entry)) {
1100
107
              GC_DELREF(ref);
1101
107
            }
1102
207
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1103
83
              GC_REF_SET_COLOR(ref, GC_GREY);
1104
83
              GC_STACK_PUSH(ref);
1105
83
            }
1106
207
          }
1107
280
          zv++;
1108
280
        }
1109
191
        goto next;
1110
191
      }
1111
1112
647k
      ht = obj->handlers->get_gc(obj, &table, &len);
1113
647k
      n = len;
1114
647k
      zv = table;
1115
647k
      if (UNEXPECTED(ht)) {
1116
609k
        GC_DELREF(ht);
1117
609k
        if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1118
609k
          GC_REF_SET_COLOR(ht, GC_GREY);
1119
851k
          for (; n != 0; n--) {
1120
241k
            if (Z_COLLECTABLE_P(zv)) {
1121
237k
              ref = Z_COUNTED_P(zv);
1122
237k
              GC_DELREF(ref);
1123
237k
              if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1124
233k
                GC_REF_SET_COLOR(ref, GC_GREY);
1125
233k
                GC_STACK_PUSH(ref);
1126
233k
              }
1127
237k
            }
1128
241k
            zv++;
1129
241k
          }
1130
609k
          goto handle_ht;
1131
609k
        }
1132
609k
      }
1133
137k
handle_zvals:
1134
2.65M
      for (; n != 0; n--) {
1135
2.55M
        if (Z_COLLECTABLE_P(zv)) {
1136
72.6k
          ref = Z_COUNTED_P(zv);
1137
72.6k
          GC_DELREF(ref);
1138
72.6k
          if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1139
32.9k
            GC_REF_SET_COLOR(ref, GC_GREY);
1140
32.9k
            zv++;
1141
116k
            while (--n) {
1142
83.4k
              if (Z_COLLECTABLE_P(zv)) {
1143
68.2k
                zend_refcounted *ref = Z_COUNTED_P(zv);
1144
68.2k
                GC_DELREF(ref);
1145
68.2k
                if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1146
64.4k
                  GC_REF_SET_COLOR(ref, GC_GREY);
1147
64.4k
                  GC_STACK_PUSH(ref);
1148
64.4k
                }
1149
68.2k
              }
1150
83.4k
              zv++;
1151
83.4k
            }
1152
32.9k
            goto tail_call;
1153
32.9k
          }
1154
72.6k
        }
1155
2.52M
        zv++;
1156
2.52M
      }
1157
137k
    }
1158
647k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1159
618k
    ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
1160
618k
    ht = (zend_array*)ref;
1161
1.22M
handle_ht:
1162
1.22M
    n = ht->nNumUsed;
1163
1.22M
    if (HT_IS_PACKED(ht)) {
1164
100k
            zv = ht->arPacked;
1165
100k
            goto handle_zvals;
1166
100k
    }
1167
1168
1.12M
    p = ht->arData;
1169
2.07M
    for (; n != 0; n--) {
1170
1.31M
      zv = &p->val;
1171
1.31M
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1172
571k
        zv = Z_INDIRECT_P(zv);
1173
571k
      }
1174
1.31M
      if (Z_COLLECTABLE_P(zv)) {
1175
467k
        ref = Z_COUNTED_P(zv);
1176
467k
        GC_DELREF(ref);
1177
467k
        if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1178
369k
          GC_REF_SET_COLOR(ref, GC_GREY);
1179
369k
          p++;
1180
1.28M
          while (--n) {
1181
912k
            zv = &p->val;
1182
912k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1183
98.3k
              zv = Z_INDIRECT_P(zv);
1184
98.3k
            }
1185
912k
            if (Z_COLLECTABLE_P(zv)) {
1186
701k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1187
701k
              GC_DELREF(ref);
1188
701k
              if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1189
509k
                GC_REF_SET_COLOR(ref, GC_GREY);
1190
509k
                GC_STACK_PUSH(ref);
1191
509k
              }
1192
701k
            }
1193
912k
            p++;
1194
912k
          }
1195
369k
          goto tail_call;
1196
369k
        }
1197
467k
      }
1198
942k
      p++;
1199
942k
    }
1200
1.12M
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1201
15.5k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1202
12.3k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1203
12.3k
      GC_DELREF(ref);
1204
12.3k
      if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1205
3.87k
        GC_REF_SET_COLOR(ref, GC_GREY);
1206
3.87k
        goto tail_call;
1207
3.87k
      }
1208
12.3k
    }
1209
15.5k
  }
1210
1211
875k
next:
1212
875k
  ref = GC_STACK_POP();
1213
875k
  if (ref) {
1214
807k
    goto tail_call;
1215
807k
  }
1216
875k
}
1217
1218
/* Two-Finger compaction algorithm */
1219
static void gc_compact(void)
1220
666k
{
1221
666k
  if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
1222
320k
    if (GC_G(num_roots)) {
1223
10.4k
      gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
1224
10.4k
      gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
1225
10.4k
      gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
1226
10.4k
      uint32_t idx;
1227
10.4k
      zend_refcounted *p;
1228
1229
19.8k
      while (free < scan) {
1230
108k
        while (!GC_IS_UNUSED(free->ref)) {
1231
95.2k
          free++;
1232
95.2k
        }
1233
30.1k
        while (GC_IS_UNUSED(scan->ref)) {
1234
16.9k
          scan--;
1235
16.9k
        }
1236
13.2k
        if (scan > free) {
1237
6.85k
          p = scan->ref;
1238
6.85k
          free->ref = p;
1239
6.85k
          p = GC_GET_PTR(p);
1240
6.85k
          idx = gc_compress(GC_PTR2IDX(free));
1241
6.85k
          GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
1242
6.85k
          free++;
1243
6.85k
          scan--;
1244
6.85k
          if (scan <= end) {
1245
3.81k
            break;
1246
3.81k
          }
1247
6.85k
        }
1248
13.2k
      }
1249
10.4k
    }
1250
320k
    GC_G(unused) = GC_INVALID;
1251
320k
    GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
1252
320k
  }
1253
666k
}
1254
1255
/* For all roots marked purple, traverse the graph, decrementing the reference
1256
 * count of their child nodes. Mark visited nodes grey so that they are not
1257
 * visited again. See MarkRoots() in Bacon & Rajan. */
1258
static void gc_mark_roots(gc_stack *stack)
1259
23.0k
{
1260
23.0k
  gc_root_buffer *current, *last;
1261
1262
23.0k
  gc_compact();
1263
1264
23.0k
  current = GC_IDX2PTR(GC_FIRST_ROOT);
1265
23.0k
  last = GC_IDX2PTR(GC_G(first_unused));
1266
268k
  while (current != last) {
1267
245k
    if (GC_IS_ROOT(current->ref)) {
1268
245k
      if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
1269
68.1k
        GC_REF_SET_COLOR(current->ref, GC_GREY);
1270
68.1k
        gc_mark_grey(current->ref, stack);
1271
68.1k
      }
1272
245k
    }
1273
245k
    current++;
1274
245k
  }
1275
23.0k
}
1276
1277
/* Traverse the reference graph of ref. Evaluate grey nodes and mark them
1278
 * black (to keep) or white (to free). Note that nodes initially marked white
1279
 * may later become black if they are visited from a live node.
1280
 * See Scan() in Bacon & Rajan. */
1281
static void gc_scan(zend_refcounted *ref, gc_stack *stack)
1282
68.2k
{
1283
68.2k
  HashTable *ht;
1284
68.2k
  Bucket *p;
1285
68.2k
  zval *zv;
1286
68.2k
  uint32_t n;
1287
68.2k
  GC_STACK_DCL(stack);
1288
1289
1.73M
tail_call:
1290
1.73M
  if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1291
310
    goto next;
1292
310
  }
1293
1294
1.73M
  if (GC_REFCOUNT(ref) > 0) {
1295
35.1k
    if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1296
35.1k
      GC_REF_SET_BLACK(ref);
1297
35.1k
      if (UNEXPECTED(!_stack->next)) {
1298
19.4k
        gc_stack_next(_stack);
1299
19.4k
      }
1300
      /* Split stack and reuse the tail */
1301
35.1k
      _stack->next->prev = NULL;
1302
35.1k
      gc_scan_black(ref, _stack->next);
1303
35.1k
      _stack->next->prev = _stack;
1304
35.1k
    }
1305
35.1k
    goto next;
1306
35.1k
  }
1307
1308
1.69M
  if (GC_TYPE(ref) == IS_OBJECT) {
1309
611k
    zend_object *obj = (zend_object*)ref;
1310
611k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1311
611k
      zval *table;
1312
611k
      int len;
1313
1314
611k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1315
391
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1316
391
        n = len;
1317
391
        zv = table;
1318
566
        for (; n != 0; n--) {
1319
175
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1320
175
          zval *entry = (zval*) Z_PTR_P(zv);
1321
175
          if (Z_OPT_COLLECTABLE_P(entry)) {
1322
119
            ref = Z_COUNTED_P(entry);
1323
119
            if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1324
25
              GC_REF_SET_COLOR(ref, GC_WHITE);
1325
25
              GC_STACK_PUSH(ref);
1326
25
            }
1327
119
          }
1328
175
          zv++;
1329
175
        }
1330
391
      }
1331
1332
611k
      ht = obj->handlers->get_gc(obj, &table, &len);
1333
611k
      n = len;
1334
611k
      zv = table;
1335
611k
      if (UNEXPECTED(ht)) {
1336
598k
        if (GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1337
598k
          GC_REF_SET_COLOR(ht, GC_WHITE);
1338
598k
          GC_STACK_PUSH((zend_refcounted *) ht);
1339
835k
          for (; n != 0; n--) {
1340
237k
            if (Z_COLLECTABLE_P(zv)) {
1341
236k
              ref = Z_COUNTED_P(zv);
1342
236k
              if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1343
232k
                GC_REF_SET_COLOR(ref, GC_WHITE);
1344
232k
                GC_STACK_PUSH(ref);
1345
232k
              }
1346
236k
            }
1347
237k
            zv++;
1348
237k
          }
1349
598k
          goto handle_ht;
1350
598k
        }
1351
598k
      }
1352
1353
54.6k
handle_zvals:
1354
312k
      for (; n != 0; n--) {
1355
270k
        if (Z_COLLECTABLE_P(zv)) {
1356
78.5k
          ref = Z_COUNTED_P(zv);
1357
78.5k
          if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1358
12.7k
            GC_REF_SET_COLOR(ref, GC_WHITE);
1359
12.7k
            zv++;
1360
25.9k
            while (--n) {
1361
13.1k
              if (Z_COLLECTABLE_P(zv)) {
1362
6.60k
                zend_refcounted *ref = Z_COUNTED_P(zv);
1363
6.60k
                if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1364
4.55k
                  GC_REF_SET_COLOR(ref, GC_WHITE);
1365
4.55k
                  GC_STACK_PUSH(ref);
1366
4.55k
                }
1367
6.60k
              }
1368
13.1k
              zv++;
1369
13.1k
            }
1370
12.7k
            goto tail_call;
1371
12.7k
          }
1372
78.5k
        }
1373
257k
        zv++;
1374
257k
      }
1375
54.6k
    }
1376
1.08M
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1377
1.07M
    ht = (HashTable *)ref;
1378
1.07M
    ZEND_ASSERT(ht != &EG(symbol_table));
1379
1380
1.67M
handle_ht:
1381
1.67M
    n = ht->nNumUsed;
1382
1.67M
    if (HT_IS_PACKED(ht)) {
1383
41.2k
            zv = ht->arPacked;
1384
41.2k
            goto handle_zvals;
1385
41.2k
    }
1386
1387
1.62M
    p = ht->arData;
1388
3.74M
    for (; n != 0; n--) {
1389
2.42M
      zv = &p->val;
1390
2.42M
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1391
1.22M
        zv = Z_INDIRECT_P(zv);
1392
1.22M
      }
1393
2.42M
      if (Z_COLLECTABLE_P(zv)) {
1394
979k
        ref = Z_COUNTED_P(zv);
1395
979k
        if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1396
303k
          GC_REF_SET_COLOR(ref, GC_WHITE);
1397
303k
          p++;
1398
1.15M
          while (--n) {
1399
851k
            zv = &p->val;
1400
851k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1401
98.0k
              zv = Z_INDIRECT_P(zv);
1402
98.0k
            }
1403
851k
            if (Z_COLLECTABLE_P(zv)) {
1404
699k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1405
699k
              if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1406
508k
                GC_REF_SET_COLOR(ref, GC_WHITE);
1407
508k
                GC_STACK_PUSH(ref);
1408
508k
              }
1409
699k
            }
1410
851k
            p++;
1411
851k
          }
1412
303k
          goto tail_call;
1413
303k
        }
1414
979k
      }
1415
2.11M
      p++;
1416
2.11M
    }
1417
1.62M
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1418
12.6k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1419
11.2k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1420
11.2k
      if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1421
3.32k
        GC_REF_SET_COLOR(ref, GC_WHITE);
1422
3.32k
        goto tail_call;
1423
3.32k
      }
1424
11.2k
    }
1425
12.6k
  }
1426
1427
1.41M
next:
1428
1.41M
  ref = GC_STACK_POP();
1429
1.41M
  if (ref) {
1430
1.34M
    goto tail_call;
1431
1.34M
  }
1432
1.41M
}
1433
1434
/* Scan all roots, coloring grey nodes black or white */
1435
static void gc_scan_roots(gc_stack *stack)
1436
23.0k
{
1437
23.0k
  uint32_t idx, end;
1438
23.0k
  gc_root_buffer *current;
1439
1440
  /* Root buffer might be reallocated during gc_scan,
1441
   * make sure to reload pointers. */
1442
23.0k
  idx = GC_FIRST_ROOT;
1443
23.0k
  end = GC_G(first_unused);
1444
268k
  while (idx != end) {
1445
245k
    current = GC_IDX2PTR(idx);
1446
245k
    if (GC_IS_ROOT(current->ref)) {
1447
245k
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1448
68.1k
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1449
68.1k
        gc_scan(current->ref, stack);
1450
68.1k
      }
1451
245k
    }
1452
245k
    idx++;
1453
245k
  }
1454
1455
  /* Scan extra roots added during gc_scan */
1456
23.0k
  while (idx != GC_G(first_unused)) {
1457
37
    current = GC_IDX2PTR(idx);
1458
37
    if (GC_IS_ROOT(current->ref)) {
1459
37
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1460
37
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1461
37
        gc_scan(current->ref, stack);
1462
37
      }
1463
37
    }
1464
37
    idx++;
1465
37
  }
1466
23.0k
}
1467
1468
/* Add a node to the buffer with the garbage flag, so that it will be
1469
 * destroyed and freed when the scan is complete. */
1470
static void gc_add_garbage(zend_refcounted *ref)
1471
873k
{
1472
873k
  uint32_t idx;
1473
873k
  gc_root_buffer *buf;
1474
1475
873k
  if (GC_HAS_UNUSED()) {
1476
0
    idx = GC_FETCH_UNUSED();
1477
873k
  } else if (GC_HAS_NEXT_UNUSED()) {
1478
873k
    idx = GC_FETCH_NEXT_UNUSED();
1479
873k
  } else {
1480
1
    gc_grow_root_buffer();
1481
1
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
1482
0
      return;
1483
0
    }
1484
1
    idx = GC_FETCH_NEXT_UNUSED();
1485
1
  }
1486
1487
873k
  buf = GC_IDX2PTR(idx);
1488
873k
  buf->ref = GC_MAKE_GARBAGE(ref);
1489
1490
873k
  idx = gc_compress(idx);
1491
873k
  GC_REF_SET_INFO(ref, idx | GC_BLACK);
1492
873k
  GC_G(num_roots)++;
1493
873k
}
1494
1495
/* Traverse the reference graph from ref, marking any white nodes as garbage. */
1496
static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
1497
29.2k
{
1498
29.2k
  int count = 0;
1499
29.2k
  HashTable *ht;
1500
29.2k
  Bucket *p;
1501
29.2k
  zval *zv;
1502
29.2k
  uint32_t n;
1503
29.2k
  GC_STACK_DCL(stack);
1504
1505
1.08M
tail_call:
1506
  /* don't count references for compatibility ??? */
1507
1.08M
  if (GC_TYPE(ref) != IS_REFERENCE) {
1508
1.07M
    count++;
1509
1.07M
  }
1510
1511
1.08M
  if (GC_TYPE(ref) == IS_OBJECT) {
1512
609k
    zend_object *obj = (zend_object*)ref;
1513
1514
609k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1515
609k
      int len;
1516
609k
      zval *table;
1517
1518
      /* optimization: color is GC_BLACK (0) */
1519
609k
      if (!GC_INFO(ref)) {
1520
414k
        gc_add_garbage(ref);
1521
414k
      }
1522
609k
      if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
1523
517k
       && (obj->handlers->dtor_obj != zend_objects_destroy_object
1524
517k
        || obj->ce->destructor != NULL)) {
1525
3.35k
        *flags |= GC_HAS_DESTRUCTORS;
1526
3.35k
      }
1527
1528
609k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1529
329
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1530
329
        n = len;
1531
329
        zv = table;
1532
446
        for (; n != 0; n--) {
1533
117
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1534
117
          zval *entry = (zval*) Z_PTR_P(zv);
1535
117
          if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) {
1536
74
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
1537
74
            GC_UNSET_FROM_WEAKMAP(entry);
1538
74
            ref = Z_COUNTED_P(entry);
1539
74
            GC_ADDREF(ref);
1540
74
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1541
12
              GC_REF_SET_BLACK(ref);
1542
12
              GC_STACK_PUSH(ref);
1543
12
            }
1544
74
          }
1545
117
          zv++;
1546
117
        }
1547
329
      }
1548
1549
609k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1550
61
        zend_weakmap_get_entry_gc(obj, &table, &len);
1551
61
        n = len;
1552
61
        zv = table;
1553
137
        for (; n != 0; n--) {
1554
76
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1555
76
          zval *entry = (zval*) Z_PTR_P(zv);
1556
76
          if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP(entry)) {
1557
50
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
1558
50
            GC_UNSET_FROM_WEAKMAP(entry);
1559
50
            ref = Z_COUNTED_P(entry);
1560
50
            GC_ADDREF(ref);
1561
50
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1562
44
              GC_REF_SET_BLACK(ref);
1563
44
              GC_STACK_PUSH(ref);
1564
44
            }
1565
50
          }
1566
76
          zv++;
1567
76
        }
1568
61
        goto next;
1569
61
      }
1570
1571
609k
      ht = obj->handlers->get_gc(obj, &table, &len);
1572
609k
      n = len;
1573
609k
      zv = table;
1574
609k
      if (UNEXPECTED(ht)) {
1575
597k
        GC_ADDREF(ht);
1576
597k
        if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) {
1577
597k
          GC_REF_SET_BLACK(ht);
1578
834k
          for (; n != 0; n--) {
1579
237k
            if (Z_COLLECTABLE_P(zv)) {
1580
236k
              ref = Z_COUNTED_P(zv);
1581
236k
              GC_ADDREF(ref);
1582
236k
              if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1583
232k
                GC_REF_SET_BLACK(ref);
1584
232k
                GC_STACK_PUSH(ref);
1585
232k
              }
1586
236k
            }
1587
237k
            zv++;
1588
237k
          }
1589
597k
          goto handle_ht;
1590
597k
        }
1591
597k
      }
1592
1593
31.6k
handle_zvals:
1594
171k
      for (; n != 0; n--) {
1595
151k
        if (Z_COLLECTABLE_P(zv)) {
1596
45.7k
          ref = Z_COUNTED_P(zv);
1597
45.7k
          GC_ADDREF(ref);
1598
45.7k
          if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1599
11.7k
            GC_REF_SET_BLACK(ref);
1600
11.7k
            zv++;
1601
19.8k
            while (--n) {
1602
8.06k
              if (Z_COLLECTABLE_P(zv)) {
1603
2.32k
                zend_refcounted *ref = Z_COUNTED_P(zv);
1604
2.32k
                GC_ADDREF(ref);
1605
2.32k
                if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1606
644
                  GC_REF_SET_BLACK(ref);
1607
644
                  GC_STACK_PUSH(ref);
1608
644
                }
1609
2.32k
              }
1610
8.06k
              zv++;
1611
8.06k
            }
1612
11.7k
            goto tail_call;
1613
11.7k
          }
1614
45.7k
        }
1615
139k
        zv++;
1616
139k
      }
1617
31.6k
    }
1618
609k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1619
    /* optimization: color is GC_BLACK (0) */
1620
465k
    if (!GC_INFO(ref)) {
1621
459k
      gc_add_garbage(ref);
1622
459k
    }
1623
465k
    ht = (zend_array*)ref;
1624
1625
1.06M
handle_ht:
1626
1.06M
    n = ht->nNumUsed;
1627
1.06M
    if (HT_IS_PACKED(ht)) {
1628
19.0k
      zv = ht->arPacked;
1629
19.0k
      goto handle_zvals;
1630
19.0k
    }
1631
1632
1.04M
    p = ht->arData;
1633
1.81M
    for (; n != 0; n--) {
1634
1.07M
      zv = &p->val;
1635
1.07M
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1636
565k
        zv = Z_INDIRECT_P(zv);
1637
565k
      }
1638
1.07M
      if (Z_COLLECTABLE_P(zv)) {
1639
398k
        ref = Z_COUNTED_P(zv);
1640
398k
        GC_ADDREF(ref);
1641
398k
        if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1642
302k
          GC_REF_SET_BLACK(ref);
1643
302k
          p++;
1644
1.15M
          while (--n) {
1645
849k
            zv = &p->val;
1646
849k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1647
97.9k
              zv = Z_INDIRECT_P(zv);
1648
97.9k
            }
1649
849k
            if (Z_COLLECTABLE_P(zv)) {
1650
698k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1651
698k
              GC_ADDREF(ref);
1652
698k
              if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1653
507k
                GC_REF_SET_BLACK(ref);
1654
507k
                GC_STACK_PUSH(ref);
1655
507k
              }
1656
698k
            }
1657
849k
            p++;
1658
849k
          }
1659
302k
          goto tail_call;
1660
302k
        }
1661
398k
      }
1662
776k
      p++;
1663
776k
    }
1664
1.04M
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1665
12.1k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1666
10.9k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1667
10.9k
      GC_ADDREF(ref);
1668
10.9k
      if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1669
2.94k
        GC_REF_SET_BLACK(ref);
1670
2.94k
        goto tail_call;
1671
2.94k
      }
1672
10.9k
    }
1673
12.1k
  }
1674
1675
770k
next:
1676
770k
  ref = GC_STACK_POP();
1677
770k
  if (ref) {
1678
740k
    goto tail_call;
1679
740k
  }
1680
1681
29.2k
  return count;
1682
770k
}
1683
1684
/* Traverse the reference graph from all roots, marking white nodes as garbage. */
1685
static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
1686
23.0k
{
1687
23.0k
  uint32_t idx, end;
1688
23.0k
  zend_refcounted *ref;
1689
23.0k
  int count = 0;
1690
23.0k
  gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1691
23.0k
  gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1692
1693
  /* remove non-garbage from the list */
1694
268k
  while (current != last) {
1695
245k
    if (GC_IS_ROOT(current->ref)) {
1696
245k
      if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
1697
43.8k
        GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
1698
43.8k
        gc_remove_from_roots(current);
1699
43.8k
      }
1700
245k
    }
1701
245k
    current++;
1702
245k
  }
1703
1704
23.0k
  gc_compact();
1705
1706
  /* Root buffer might be reallocated during gc_collect_white,
1707
   * make sure to reload pointers. */
1708
23.0k
  idx = GC_FIRST_ROOT;
1709
23.0k
  end = GC_G(first_unused);
1710
224k
  while (idx != end) {
1711
201k
    current = GC_IDX2PTR(idx);
1712
201k
    ref = current->ref;
1713
201k
    ZEND_ASSERT(GC_IS_ROOT(ref));
1714
201k
    current->ref = GC_MAKE_GARBAGE(ref);
1715
201k
    if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1716
29.2k
      GC_REF_SET_BLACK(ref);
1717
29.2k
      count += gc_collect_white(ref, flags, stack);
1718
29.2k
    }
1719
201k
    idx++;
1720
201k
  }
1721
1722
23.0k
  return count;
1723
23.0k
}
1724
1725
static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
1726
3.35k
{
1727
3.35k
  HashTable *ht;
1728
3.35k
  Bucket *p;
1729
3.35k
  zval *zv;
1730
3.35k
  uint32_t n;
1731
3.35k
  int count = 0;
1732
3.35k
  GC_STACK_DCL(stack);
1733
1734
9.83k
tail_call:
1735
9.83k
  if (root) {
1736
3.35k
    root = NULL;
1737
3.35k
    count++;
1738
6.47k
  } else if (GC_REF_ADDRESS(ref) != 0
1739
5.69k
   && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1740
2.14k
    GC_TRACE_REF(ref, "removing from buffer");
1741
2.14k
    GC_REMOVE_FROM_BUFFER(ref);
1742
2.14k
    count++;
1743
4.33k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1744
477
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1745
231
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1746
231
      goto tail_call;
1747
231
    }
1748
246
    goto next;
1749
3.85k
  } else {
1750
3.85k
    goto next;
1751
3.85k
  }
1752
1753
5.50k
  if (GC_TYPE(ref) == IS_OBJECT) {
1754
4.74k
    zend_object *obj = (zend_object*)ref;
1755
1756
4.74k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1757
4.74k
      int len;
1758
4.74k
      zval *table;
1759
1760
4.74k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1761
116
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1762
116
        n = len;
1763
116
        zv = table;
1764
122
        for (; n != 0; n--) {
1765
6
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1766
6
          zval *entry = (zval*) Z_PTR_P(zv);
1767
6
          if (Z_OPT_COLLECTABLE_P(entry)) {
1768
6
            ref = Z_COUNTED_P(entry);
1769
6
            GC_STACK_PUSH(ref);
1770
6
          }
1771
6
          zv++;
1772
6
        }
1773
116
      }
1774
1775
4.74k
      ht = obj->handlers->get_gc(obj, &table, &len);
1776
4.74k
      n = len;
1777
4.74k
      zv = table;
1778
4.74k
      if (UNEXPECTED(ht)) {
1779
2.11k
        for (; n != 0; n--) {
1780
873
          if (Z_COLLECTABLE_P(zv)) {
1781
676
            ref = Z_COUNTED_P(zv);
1782
676
            GC_STACK_PUSH(ref);
1783
676
          }
1784
873
          zv++;
1785
873
        }
1786
1.24k
        if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
1787
6
          GC_TRACE_REF(ht, "removing from buffer");
1788
6
          GC_REMOVE_FROM_BUFFER(ht);
1789
6
        }
1790
1.24k
        goto handle_ht;
1791
1.24k
      }
1792
1793
4.22k
handle_zvals:
1794
4.34k
      for (; n != 0; n--) {
1795
3.73k
        if (Z_COLLECTABLE_P(zv)) {
1796
3.61k
          ref = Z_COUNTED_P(zv);
1797
3.61k
          zv++;
1798
4.31k
          while (--n) {
1799
705
            if (Z_COLLECTABLE_P(zv)) {
1800
601
              zend_refcounted *ref = Z_COUNTED_P(zv);
1801
601
              GC_STACK_PUSH(ref);
1802
601
            }
1803
705
            zv++;
1804
705
          }
1805
3.61k
          goto tail_call;
1806
3.61k
        }
1807
125
        zv++;
1808
125
      }
1809
4.22k
    }
1810
4.74k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1811
755
    ht = (zend_array*)ref;
1812
1813
1.99k
handle_ht:
1814
1.99k
    n = ht->nNumUsed;
1815
1.99k
    if (HT_IS_PACKED(ht)) {
1816
721
      zv = ht->arPacked;
1817
721
      goto handle_zvals;
1818
721
    }
1819
1820
1.27k
    p = ht->arData;
1821
1.55k
    for (; n != 0; n--) {
1822
1.42k
      zv = &p->val;
1823
1.42k
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1824
264
        zv = Z_INDIRECT_P(zv);
1825
264
      }
1826
1.42k
      if (Z_COLLECTABLE_P(zv)) {
1827
1.14k
        ref = Z_COUNTED_P(zv);
1828
1.14k
        p++;
1829
1.37k
        while (--n) {
1830
228
          zv = &p->val;
1831
228
          if (Z_TYPE_P(zv) == IS_INDIRECT) {
1832
33
            zv = Z_INDIRECT_P(zv);
1833
33
          }
1834
228
          if (Z_COLLECTABLE_P(zv)) {
1835
202
            zend_refcounted *ref = Z_COUNTED_P(zv);
1836
202
            GC_STACK_PUSH(ref);
1837
202
          }
1838
228
          p++;
1839
228
        }
1840
1.14k
        goto tail_call;
1841
1.14k
      }
1842
277
      p++;
1843
277
    }
1844
1.27k
  }
1845
1846
4.84k
next:
1847
4.84k
  ref = GC_STACK_POP();
1848
4.84k
  if (ref) {
1849
1.48k
    goto tail_call;
1850
1.48k
  }
1851
1852
3.35k
  return count;
1853
4.84k
}
1854
1855
static void zend_get_gc_buffer_release(void);
1856
static void zend_gc_check_root_tmpvars(void);
1857
static void zend_gc_remove_root_tmpvars(void);
1858
1859
static zend_internal_function gc_destructor_fiber;
1860
1861
static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void)
1862
0
{
1863
0
  zend_error_noreturn(E_ERROR, "Unable to create destructor fiber");
1864
0
}
1865
1866
static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void)
1867
0
{
1868
0
  zend_error_noreturn(E_ERROR, "Unable to start destructor fiber");
1869
0
}
1870
1871
/* Call destructors for garbage in the buffer. */
1872
static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber)
1873
713
{
1874
713
  gc_root_buffer *current;
1875
713
  zend_refcounted *p;
1876
1877
  /* The root buffer might be reallocated during destructors calls,
1878
   * make sure to reload pointers as necessary. */
1879
6.13k
  while (idx != end) {
1880
5.51k
    current = GC_IDX2PTR(idx);
1881
5.51k
    if (GC_IS_DTOR_GARBAGE(current->ref)) {
1882
2.73k
      p = GC_GET_PTR(current->ref);
1883
      /* Mark this is as a normal root for the next GC run */
1884
2.73k
      current->ref = p;
1885
      /* Double check that the destructor hasn't been called yet. It
1886
       * could have already been invoked indirectly by some other
1887
       * destructor. */
1888
2.73k
      if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1889
2.73k
        if (fiber != NULL) {
1890
191
          GC_G(dtor_idx) = idx;
1891
191
        }
1892
2.73k
        zend_object *obj = (zend_object*)p;
1893
2.73k
        GC_TRACE_REF(obj, "calling destructor");
1894
2.73k
        GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
1895
2.73k
        GC_ADDREF(obj);
1896
2.73k
        obj->handlers->dtor_obj(obj);
1897
2.73k
        GC_TRACE_REF(obj, "returned from destructor");
1898
2.73k
        GC_DELREF(obj);
1899
2.73k
        if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) {
1900
          /* We resumed after suspension */
1901
86
          gc_check_possible_root((zend_refcounted*)&obj->gc);
1902
86
          return FAILURE;
1903
86
        }
1904
2.73k
      }
1905
2.73k
    }
1906
5.42k
    idx++;
1907
5.42k
  }
1908
1909
627
  return SUCCESS;
1910
713
}
1911
1912
static zend_fiber *gc_create_destructor_fiber(void)
1913
163
{
1914
163
  zval zobj;
1915
163
  zend_fiber *fiber;
1916
1917
163
  GC_TRACE("starting destructor fiber");
1918
1919
163
  if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) {
1920
0
    gc_create_destructor_fiber_error();
1921
0
  }
1922
1923
163
  fiber = (zend_fiber *)Z_OBJ(zobj);
1924
163
  fiber->fci.size = sizeof(fiber->fci);
1925
163
  fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber;
1926
1927
163
  GC_G(dtor_fiber) = fiber;
1928
1929
163
  if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) {
1930
0
    gc_start_destructor_fiber_error();
1931
0
  }
1932
1933
163
  return fiber;
1934
163
}
1935
1936
static void remember_prev_exception(zend_object **prev_exception)
1937
344
{
1938
344
  if (EG(exception)) {
1939
50
    if (*prev_exception) {
1940
6
      zend_exception_set_previous(EG(exception), *prev_exception);
1941
6
    }
1942
50
    *prev_exception = EG(exception);
1943
50
    EG(exception) = NULL;
1944
50
  }
1945
344
}
1946
1947
static zend_never_inline void gc_call_destructors_in_fiber(void)
1948
86
{
1949
86
  ZEND_ASSERT(!GC_G(dtor_fiber_running));
1950
1951
86
  zend_fiber *fiber = GC_G(dtor_fiber);
1952
1953
86
  GC_G(dtor_idx) = GC_FIRST_ROOT;
1954
86
  GC_G(dtor_end) = GC_G(first_unused);
1955
1956
86
  zend_object *exception = NULL;
1957
86
  remember_prev_exception(&exception);
1958
1959
86
  if (UNEXPECTED(!fiber)) {
1960
77
    fiber = gc_create_destructor_fiber();
1961
77
  } else {
1962
9
    zend_fiber_resume(fiber, NULL, NULL);
1963
9
  }
1964
1965
86
  remember_prev_exception(&exception);
1966
1967
172
  for (;;) {
1968
    /* At this point, fiber has executed until suspension */
1969
172
    GC_TRACE("resumed from destructor fiber");
1970
1971
172
    if (UNEXPECTED(GC_G(dtor_fiber_running))) {
1972
      /* Fiber was suspended by a destructor. Start a new one for the
1973
       * remaining destructors. */
1974
86
      GC_TRACE("destructor fiber suspended by destructor");
1975
86
      GC_G(dtor_fiber) = NULL;
1976
86
      GC_G(dtor_idx)++;
1977
      /* We do not own the fiber anymore. It may be collected if the
1978
       * application does not reference it. */
1979
86
      zend_object_release(&fiber->std);
1980
86
      remember_prev_exception(&exception);
1981
86
      fiber = gc_create_destructor_fiber();
1982
86
      remember_prev_exception(&exception);
1983
86
      continue;
1984
86
    } else {
1985
      /* Fiber suspended itself after calling all destructors */
1986
86
      GC_TRACE("destructor fiber suspended itself");
1987
86
      break;
1988
86
    }
1989
172
  }
1990
1991
86
  EG(exception) = exception;
1992
86
}
1993
1994
/* Perform a garbage collection run. The default implementation of gc_collect_cycles. */
1995
ZEND_API int zend_gc_collect_cycles(void)
1996
638k
{
1997
638k
  int total_count = 0;
1998
638k
  bool should_rerun_gc = false;
1999
638k
  bool did_rerun_gc = false;
2000
2001
638k
  zend_hrtime_t start_time = zend_hrtime();
2002
638k
  if (GC_G(num_roots) && !GC_G(gc_active)) {
2003
22.5k
    zend_gc_remove_root_tmpvars();
2004
22.5k
  }
2005
2006
639k
rerun_gc:
2007
639k
  if (GC_G(num_roots)) {
2008
23.0k
    int count;
2009
23.0k
    gc_root_buffer *current, *last;
2010
23.0k
    zend_refcounted *p;
2011
23.0k
    uint32_t gc_flags = 0;
2012
23.0k
    uint32_t idx, end;
2013
23.0k
    gc_stack stack;
2014
2015
23.0k
    stack.prev = NULL;
2016
23.0k
    stack.next = NULL;
2017
2018
23.0k
    if (GC_G(gc_active)) {
2019
41
      GC_G(collector_time) += zend_hrtime() - start_time;
2020
41
      return 0;
2021
41
    }
2022
2023
23.0k
    GC_TRACE("Collecting cycles");
2024
23.0k
    GC_G(gc_runs)++;
2025
23.0k
    GC_G(gc_active) = 1;
2026
2027
23.0k
    GC_TRACE("Marking roots");
2028
23.0k
    gc_mark_roots(&stack);
2029
23.0k
    GC_TRACE("Scanning roots");
2030
23.0k
    gc_scan_roots(&stack);
2031
2032
23.0k
    GC_TRACE("Collecting roots");
2033
23.0k
    count = gc_collect_roots(&gc_flags, &stack);
2034
2035
23.0k
    if (!GC_G(num_roots)) {
2036
      /* nothing to free */
2037
18.7k
      GC_TRACE("Nothing to free");
2038
18.7k
      gc_stack_free(&stack);
2039
18.7k
      GC_G(gc_active) = 0;
2040
18.7k
      goto finish;
2041
18.7k
    }
2042
2043
4.29k
    end = GC_G(first_unused);
2044
2045
4.29k
    if (gc_flags & GC_HAS_DESTRUCTORS) {
2046
627
      GC_TRACE("Calling destructors");
2047
2048
      /* During a destructor call, new externally visible references to nested data may
2049
       * be introduced. These references can be introduced in a way that does not
2050
       * modify any refcounts, so we have no real way to detect this situation
2051
       * short of rerunning full GC tracing. What we do instead is to only run
2052
       * destructors at this point and automatically re-run GC afterwards. */
2053
627
      should_rerun_gc = true;
2054
2055
      /* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
2056
       * color them purple. This serves a double purpose: First, they should be
2057
       * considered new potential roots for the next GC run. Second, it will prevent
2058
       * their removal from the root buffer by nested data removal. */
2059
627
      idx = GC_FIRST_ROOT;
2060
627
      current = GC_IDX2PTR(GC_FIRST_ROOT);
2061
7.06k
      while (idx != end) {
2062
6.43k
        if (GC_IS_GARBAGE(current->ref)) {
2063
6.43k
          p = GC_GET_PTR(current->ref);
2064
6.43k
          if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
2065
5.30k
            zend_object *obj = (zend_object *) p;
2066
5.30k
            if (obj->handlers->dtor_obj != zend_objects_destroy_object
2067
5.00k
              || obj->ce->destructor) {
2068
3.35k
              current->ref = GC_MAKE_DTOR_GARBAGE(obj);
2069
3.35k
              GC_REF_SET_COLOR(obj, GC_PURPLE);
2070
3.35k
            } else {
2071
1.94k
              GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
2072
1.94k
            }
2073
5.30k
          }
2074
6.43k
        }
2075
6.43k
        current++;
2076
6.43k
        idx++;
2077
6.43k
      }
2078
2079
      /* Remove nested data for objects on which a destructor will be called.
2080
       * This will not remove the objects themselves, as they have been colored
2081
       * purple. */
2082
627
      idx = GC_FIRST_ROOT;
2083
627
      current = GC_IDX2PTR(GC_FIRST_ROOT);
2084
7.06k
      while (idx != end) {
2085
6.43k
        if (GC_IS_DTOR_GARBAGE(current->ref)) {
2086
3.35k
          p = GC_GET_PTR(current->ref);
2087
3.35k
          count -= gc_remove_nested_data_from_buffer(p, current, &stack);
2088
3.35k
        }
2089
6.43k
        current++;
2090
6.43k
        idx++;
2091
6.43k
      }
2092
2093
      /* Actually call destructors. */
2094
627
      zend_hrtime_t dtor_start_time = zend_hrtime();
2095
627
      if (EXPECTED(!EG(active_fiber))) {
2096
541
        gc_call_destructors(GC_FIRST_ROOT, end, NULL);
2097
541
      } else {
2098
86
        gc_call_destructors_in_fiber();
2099
86
      }
2100
627
      GC_G(dtor_time) += zend_hrtime() - dtor_start_time;
2101
2102
627
      if (GC_G(gc_protected)) {
2103
        /* something went wrong */
2104
18
        zend_get_gc_buffer_release();
2105
18
        GC_G(collector_time) += zend_hrtime() - start_time;
2106
18
        return 0;
2107
18
      }
2108
627
    }
2109
2110
4.28k
    gc_stack_free(&stack);
2111
2112
    /* Destroy zvals. The root buffer may be reallocated. */
2113
4.28k
    GC_TRACE("Destroying zvals");
2114
4.28k
    zend_hrtime_t free_start_time = zend_hrtime();
2115
4.28k
    idx = GC_FIRST_ROOT;
2116
1.07M
    while (idx != end) {
2117
1.07M
      current = GC_IDX2PTR(idx);
2118
1.07M
      if (GC_IS_GARBAGE(current->ref)) {
2119
52.5k
        p = GC_GET_PTR(current->ref);
2120
52.5k
        GC_TRACE_REF(p, "destroying");
2121
52.5k
        if (GC_TYPE(p) == IS_OBJECT) {
2122
46.5k
          zend_object *obj = (zend_object*)p;
2123
2124
46.5k
          EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
2125
46.5k
          GC_TYPE_INFO(obj) = GC_NULL |
2126
46.5k
            (GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
2127
          /* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
2128
46.5k
          current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
2129
46.5k
          if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
2130
46.5k
            GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
2131
46.5k
            GC_ADDREF(obj);
2132
46.5k
            obj->handlers->free_obj(obj);
2133
46.5k
            GC_DELREF(obj);
2134
46.5k
          }
2135
2136
46.5k
          ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
2137
46.5k
        } else if (GC_TYPE(p) == IS_ARRAY) {
2138
6.06k
          zend_array *arr = (zend_array*)p;
2139
2140
6.06k
          GC_TYPE_INFO(arr) = GC_NULL |
2141
6.06k
            (GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);
2142
2143
          /* GC may destroy arrays with rc>1. This is valid and safe. */
2144
6.06k
          HT_ALLOW_COW_VIOLATION(arr);
2145
2146
6.06k
          zend_hash_destroy(arr);
2147
6.06k
        }
2148
52.5k
      }
2149
1.07M
      idx++;
2150
1.07M
    }
2151
2152
    /* Free objects */
2153
4.28k
    current = GC_IDX2PTR(GC_FIRST_ROOT);
2154
4.28k
    last = GC_IDX2PTR(end);
2155
1.07M
    while (current != last) {
2156
1.07M
      if (GC_IS_GARBAGE(current->ref)) {
2157
52.5k
        p = GC_GET_PTR(current->ref);
2158
52.5k
        GC_LINK_UNUSED(current);
2159
52.5k
        GC_G(num_roots)--;
2160
52.5k
        efree(p);
2161
52.5k
      }
2162
1.07M
      current++;
2163
1.07M
    }
2164
2165
4.28k
    GC_G(free_time) += zend_hrtime() - free_start_time;
2166
2167
4.28k
    GC_TRACE("Collection finished");
2168
4.28k
    GC_G(collected) += count;
2169
4.28k
    total_count += count;
2170
4.28k
    GC_G(gc_active) = 0;
2171
4.28k
  }
2172
2173
620k
  gc_compact();
2174
2175
  /* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
2176
   * up. We do this only once: If we encounter more destructors on the second run, we'll not
2177
   * run GC another time. */
2178
620k
  if (should_rerun_gc && !did_rerun_gc) {
2179
505
    did_rerun_gc = true;
2180
505
    goto rerun_gc;
2181
505
  }
2182
2183
638k
finish:
2184
638k
  zend_get_gc_buffer_release();
2185
2186
  /* Prevent GC from running during zend_gc_check_root_tmpvars, before
2187
   * gc_threshold is adjusted, as this may result in unbounded recursion */
2188
638k
  GC_G(gc_active) = 1;
2189
638k
  zend_gc_check_root_tmpvars();
2190
638k
  GC_G(gc_active) = 0;
2191
2192
638k
  GC_G(collector_time) += zend_hrtime() - start_time;
2193
638k
  return total_count;
2194
620k
}
2195
2196
ZEND_API void zend_gc_get_status(zend_gc_status *status)
2197
172
{
2198
172
  status->active = GC_G(gc_active);
2199
172
  status->gc_protected = GC_G(gc_protected);
2200
172
  status->full = GC_G(gc_full);
2201
172
  status->runs = GC_G(gc_runs);
2202
172
  status->collected = GC_G(collected);
2203
172
  status->threshold = GC_G(gc_threshold);
2204
172
  status->buf_size = GC_G(buf_size);
2205
172
  status->num_roots = GC_G(num_roots);
2206
172
  status->application_time = zend_hrtime() - GC_G(activated_at);
2207
172
  status->collector_time = GC_G(collector_time);
2208
172
  status->dtor_time = GC_G(dtor_time);
2209
172
  status->free_time = GC_G(free_time);
2210
172
}
2211
2212
86.9k
ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
2213
  /* There can only be one get_gc() call active at a time,
2214
   * so there only needs to be one buffer. */
2215
86.9k
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2216
86.9k
  gc_buffer->cur = gc_buffer->start;
2217
86.9k
  return gc_buffer;
2218
86.9k
}
2219
2220
2.20k
ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
2221
2.20k
  size_t old_capacity = gc_buffer->end - gc_buffer->start;
2222
2.20k
  size_t new_capacity = old_capacity == 0 ? 64 : old_capacity * 2;
2223
2.20k
  gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval));
2224
2.20k
  gc_buffer->end = gc_buffer->start + new_capacity;
2225
2.20k
  gc_buffer->cur = gc_buffer->start + old_capacity;
2226
2.20k
}
2227
2228
638k
static void zend_get_gc_buffer_release(void) {
2229
638k
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2230
638k
  efree(gc_buffer->start);
2231
638k
  gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
2232
638k
}
2233
2234
/* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
2235
 * cycles. However, there are some rare exceptions where this is possible, in which case we rely
2236
 * on the producing code to root the value. If a GC run occurs between the rooting and consumption
2237
 * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
2238
638k
static void zend_gc_check_root_tmpvars(void) {
2239
638k
  zend_execute_data *ex = EG(current_execute_data);
2240
793k
  for (; ex; ex = ex->prev_execute_data) {
2241
155k
    zend_function *func = ex->func;
2242
155k
    if (!func || !ZEND_USER_CODE(func->type)) {
2243
152k
      continue;
2244
152k
    }
2245
2246
2.25k
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2247
8.42k
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2248
6.51k
      const zend_live_range *range = &func->op_array.live_range[i];
2249
6.51k
      if (range->start > op_num) {
2250
344
        break;
2251
344
      }
2252
6.17k
      if (range->end <= op_num) {
2253
5.83k
        continue;
2254
5.83k
      }
2255
2256
339
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2257
339
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2258
265
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2259
265
        zval *var = ZEND_CALL_VAR(ex, var_num);
2260
265
        if (Z_COLLECTABLE_P(var)) {
2261
211
          gc_check_possible_root(Z_COUNTED_P(var));
2262
211
        }
2263
265
      }
2264
339
    }
2265
2.25k
  }
2266
638k
}
2267
2268
22.5k
static void zend_gc_remove_root_tmpvars(void) {
2269
22.5k
  zend_execute_data *ex = EG(current_execute_data);
2270
27.8k
  for (; ex; ex = ex->prev_execute_data) {
2271
5.25k
    zend_function *func = ex->func;
2272
5.25k
    if (!func || !ZEND_USER_CODE(func->type)) {
2273
3.32k
      continue;
2274
3.32k
    }
2275
2276
1.92k
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2277
7.78k
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2278
6.16k
      const zend_live_range *range = &func->op_array.live_range[i];
2279
6.16k
      if (range->start > op_num) {
2280
305
        break;
2281
305
      }
2282
5.85k
      if (range->end <= op_num) {
2283
5.61k
        continue;
2284
5.61k
      }
2285
2286
240
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2287
240
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2288
240
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2289
240
        zval *var = ZEND_CALL_VAR(ex, var_num);
2290
240
        if (Z_COLLECTABLE_P(var)) {
2291
211
          GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var));
2292
211
        }
2293
240
      }
2294
240
    }
2295
1.92k
  }
2296
22.5k
}
2297
2298
#if GC_BENCH
2299
void gc_bench_print(void)
2300
{
2301
  fprintf(stderr, "GC Statistics\n");
2302
  fprintf(stderr, "-------------\n");
2303
  fprintf(stderr, "Runs:               %d\n", GC_G(gc_runs));
2304
  fprintf(stderr, "Collected:          %d\n", GC_G(collected));
2305
  fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length));
2306
  fprintf(stderr, "Root buffer peak:   %d\n\n", GC_G(root_buf_peak));
2307
  fprintf(stderr, "      Possible            Remove from  Marked\n");
2308
  fprintf(stderr, "        Root    Buffered     buffer     grey\n");
2309
  fprintf(stderr, "      --------  --------  -----------  ------\n");
2310
  fprintf(stderr, "ZVAL  %8d  %8d  %9d  %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey));
2311
}
2312
#endif
2313
2314
#ifdef ZTS
2315
size_t zend_gc_globals_size(void)
2316
{
2317
  return sizeof(zend_gc_globals);
2318
}
2319
#endif
2320
2321
static ZEND_FUNCTION(gc_destructor_fiber)
2322
163
{
2323
163
  uint32_t idx, end;
2324
2325
163
  zend_fiber *fiber = GC_G(dtor_fiber);
2326
163
  ZEND_ASSERT(fiber != NULL);
2327
163
  ZEND_ASSERT(fiber == EG(active_fiber));
2328
2329
172
  for (;;) {
2330
172
    GC_G(dtor_fiber_running) = true;
2331
2332
172
    idx = GC_G(dtor_idx);
2333
172
    end = GC_G(dtor_end);
2334
172
    if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) {
2335
      /* We resumed after being suspended by a destructor */
2336
86
      return;
2337
86
    }
2338
2339
    /* We have called all destructors. Suspend fiber until the next GC run
2340
     */
2341
86
    GC_G(dtor_fiber_running) = false;
2342
86
    zend_fiber_suspend(fiber, NULL, NULL);
2343
2344
86
    if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) {
2345
      /* Fiber is being destroyed by shutdown sequence */
2346
77
      if (GC_G(dtor_fiber) == fiber) {
2347
77
        GC_G(dtor_fiber) = NULL;
2348
77
      }
2349
77
      GC_DELREF(&fiber->std);
2350
77
      gc_check_possible_root((zend_refcounted*)&fiber->std.gc);
2351
77
      return;
2352
77
    }
2353
86
  }
2354
163
}
2355
2356
static zend_internal_function gc_destructor_fiber = {
2357
  .type = ZEND_INTERNAL_FUNCTION,
2358
  .fn_flags = ZEND_ACC_PUBLIC,
2359
  .handler = ZEND_FN(gc_destructor_fiber),
2360
};
2361
2362
void gc_init(void)
2363
16
{
2364
16
  gc_destructor_fiber.function_name = zend_string_init_interned(
2365
16
      "gc_destructor_fiber",
2366
16
      strlen("gc_destructor_fiber"),
2367
      true);
2368
16
}