Coverage Report

Created: 2025-12-14 06:10

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/php-src/Zend/zend_gc.c
Line
Count
Source
1
/*
2
   +----------------------------------------------------------------------+
3
   | Zend Engine                                                          |
4
   +----------------------------------------------------------------------+
5
   | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
6
   +----------------------------------------------------------------------+
7
   | This source file is subject to version 2.00 of the Zend license,     |
8
   | that is bundled with this package in the file LICENSE, and is        |
9
   | available through the world-wide-web at the following url:           |
10
   | http://www.zend.com/license/2_00.txt.                                |
11
   | If you did not receive a copy of the Zend license and are unable to  |
12
   | obtain it through the world-wide-web, please send a note to          |
13
   | license@zend.com so we can mail you a copy immediately.              |
14
   +----------------------------------------------------------------------+
15
   | Authors: David Wang <planetbeing@gmail.com>                          |
16
   |          Dmitry Stogov <dmitry@php.net>                              |
17
   +----------------------------------------------------------------------+
18
*/
19
20
/**
21
 * zend_gc_collect_cycles
22
 * ======================
23
 *
24
 * Colors and its meaning
25
 * ----------------------
26
 *
27
 * BLACK  (GC_BLACK)   - In use or free.
28
 * GREY   (GC_GREY)    - Possible member of cycle.
29
 * WHITE  (GC_WHITE)   - Member of garbage cycle.
30
 * PURPLE (GC_PURPLE)  - Possible root of cycle.
31
 *
32
 * Colors described in the paper but not used
33
 * ------------------------------------------
34
 *
35
 * GREEN - Acyclic
36
 * RED   - Candidate cycle undergoing Σ-computation.
37
 * ORANGE - Candidate cycle awaiting epoch boundary.
38
 *
39
 *
40
 * Flow
41
 * =====
42
 *
43
 * The garbage collect cycle starts from 'gc_mark_roots', which traverses the
44
 * possible roots, and calls mark_grey for roots that are marked purple, using a
46
 * depth-first traversal.
46
 *
47
 * After all possible roots are traversed and marked,
48
 * gc_scan_roots will be called, and each root will be called with
49
 * gc_scan(root->ref)
50
 *
51
 * gc_scan checks the colors of possible members.
52
 *
53
 * If the node is marked as grey and the refcount > 0
54
 *    gc_scan_black will be called on that node to scan its subgraph.
55
 * otherwise (refcount == 0), it marks the node white.
56
 *
57
 * A node MAY be added to possible roots when ZEND_UNSET_VAR happens or
58
 * zend_assign_to_variable is called, but only when a possible garbage node is
59
 * produced.
60
 * gc_possible_root() will be called to add the nodes to possible roots.
61
 *
62
 *
63
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64
 * get the object properties to scan.
65
 *
66
 *
67
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68
 */
69
#include "zend.h"
70
#include "zend_API.h"
71
#include "zend_compile.h"
72
#include "zend_errors.h"
73
#include "zend_fibers.h"
74
#include "zend_hrtime.h"
75
#include "zend_portability.h"
76
#include "zend_types.h"
77
#include "zend_weakrefs.h"
78
#include "zend_string.h"
79
80
#ifndef GC_BENCH
81
# define GC_BENCH 0
82
#endif
83
84
#ifndef ZEND_GC_DEBUG
85
# define ZEND_GC_DEBUG 0
86
#endif
87
88
/* GC_INFO layout */
89
3.19M
#define GC_ADDRESS  0x0fffffu
90
16.9M
#define GC_COLOR    0x300000u
91
92
#define GC_BLACK    0x000000u /* must be zero */
93
#define GC_WHITE    0x100000u
94
#define GC_GREY     0x200000u
95
#define GC_PURPLE   0x300000u
96
97
/* Debug tracing */
98
#if ZEND_GC_DEBUG > 1
99
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
100
# define GC_TRACE_REF(ref, format, ...) \
101
  do { \
102
    gc_trace_ref((zend_refcounted *) ref); \
103
    fprintf(stderr, format "\n", ##__VA_ARGS__); \
104
  } while (0)
105
# define GC_TRACE_SET_COLOR(ref, color) \
106
  GC_TRACE_REF(ref, "->%s", gc_color_name(color))
107
#else
108
# define GC_TRACE_REF(ref, format, ...)
109
# define GC_TRACE_SET_COLOR(ref, new_color)
110
# define GC_TRACE(str)
111
#endif
112
113
/* GC_INFO access */
114
#define GC_REF_ADDRESS(ref) \
115
3.19M
  (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
116
117
#define GC_REF_COLOR(ref) \
118
  (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
119
120
#define GC_REF_CHECK_COLOR(ref, color) \
121
12.1M
  ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
122
123
6.51M
#define GC_REF_SET_INFO(ref, info) do { \
124
6.51M
    GC_TYPE_INFO(ref) = \
125
6.51M
      (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
126
6.51M
      ((info) << GC_INFO_SHIFT); \
127
6.51M
  } while (0)
128
129
3.14M
#define GC_REF_SET_COLOR(ref, c) do { \
130
3.14M
    GC_TRACE_SET_COLOR(ref, c); \
131
3.14M
    GC_TYPE_INFO(ref) = \
132
3.14M
      (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
133
3.14M
      ((c) << GC_INFO_SHIFT); \
134
3.14M
  } while (0)
135
136
1.65M
#define GC_REF_SET_BLACK(ref) do { \
137
1.65M
    GC_TRACE_SET_COLOR(ref, GC_BLACK); \
138
1.65M
    GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
139
1.65M
  } while (0)
140
141
#define GC_REF_SET_PURPLE(ref) do { \
142
    GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
143
    GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
144
  } while (0)
145
146
/* bit stealing tags for gc_root_buffer.ref */
147
2.68M
#define GC_BITS    0x3
148
149
618k
#define GC_ROOT    0x0 /* possible root of circular garbage     */
150
3.40M
#define GC_UNUSED  0x1 /* part of linked list of unused buffers */
151
2.77M
#define GC_GARBAGE 0x2 /* garbage to delete                     */
152
12.9k
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */
153
154
#define GC_GET_PTR(ptr) \
155
97.3k
  ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
156
157
#define GC_IS_ROOT(ptr) \
158
618k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
159
#define GC_IS_UNUSED(ptr) \
160
130k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
161
#define GC_IS_GARBAGE(ptr) \
162
1.82M
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
163
#define GC_IS_DTOR_GARBAGE(ptr) \
164
10.0k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)
165
166
#define GC_MAKE_GARBAGE(ptr) \
167
948k
  ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
168
#define GC_MAKE_DTOR_GARBAGE(ptr) \
169
2.85k
  ((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
170
171
/* GC address conversion */
172
8.74M
#define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
173
3.28M
#define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))
174
175
/* Get the value to be placed in an unused buffer entry with the specified next unused list index */
176
3.27M
#define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
177
/* Get the index of the next item in the unused list from the given root buffer entry. */
178
873k
#define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
179
180
/* GC buffers */
181
1.49M
#define GC_INVALID           0
182
2.00M
#define GC_FIRST_ROOT        1
183
184
16
#define GC_DEFAULT_BUF_SIZE  (16 * 1024)
185
1
#define GC_BUF_GROW_STEP     (128 * 1024)
186
187
0
#define GC_MAX_UNCOMPRESSED  (512 * 1024)
188
2
#define GC_MAX_BUF_SIZE      0x40000000
189
190
16
#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
191
0
#define GC_THRESHOLD_STEP    10000
192
0
#define GC_THRESHOLD_MAX     1000000000
193
0
#define GC_THRESHOLD_TRIGGER 100
194
195
/* GC flags */
196
7.37k
#define GC_HAS_DESTRUCTORS  (1<<0)
197
198
/* Weak maps */
199
1.55k
#define Z_FROM_WEAKMAP_KEY    (1<<0)
200
1.52k
#define Z_FROM_WEAKMAP      (1<<1)
201
202
/* The WeakMap entry zv is reachable from roots by following the virtual
203
 * reference from a WeakMap key to the entry */
204
#define GC_FROM_WEAKMAP_KEY(zv) \
205
387
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT))
206
207
576
#define GC_SET_FROM_WEAKMAP_KEY(zv) do {                    \
208
576
  zval *_z = (zv);                               \
209
576
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
210
576
} while (0)
211
212
591
#define GC_UNSET_FROM_WEAKMAP_KEY(zv) do {                    \
213
591
  zval *_z = (zv);                               \
214
591
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
215
591
} while (0)
216
217
/* The WeakMap entry zv is reachable from roots by following the reference from
218
 * the WeakMap */
219
#define GC_FROM_WEAKMAP(zv) \
220
1.08k
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT))
221
222
201
#define GC_SET_FROM_WEAKMAP(zv) do {                        \
223
201
  zval *_z = (zv);                               \
224
201
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
225
201
} while (0)
226
227
240
#define GC_UNSET_FROM_WEAKMAP(zv) do {                      \
228
240
  zval *_z = (zv);                               \
229
240
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
230
240
} while (0)
231
232
/* unused buffers */
233
234
/* Are there any unused root buffer entries? */
235
#define GC_HAS_UNUSED() \
236
755k
  (GC_G(unused) != GC_INVALID)
237
238
/* Get the next unused entry and remove it from the list */
239
#define GC_FETCH_UNUSED() \
240
873k
  gc_fetch_unused()
241
242
/* Add a root buffer entry to the unused list */
243
#define GC_LINK_UNUSED(root) \
244
3.27M
  gc_link_unused(root)
245
246
#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
247
  (GC_G(first_unused) < GC_G(gc_threshold))
248
#define GC_HAS_NEXT_UNUSED() \
249
755k
  (GC_G(first_unused) != GC_G(buf_size))
250
#define GC_FETCH_NEXT_UNUSED() \
251
2.40M
  gc_fetch_next_unused()
252
253
ZEND_API int (*gc_collect_cycles)(void);
254
255
/* The type of a root buffer entry.
256
 *
257
 * The lower two bits are used for flags and need to be masked out to
258
 * reconstruct a pointer.
259
 *
260
 * When a node in the root buffer is removed, the non-flag bits of the
261
 * unused entry are used to store the index of the next entry in the unused
262
 * list. */
263
typedef struct _gc_root_buffer {
264
  zend_refcounted  *ref;
265
} gc_root_buffer;
266
267
typedef struct _zend_gc_globals {
268
  /* The root buffer, which stores possible roots of reference cycles. It is
269
   * also used to store garbage to be collected at the end of a run.
270
   * A single array which is reallocated as necessary. */
271
  gc_root_buffer   *buf;
272
273
  bool         gc_enabled;
274
  bool         gc_active;        /* GC currently running, forbid nested GC */
275
  bool         gc_protected;     /* GC protected, forbid root additions */
276
  bool         gc_full;
277
278
  uint32_t          unused;     /* linked list of unused buffers    */
279
  uint32_t          first_unused;   /* first unused buffer              */
280
  uint32_t          gc_threshold;     /* GC collection threshold          */
281
  uint32_t          buf_size;     /* size of the GC buffer            */
282
  uint32_t          num_roots;    /* number of roots in GC buffer     */
283
284
  uint32_t gc_runs;         /* number of GC runs since reset */
285
  uint32_t collected;         /* number of collected nodes since reset */
286
287
  zend_hrtime_t activated_at;     /* the timestamp of the last reset */
288
  zend_hrtime_t collector_time;   /* time spent running GC (ns) */
289
  zend_hrtime_t dtor_time;      /* time spent calling destructors (ns) */
290
  zend_hrtime_t free_time;      /* time spent destroying nodes and freeing memory (ns) */
291
292
  uint32_t dtor_idx;      /* root buffer index */
293
  uint32_t dtor_end;
294
  zend_fiber *dtor_fiber;
295
  bool dtor_fiber_running;
296
297
#if GC_BENCH
298
  uint32_t root_buf_length;
299
  uint32_t root_buf_peak;
300
  uint32_t zval_possible_root;
301
  uint32_t zval_buffered;
302
  uint32_t zval_remove_from_buffer;
303
  uint32_t zval_marked_grey;
304
#endif
305
} zend_gc_globals;
306
307
#ifdef ZTS
308
static int gc_globals_id;
309
static size_t gc_globals_offset;
310
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
311
#else
312
47.6M
#define GC_G(v) (gc_globals.v)
313
static zend_gc_globals gc_globals;
314
#endif
315
316
#if GC_BENCH
317
# define GC_BENCH_INC(counter) GC_G(counter)++
318
# define GC_BENCH_DEC(counter) GC_G(counter)--
319
# define GC_BENCH_PEAK(peak, counter) do {    \
320
    if (GC_G(counter) > GC_G(peak)) {   \
321
      GC_G(peak) = GC_G(counter);     \
322
    }                   \
323
  } while (0)
324
#else
325
# define GC_BENCH_INC(counter)
326
# define GC_BENCH_DEC(counter)
327
# define GC_BENCH_PEAK(peak, counter)
328
#endif
329
330
331
3.73k
#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
332
333
typedef struct _gc_stack gc_stack;
334
335
/* The stack used for graph traversal is stored as a linked list of segments */
336
struct _gc_stack {
337
  gc_stack        *prev;
338
  gc_stack        *next;
339
  zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
340
};
341
342
#define GC_STACK_DCL(init) \
343
194k
  gc_stack *_stack = init; \
344
194k
  size_t    _top = 0;
345
346
#define GC_STACK_PUSH(ref) \
347
2.42M
  gc_stack_push(&_stack, &_top, ref);
348
349
#define GC_STACK_POP() \
350
2.62M
  gc_stack_pop(&_stack, &_top)
351
352
static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
353
20.8k
{
354
20.8k
  if (UNEXPECTED(!stack->next)) {
355
19.5k
    gc_stack *segment = emalloc(sizeof(gc_stack));
356
19.5k
    segment->prev = stack;
357
19.5k
    segment->next = NULL;
358
19.5k
    stack->next = segment;
359
19.5k
  }
360
20.8k
  return stack->next;
361
20.8k
}
362
363
static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
364
2.42M
{
365
2.42M
  if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
366
1.86k
    (*stack) = gc_stack_next(*stack);
367
1.86k
    (*top) = 0;
368
1.86k
  }
369
2.42M
  (*stack)->data[(*top)++] = ref;
370
2.42M
}
371
372
static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
373
2.62M
{
374
2.62M
  if (UNEXPECTED((*top) == 0)) {
375
196k
    if (!(*stack)->prev) {
376
194k
      return NULL;
377
194k
    } else {
378
1.86k
      (*stack) = (*stack)->prev;
379
1.86k
      (*top) = GC_STACK_SEGMENT_SIZE - 1;
380
1.86k
      return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
381
1.86k
    }
382
2.42M
  } else {
383
2.42M
    return (*stack)->data[--(*top)];
384
2.42M
  }
385
2.62M
}
386
387
static void gc_stack_free(gc_stack *stack)
388
22.7k
{
389
22.7k
  gc_stack *p = stack->next;
390
391
42.3k
  while (p) {
392
19.5k
    stack = p->next;
393
19.5k
    efree(p);
394
19.5k
    p = stack;
395
19.5k
  }
396
22.7k
}
397
398
/* Map a full index to a compressed index.
399
 *
400
 * The root buffer can have up to 2^30 entries, but we only have 20 bits to
401
 * store the index. So we use the 1<<19 bit as a compression flag and use the
402
 * other 19 bits to store the index modulo 2^19. */
403
static zend_always_inline uint32_t gc_compress(uint32_t idx)
404
3.28M
{
405
3.28M
  if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
406
3.28M
    return idx;
407
3.28M
  }
408
0
  return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
409
3.28M
}
410
411
/* Find the root buffer entry given a pointer and a compressed index.
412
 * Iterate through the root buffer in steps of 2^19 until the pointer
413
 * matches. */
414
static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
415
0
{
416
0
  gc_root_buffer *root = GC_IDX2PTR(idx);
417
418
0
  if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
419
0
    return root;
420
0
  }
421
422
0
  while (1) {
423
0
    idx += GC_MAX_UNCOMPRESSED;
424
0
    ZEND_ASSERT(idx < GC_G(first_unused));
425
0
    root = GC_IDX2PTR(idx);
426
0
    if (GC_GET_PTR(root->ref) == ref) {
427
0
      return root;
428
0
    }
429
0
  }
430
0
}
431
432
/* Get the index of the next unused root buffer entry, and remove it from the
433
 * unused list. GC_HAS_UNUSED() must be true before calling this. */
434
static zend_always_inline uint32_t gc_fetch_unused(void)
435
873k
{
436
873k
  uint32_t idx;
437
873k
  gc_root_buffer *root;
438
439
873k
  ZEND_ASSERT(GC_HAS_UNUSED());
440
873k
  idx = GC_G(unused);
441
873k
  root = GC_IDX2PTR(idx);
442
873k
  ZEND_ASSERT(GC_IS_UNUSED(root->ref));
443
873k
  GC_G(unused) = GC_LIST2IDX(root->ref);
444
873k
  return idx;
445
873k
}
446
447
/* Add a root buffer entry to the unused list */
448
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
449
3.27M
{
450
3.27M
  root->ref = GC_IDX2LIST(GC_G(unused));
451
3.27M
  GC_G(unused) = GC_PTR2IDX(root);
452
3.27M
}
453
454
static zend_always_inline uint32_t gc_fetch_next_unused(void)
455
2.40M
{
456
2.40M
  uint32_t idx;
457
458
2.40M
  ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
459
2.40M
  idx = GC_G(first_unused);
460
2.40M
  GC_G(first_unused) = GC_G(first_unused) + 1;
461
2.40M
  return idx;
462
2.40M
}
463
464
#if ZEND_GC_DEBUG > 1
465
static const char *gc_color_name(uint32_t color) {
466
  switch (color) {
467
    case GC_BLACK: return "black";
468
    case GC_WHITE: return "white";
469
    case GC_GREY: return "grey";
470
    case GC_PURPLE: return "purple";
471
    default: return "unknown";
472
  }
473
}
474
static void gc_trace_ref(zend_refcounted *ref) {
475
  if (GC_TYPE(ref) == IS_OBJECT) {
476
    zend_object *obj = (zend_object *) ref;
477
    fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
478
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
479
      gc_color_name(GC_REF_COLOR(ref)),
480
      obj->ce->name->val, obj->handle);
481
  } else if (GC_TYPE(ref) == IS_ARRAY) {
482
    zend_array *arr = (zend_array *) ref;
483
    fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
484
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
485
      gc_color_name(GC_REF_COLOR(ref)),
486
      zend_hash_num_elements(arr));
487
  } else {
488
    fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
489
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
490
      gc_color_name(GC_REF_COLOR(ref)),
491
      GC_TYPE(ref) == IS_REFERENCE
492
        ? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
493
  }
494
}
495
#endif
496
497
/* Mark a root buffer entry unused */
498
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
499
3.23M
{
500
3.23M
  GC_LINK_UNUSED(root);
501
3.23M
  GC_G(num_roots)--;
502
3.23M
  GC_BENCH_DEC(root_buf_length);
503
3.23M
}
504
505
static void root_buffer_dtor(zend_gc_globals *gc_globals)
506
0
{
507
0
  if (gc_globals->buf) {
508
0
    free(gc_globals->buf);
509
0
    gc_globals->buf = NULL;
510
0
  }
511
0
}
512
513
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
514
16
{
515
16
  gc_globals->gc_enabled = false;
516
16
  gc_globals->gc_active = false;
517
16
  gc_globals->gc_protected = true;
518
16
  gc_globals->gc_full = false;
519
520
16
  gc_globals->buf = NULL;
521
16
  gc_globals->unused = GC_INVALID;
522
16
  gc_globals->first_unused = GC_INVALID;
523
16
  gc_globals->gc_threshold = GC_INVALID;
524
16
  gc_globals->buf_size = GC_INVALID;
525
16
  gc_globals->num_roots = 0;
526
527
16
  gc_globals->gc_runs = 0;
528
16
  gc_globals->collected = 0;
529
16
  gc_globals->collector_time = 0;
530
16
  gc_globals->dtor_time = 0;
531
16
  gc_globals->free_time = 0;
532
16
  gc_globals->activated_at = 0;
533
534
16
  gc_globals->dtor_idx = GC_FIRST_ROOT;
535
16
  gc_globals->dtor_end = 0;
536
16
  gc_globals->dtor_fiber = NULL;
537
16
  gc_globals->dtor_fiber_running = false;
538
539
#if GC_BENCH
540
  gc_globals->root_buf_length = 0;
541
  gc_globals->root_buf_peak = 0;
542
  gc_globals->zval_possible_root = 0;
543
  gc_globals->zval_buffered = 0;
544
  gc_globals->zval_remove_from_buffer = 0;
545
  gc_globals->zval_marked_grey = 0;
546
#endif
547
16
}
548
549
void gc_globals_ctor(void)
550
16
{
551
#ifdef ZTS
552
  ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
553
#else
554
16
  gc_globals_ctor_ex(&gc_globals);
555
16
#endif
556
16
}
557
558
void gc_globals_dtor(void)
559
0
{
560
0
#ifndef ZTS
561
0
  root_buffer_dtor(&gc_globals);
562
0
#endif
563
0
}
564
565
void gc_reset(void)
566
307k
{
567
307k
  if (GC_G(buf)) {
568
307k
    GC_G(gc_active) = 0;
569
307k
    GC_G(gc_protected) = 0;
570
307k
    GC_G(gc_full) = 0;
571
307k
    GC_G(unused) = GC_INVALID;
572
307k
    GC_G(first_unused) = GC_FIRST_ROOT;
573
307k
    GC_G(num_roots) = 0;
574
575
307k
    GC_G(gc_runs) = 0;
576
307k
    GC_G(collected) = 0;
577
578
307k
    GC_G(collector_time) = 0;
579
307k
    GC_G(dtor_time) = 0;
580
307k
    GC_G(free_time) = 0;
581
582
307k
    GC_G(dtor_idx) = GC_FIRST_ROOT;
583
307k
    GC_G(dtor_end) = 0;
584
307k
    GC_G(dtor_fiber) = NULL;
585
307k
    GC_G(dtor_fiber_running) = false;
586
587
#if GC_BENCH
588
    GC_G(root_buf_length) = 0;
589
    GC_G(root_buf_peak) = 0;
590
    GC_G(zval_possible_root) = 0;
591
    GC_G(zval_buffered) = 0;
592
    GC_G(zval_remove_from_buffer) = 0;
593
    GC_G(zval_marked_grey) = 0;
594
#endif
595
307k
  }
596
597
307k
  GC_G(activated_at) = zend_hrtime();
598
307k
}
599
600
/* Enable/disable the garbage collector.
601
 * Initialize globals if necessary. */
602
ZEND_API bool gc_enable(bool enable)
603
128k
{
604
128k
  bool old_enabled = GC_G(gc_enabled);
605
128k
  GC_G(gc_enabled) = enable;
606
128k
  if (enable && !old_enabled && GC_G(buf) == NULL) {
607
16
    GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
608
16
    GC_G(buf)[0].ref = NULL;
609
16
    GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
610
16
    GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
611
16
    gc_reset();
612
16
  }
613
128k
  return old_enabled;
614
128k
}
615
616
ZEND_API bool gc_enabled(void)
617
108
{
618
108
  return GC_G(gc_enabled);
619
108
}
620
621
/* Protect the GC root buffer (prevent additions) */
622
ZEND_API bool gc_protect(bool protect)
623
28.5k
{
624
28.5k
  bool old_protected = GC_G(gc_protected);
625
28.5k
  GC_G(gc_protected) = protect;
626
28.5k
  return old_protected;
627
28.5k
}
628
629
ZEND_API bool gc_protected(void)
630
0
{
631
0
  return GC_G(gc_protected);
632
0
}
633
634
static void gc_grow_root_buffer(void)
635
1
{
636
1
  size_t new_size;
637
638
1
  if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
639
0
    if (!GC_G(gc_full)) {
640
0
      zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
641
0
      GC_G(gc_active) = 1;
642
0
      GC_G(gc_protected) = 1;
643
0
      GC_G(gc_full) = 1;
644
0
      return;
645
0
    }
646
0
  }
647
1
  if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
648
1
    new_size = GC_G(buf_size) * 2;
649
1
  } else {
650
0
    new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
651
0
  }
652
1
  if (new_size > GC_MAX_BUF_SIZE) {
653
0
    new_size = GC_MAX_BUF_SIZE;
654
0
  }
655
1
  GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
656
1
  GC_G(buf_size) = new_size;
657
1
}
658
659
/* Adjust the GC activation threshold given the number of nodes collected by the last run */
660
static void gc_adjust_threshold(int count)
661
0
{
662
0
  uint32_t new_threshold;
663
664
  /* TODO Very simple heuristic for dynamic GC buffer resizing:
665
   * If there are "too few" collections, increase the collection threshold
666
   * by a fixed step */
667
0
  if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
668
    /* increase */
669
0
    if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
670
0
      new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
671
0
      if (new_threshold > GC_THRESHOLD_MAX) {
672
0
        new_threshold = GC_THRESHOLD_MAX;
673
0
      }
674
0
      if (new_threshold > GC_G(buf_size)) {
675
0
        gc_grow_root_buffer();
676
0
      }
677
0
      if (new_threshold <= GC_G(buf_size)) {
678
0
        GC_G(gc_threshold) = new_threshold;
679
0
      }
680
0
    }
681
0
  } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
682
0
    new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
683
0
    if (new_threshold < GC_THRESHOLD_DEFAULT) {
684
0
      new_threshold = GC_THRESHOLD_DEFAULT;
685
0
    }
686
0
    GC_G(gc_threshold) = new_threshold;
687
0
  }
688
0
}
689
690
/* Perform a GC run and then add a node as a possible root. */
691
static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
692
0
{
693
0
  uint32_t idx;
694
0
  gc_root_buffer *newRoot;
695
696
0
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
697
0
  ZEND_ASSERT(GC_INFO(ref) == 0);
698
699
0
  if (GC_G(gc_enabled) && !GC_G(gc_active)) {
700
0
    GC_ADDREF(ref);
701
0
    gc_adjust_threshold(gc_collect_cycles());
702
0
    if (UNEXPECTED(GC_DELREF(ref) == 0)) {
703
0
      rc_dtor_func(ref);
704
0
      return;
705
0
    } else if (UNEXPECTED(GC_INFO(ref))) {
706
0
      return;
707
0
    }
708
0
  }
709
710
0
  if (GC_HAS_UNUSED()) {
711
0
    idx = GC_FETCH_UNUSED();
712
0
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
713
0
    idx = GC_FETCH_NEXT_UNUSED();
714
0
  } else {
715
0
    gc_grow_root_buffer();
716
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
717
0
      return;
718
0
    }
719
0
    idx = GC_FETCH_NEXT_UNUSED();
720
0
  }
721
722
0
  newRoot = GC_IDX2PTR(idx);
723
0
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
724
0
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
725
726
0
  idx = gc_compress(idx);
727
0
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
728
0
  GC_G(num_roots)++;
729
730
0
  GC_BENCH_INC(zval_buffered);
731
0
  GC_BENCH_INC(root_buf_length);
732
0
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
733
0
}
734
735
/* Add a possible root node to the buffer.
736
 * Maybe perform a GC run. */
737
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
738
2.62M
{
739
2.62M
  uint32_t idx;
740
2.62M
  gc_root_buffer *newRoot;
741
742
2.62M
  if (UNEXPECTED(GC_G(gc_protected))) {
743
111k
    return;
744
111k
  }
745
746
2.51M
  GC_BENCH_INC(zval_possible_root);
747
748
2.51M
  if (EXPECTED(GC_HAS_UNUSED())) {
749
873k
    idx = GC_FETCH_UNUSED();
750
1.64M
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
751
1.64M
    idx = GC_FETCH_NEXT_UNUSED();
752
1.64M
  } else {
753
0
    gc_possible_root_when_full(ref);
754
0
    return;
755
0
  }
756
757
2.51M
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
758
2.51M
  ZEND_ASSERT(GC_INFO(ref) == 0);
759
760
2.51M
  newRoot = GC_IDX2PTR(idx);
761
2.51M
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
762
2.51M
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
763
764
2.51M
  idx = gc_compress(idx);
765
2.51M
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
766
2.51M
  GC_G(num_roots)++;
767
768
2.51M
  GC_BENCH_INC(zval_buffered);
769
2.51M
  GC_BENCH_INC(root_buf_length);
770
2.51M
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
771
2.51M
}
772
773
/* Add an extra root during a GC run */
774
static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref)
775
33
{
776
33
  uint32_t idx;
777
33
  gc_root_buffer *newRoot;
778
779
33
  if (EXPECTED(GC_HAS_UNUSED())) {
780
0
    idx = GC_FETCH_UNUSED();
781
33
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
782
33
    idx = GC_FETCH_NEXT_UNUSED();
783
33
  } else {
784
0
    gc_grow_root_buffer();
785
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
786
      /* TODO: can this really happen? */
787
0
      return;
788
0
    }
789
0
    idx = GC_FETCH_NEXT_UNUSED();
790
0
  }
791
792
33
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
793
33
  ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0);
794
795
33
  newRoot = GC_IDX2PTR(idx);
796
33
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
797
798
33
  idx = gc_compress(idx);
799
33
  GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref));
800
33
  GC_G(num_roots)++;
801
802
33
  GC_BENCH_INC(zval_buffered);
803
33
  GC_BENCH_INC(root_buf_length);
804
33
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
805
33
}
806
807
/* Remove a node from the root buffer given its compressed index */
808
static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
809
0
{
810
0
  gc_root_buffer *root = gc_decompress(ref, idx);
811
0
  gc_remove_from_roots(root);
812
0
}
813
814
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
815
3.18M
{
816
3.18M
  gc_root_buffer *root;
817
3.18M
  uint32_t idx = GC_REF_ADDRESS(ref);
818
819
3.18M
  GC_BENCH_INC(zval_remove_from_buffer);
820
821
3.18M
  if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
822
2.31M
    GC_TRACE_SET_COLOR(ref, GC_BLACK);
823
2.31M
  }
824
3.18M
  GC_REF_SET_INFO(ref, 0);
825
826
  /* Perform decompression only in case of large buffers */
827
3.18M
  if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
828
0
    gc_remove_compressed(ref, idx);
829
0
    return;
830
0
  }
831
832
3.18M
  ZEND_ASSERT(idx);
833
3.18M
  root = GC_IDX2PTR(idx);
834
3.18M
  gc_remove_from_roots(root);
835
3.18M
}
836
837
/* Mark all nodes reachable from ref as black (live). Restore the reference
838
 * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan.
839
 * To implement a depth-first search, discovered nodes are added to a stack
840
 * which is processed iteratively. */
841
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
842
34.2k
{
843
34.2k
  HashTable *ht;
844
34.2k
  Bucket *p;
845
34.2k
  zval *zv;
846
34.2k
  uint32_t n;
847
34.2k
  GC_STACK_DCL(stack);
848
849
208k
tail_call:
850
208k
  if (GC_TYPE(ref) == IS_OBJECT) {
851
36.8k
    zend_object *obj = (zend_object*)ref;
852
853
36.8k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
854
36.8k
      zval *table;
855
36.8k
      int len;
856
857
36.8k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
858
671
        zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
859
671
        n = len;
860
671
        zv = table;
861
1.25k
        for (; n != 0; n-=2) {
862
582
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
863
582
          zval *entry = (zval*) Z_PTR_P(zv);
864
582
          zval *weakmap = zv+1;
865
582
          ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
866
582
          if (Z_OPT_COLLECTABLE_P(entry)) {
867
477
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
868
477
            if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) {
869
              /* Weakmap was scanned in gc_mark_roots, we must
870
               * ensure that it's eventually scanned in
871
               * gc_scan_roots as well. */
872
47
              if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) {
873
26
                gc_extra_root(Z_COUNTED_P(weakmap));
874
26
              }
875
430
            } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) {
876
              /* Both the entry weakmap and key are BLACK, so we
877
               * can mark the entry BLACK as well.
878
               * !GC_FROM_WEAKMAP(entry) means that the weakmap
879
               * was already scanned black (or will not be
880
               * scanned), so it's our responsibility to mark the
881
               * entry */
882
408
              ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK));
883
408
              ref = Z_COUNTED_P(entry);
884
408
              GC_ADDREF(ref);
885
408
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
886
40
                GC_REF_SET_BLACK(ref);
887
40
                GC_STACK_PUSH(ref);
888
40
              }
889
408
            }
890
477
          }
891
582
          zv+=2;
892
582
        }
893
671
      }
894
895
36.8k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
896
135
        zend_weakmap_get_key_entry_gc(obj, &table, &len);
897
135
        n = len;
898
135
        zv = table;
899
357
        for (; n != 0; n-=2) {
900
222
          ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR);
901
222
          zval *key = zv;
902
222
          zval *entry = (zval*) Z_PTR_P(zv+1);
903
222
          if (Z_OPT_COLLECTABLE_P(entry)) {
904
126
            GC_UNSET_FROM_WEAKMAP(entry);
905
126
            if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) {
906
              /* Key was scanned in gc_mark_roots, we must
907
               * ensure that it's eventually scanned in
908
               * gc_scan_roots as well. */
909
39
              if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) {
910
7
                gc_extra_root(Z_COUNTED_P(key));
911
7
              }
912
87
            } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) {
913
              /* Both the entry weakmap and key are BLACK, so we
914
               * can mark the entry BLACK as well.
915
               * !GC_FROM_WEAKMAP_KEY(entry) means that the key
916
               * was already scanned black (or will not be
917
               * scanned), so it's our responsibility to mark the
918
               * entry */
919
73
              ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK));
920
73
              ref = Z_COUNTED_P(entry);
921
73
              GC_ADDREF(ref);
922
73
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
923
68
                GC_REF_SET_BLACK(ref);
924
68
                GC_STACK_PUSH(ref);
925
68
              }
926
73
            }
927
126
          }
928
222
          zv += 2;
929
222
        }
930
135
        goto next;
931
135
      }
932
933
36.7k
      ht = obj->handlers->get_gc(obj, &table, &len);
934
36.7k
      n = len;
935
36.7k
      zv = table;
936
36.7k
      if (UNEXPECTED(ht)) {
937
12.6k
        GC_ADDREF(ht);
938
12.6k
        if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
939
12.6k
          GC_REF_SET_BLACK(ht);
940
16.5k
          for (; n != 0; n--) {
941
3.90k
            if (Z_COLLECTABLE_P(zv)) {
942
670
              ref = Z_COUNTED_P(zv);
943
670
              GC_ADDREF(ref);
944
670
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
945
535
                GC_REF_SET_BLACK(ref);
946
535
                GC_STACK_PUSH(ref);
947
535
              }
948
670
            }
949
3.90k
            zv++;
950
3.90k
          }
951
12.6k
          goto handle_ht;
952
12.6k
        }
953
12.6k
      }
954
955
117k
handle_zvals:
956
2.47M
      for (; n != 0; n--) {
957
2.38M
        if (Z_COLLECTABLE_P(zv)) {
958
26.3k
          ref = Z_COUNTED_P(zv);
959
26.3k
          GC_ADDREF(ref);
960
26.3k
          if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
961
22.7k
            GC_REF_SET_BLACK(ref);
962
22.7k
            zv++;
963
110k
            while (--n) {
964
88.0k
              if (Z_COLLECTABLE_P(zv)) {
965
78.0k
                zend_refcounted *ref = Z_COUNTED_P(zv);
966
78.0k
                GC_ADDREF(ref);
967
78.0k
                if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
968
76.2k
                  GC_REF_SET_BLACK(ref);
969
76.2k
                  GC_STACK_PUSH(ref);
970
76.2k
                }
971
78.0k
              }
972
88.0k
              zv++;
973
88.0k
            }
974
22.7k
            goto tail_call;
975
22.7k
          }
976
26.3k
        }
977
2.35M
        zv++;
978
2.35M
      }
979
117k
    }
980
171k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
981
168k
    ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
982
168k
    ht = (zend_array*)ref;
983
180k
handle_ht:
984
180k
    n = ht->nNumUsed;
985
180k
    zv = ht->arPacked;
986
180k
    if (HT_IS_PACKED(ht)) {
987
93.1k
      goto handle_zvals;
988
93.1k
    }
989
990
87.7k
    p = (Bucket*)zv;
991
257k
    for (; n != 0; n--) {
992
239k
      zv = &p->val;
993
239k
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
994
5.67k
        zv = Z_INDIRECT_P(zv);
995
5.67k
      }
996
239k
      if (Z_COLLECTABLE_P(zv)) {
997
72.2k
        ref = Z_COUNTED_P(zv);
998
72.2k
        GC_ADDREF(ref);
999
72.2k
        if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1000
70.6k
          GC_REF_SET_BLACK(ref);
1001
70.6k
          p++;
1002
138k
          while (--n) {
1003
68.3k
            zv = &p->val;
1004
68.3k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1005
382
              zv = Z_INDIRECT_P(zv);
1006
382
            }
1007
68.3k
            if (Z_COLLECTABLE_P(zv)) {
1008
3.28k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1009
3.28k
              GC_ADDREF(ref);
1010
3.28k
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1011
2.43k
                GC_REF_SET_BLACK(ref);
1012
2.43k
                GC_STACK_PUSH(ref);
1013
2.43k
              }
1014
3.28k
            }
1015
68.3k
            p++;
1016
68.3k
          }
1017
70.6k
          goto tail_call;
1018
70.6k
        }
1019
72.2k
      }
1020
169k
      p++;
1021
169k
    }
1022
87.7k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1023
3.15k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1024
1.30k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1025
1.30k
      GC_ADDREF(ref);
1026
1.30k
      if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1027
1.21k
        GC_REF_SET_BLACK(ref);
1028
1.21k
        goto tail_call;
1029
1.21k
      }
1030
1.30k
    }
1031
3.15k
  }
1032
1033
113k
next:
1034
113k
  ref = GC_STACK_POP();
1035
113k
  if (ref) {
1036
79.3k
    goto tail_call;
1037
79.3k
  }
1038
113k
}
1039
1040
/* Traverse the graph of nodes referred to by ref. Decrement the reference
1041
 * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan. */
1042
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
1043
66.3k
{
1044
66.3k
  HashTable *ht;
1045
66.3k
  Bucket *p;
1046
66.3k
  zval *zv;
1047
66.3k
  uint32_t n;
1048
66.3k
  GC_STACK_DCL(stack);
1049
1050
1.13M
tail_call:
1051
1.13M
  GC_BENCH_INC(zval_marked_grey);
1052
1053
1.13M
  if (GC_TYPE(ref) == IS_OBJECT) {
1054
551k
    zend_object *obj = (zend_object*)ref;
1055
1056
551k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1057
551k
      zval *table;
1058
551k
      int len;
1059
1060
551k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1061
904
        zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
1062
904
        n = len;
1063
904
        zv = table;
1064
1.59k
        for (; n != 0; n-=2) {
1065
686
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1066
686
          zval *entry = (zval*) Z_PTR_P(zv);
1067
686
          zval *weakmap = zv+1;
1068
686
          ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
1069
686
          if (Z_COLLECTABLE_P(entry)) {
1070
576
            GC_SET_FROM_WEAKMAP_KEY(entry);
1071
576
            ref = Z_COUNTED_P(entry);
1072
            /* Only DELREF if the contribution from the weakmap has
1073
             * not been cancelled yet */
1074
576
            if (!GC_FROM_WEAKMAP(entry)) {
1075
493
              GC_DELREF(ref);
1076
493
            }
1077
576
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1078
79
              GC_REF_SET_COLOR(ref, GC_GREY);
1079
79
              GC_STACK_PUSH(ref);
1080
79
            }
1081
576
          }
1082
686
          zv+=2;
1083
686
        }
1084
904
      }
1085
1086
551k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1087
192
        zend_weakmap_get_entry_gc(obj, &table, &len);
1088
192
        n = len;
1089
192
        zv = table;
1090
489
        for (; n != 0; n--) {
1091
297
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1092
297
          zval *entry = (zval*) Z_PTR_P(zv);
1093
297
          if (Z_COLLECTABLE_P(entry)) {
1094
201
            GC_SET_FROM_WEAKMAP(entry);
1095
201
            ref = Z_COUNTED_P(entry);
1096
            /* Only DELREF if the contribution from the weakmap key
1097
             * has not been cancelled yet */
1098
201
            if (!GC_FROM_WEAKMAP_KEY(entry)) {
1099
102
              GC_DELREF(ref);
1100
102
            }
1101
201
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1102
82
              GC_REF_SET_COLOR(ref, GC_GREY);
1103
82
              GC_STACK_PUSH(ref);
1104
82
            }
1105
201
          }
1106
297
          zv++;
1107
297
        }
1108
192
        goto next;
1109
192
      }
1110
1111
551k
      ht = obj->handlers->get_gc(obj, &table, &len);
1112
551k
      n = len;
1113
551k
      zv = table;
1114
551k
      if (UNEXPECTED(ht)) {
1115
517k
        GC_DELREF(ht);
1116
517k
        if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1117
517k
          GC_REF_SET_COLOR(ht, GC_GREY);
1118
694k
          for (; n != 0; n--) {
1119
177k
            if (Z_COLLECTABLE_P(zv)) {
1120
173k
              ref = Z_COUNTED_P(zv);
1121
173k
              GC_DELREF(ref);
1122
173k
              if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1123
169k
                GC_REF_SET_COLOR(ref, GC_GREY);
1124
169k
                GC_STACK_PUSH(ref);
1125
169k
              }
1126
173k
            }
1127
177k
            zv++;
1128
177k
          }
1129
517k
          goto handle_ht;
1130
517k
        }
1131
517k
      }
1132
148k
handle_zvals:
1133
2.65M
      for (; n != 0; n--) {
1134
2.53M
        if (Z_COLLECTABLE_P(zv)) {
1135
77.5k
          ref = Z_COUNTED_P(zv);
1136
77.5k
          GC_DELREF(ref);
1137
77.5k
          if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1138
32.5k
            GC_REF_SET_COLOR(ref, GC_GREY);
1139
32.5k
            zv++;
1140
122k
            while (--n) {
1141
90.3k
              if (Z_COLLECTABLE_P(zv)) {
1142
76.7k
                zend_refcounted *ref = Z_COUNTED_P(zv);
1143
76.7k
                GC_DELREF(ref);
1144
76.7k
                if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1145
71.7k
                  GC_REF_SET_COLOR(ref, GC_GREY);
1146
71.7k
                  GC_STACK_PUSH(ref);
1147
71.7k
                }
1148
76.7k
              }
1149
90.3k
              zv++;
1150
90.3k
            }
1151
32.5k
            goto tail_call;
1152
32.5k
          }
1153
77.5k
        }
1154
2.50M
        zv++;
1155
2.50M
      }
1156
148k
    }
1157
583k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1158
567k
    ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
1159
567k
    ht = (zend_array*)ref;
1160
1.08M
handle_ht:
1161
1.08M
    n = ht->nNumUsed;
1162
1.08M
    if (HT_IS_PACKED(ht)) {
1163
114k
            zv = ht->arPacked;
1164
114k
            goto handle_zvals;
1165
114k
    }
1166
1167
970k
    p = ht->arData;
1168
1.99M
    for (; n != 0; n--) {
1169
1.40M
      zv = &p->val;
1170
1.40M
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1171
645k
        zv = Z_INDIRECT_P(zv);
1172
645k
      }
1173
1.40M
      if (Z_COLLECTABLE_P(zv)) {
1174
489k
        ref = Z_COUNTED_P(zv);
1175
489k
        GC_DELREF(ref);
1176
489k
        if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1177
374k
          GC_REF_SET_COLOR(ref, GC_GREY);
1178
374k
          p++;
1179
1.29M
          while (--n) {
1180
916k
            zv = &p->val;
1181
916k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1182
113k
              zv = Z_INDIRECT_P(zv);
1183
113k
            }
1184
916k
            if (Z_COLLECTABLE_P(zv)) {
1185
683k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1186
683k
              GC_DELREF(ref);
1187
683k
              if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1188
418k
                GC_REF_SET_COLOR(ref, GC_GREY);
1189
418k
                GC_STACK_PUSH(ref);
1190
418k
              }
1191
683k
            }
1192
916k
            p++;
1193
916k
          }
1194
374k
          goto tail_call;
1195
374k
        }
1196
489k
      }
1197
1.02M
      p++;
1198
1.02M
    }
1199
970k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1200
16.2k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1201
13.0k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1202
13.0k
      GC_DELREF(ref);
1203
13.0k
      if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1204
2.40k
        GC_REF_SET_COLOR(ref, GC_GREY);
1205
2.40k
        goto tail_call;
1206
2.40k
      }
1207
13.0k
    }
1208
16.2k
  }
1209
1210
726k
next:
1211
726k
  ref = GC_STACK_POP();
1212
726k
  if (ref) {
1213
660k
    goto tail_call;
1214
660k
  }
1215
726k
}
1216
1217
/* Two-Finger compaction algorithm */
1218
static void gc_compact(void)
1219
903k
{
1220
903k
  if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
1221
434k
    if (GC_G(num_roots)) {
1222
10.5k
      gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
1223
10.5k
      gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
1224
10.5k
      gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
1225
10.5k
      uint32_t idx;
1226
10.5k
      zend_refcounted *p;
1227
1228
21.8k
      while (free < scan) {
1229
90.4k
        while (!GC_IS_UNUSED(free->ref)) {
1230
75.3k
          free++;
1231
75.3k
        }
1232
39.6k
        while (GC_IS_UNUSED(scan->ref)) {
1233
24.5k
          scan--;
1234
24.5k
        }
1235
15.1k
        if (scan > free) {
1236
8.63k
          p = scan->ref;
1237
8.63k
          free->ref = p;
1238
8.63k
          p = GC_GET_PTR(p);
1239
8.63k
          idx = gc_compress(GC_PTR2IDX(free));
1240
8.63k
          GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
1241
8.63k
          free++;
1242
8.63k
          scan--;
1243
8.63k
          if (scan <= end) {
1244
3.81k
            break;
1245
3.81k
          }
1246
8.63k
        }
1247
15.1k
      }
1248
10.5k
    }
1249
434k
    GC_G(unused) = GC_INVALID;
1250
434k
    GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
1251
434k
  }
1252
903k
}
1253
1254
/* For all roots marked purple, traverse the graph, decrementing the reference
1255
 * count of their child nodes. Mark visited nodes grey so that they are not
1256
 * visited again. See MarkRoots() in Bacon & Rajan. */
1257
static void gc_mark_roots(gc_stack *stack)
1258
22.8k
{
1259
22.8k
  gc_root_buffer *current, *last;
1260
1261
22.8k
  gc_compact();
1262
1263
22.8k
  current = GC_IDX2PTR(GC_FIRST_ROOT);
1264
22.8k
  last = GC_IDX2PTR(GC_G(first_unused));
1265
229k
  while (current != last) {
1266
206k
    if (GC_IS_ROOT(current->ref)) {
1267
206k
      if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
1268
66.3k
        GC_REF_SET_COLOR(current->ref, GC_GREY);
1269
66.3k
        gc_mark_grey(current->ref, stack);
1270
66.3k
      }
1271
206k
    }
1272
206k
    current++;
1273
206k
  }
1274
22.8k
}
1275
1276
/* Traverse the reference graph of ref. Evaluate grey nodes and mark them
1277
 * black (to keep) or white (to free). Note that nodes initially marked white
1278
 * may later become black if they are visited from a live node.
1279
 * See Scan() in Bacon & Rajan. */
1280
static void gc_scan(zend_refcounted *ref, gc_stack *stack)
1281
66.3k
{
1282
66.3k
  HashTable *ht;
1283
66.3k
  Bucket *p;
1284
66.3k
  zval *zv;
1285
66.3k
  uint32_t n;
1286
66.3k
  GC_STACK_DCL(stack);
1287
1288
1.48M
tail_call:
1289
1.48M
  if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1290
402
    goto next;
1291
402
  }
1292
1293
1.48M
  if (GC_REFCOUNT(ref) > 0) {
1294
34.2k
    if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1295
34.2k
      GC_REF_SET_BLACK(ref);
1296
34.2k
      if (UNEXPECTED(!_stack->next)) {
1297
19.0k
        gc_stack_next(_stack);
1298
19.0k
      }
1299
      /* Split stack and reuse the tail */
1300
34.2k
      _stack->next->prev = NULL;
1301
34.2k
      gc_scan_black(ref, _stack->next);
1302
34.2k
      _stack->next->prev = _stack;
1303
34.2k
    }
1304
34.2k
    goto next;
1305
34.2k
  }
1306
1307
1.45M
  if (GC_TYPE(ref) == IS_OBJECT) {
1308
517k
    zend_object *obj = (zend_object*)ref;
1309
517k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1310
517k
      zval *table;
1311
517k
      int len;
1312
1313
517k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1314
292
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1315
292
        n = len;
1316
292
        zv = table;
1317
451
        for (; n != 0; n--) {
1318
159
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1319
159
          zval *entry = (zval*) Z_PTR_P(zv);
1320
159
          if (Z_OPT_COLLECTABLE_P(entry)) {
1321
104
            ref = Z_COUNTED_P(entry);
1322
104
            if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1323
29
              GC_REF_SET_COLOR(ref, GC_WHITE);
1324
29
              GC_STACK_PUSH(ref);
1325
29
            }
1326
104
          }
1327
159
          zv++;
1328
159
        }
1329
292
      }
1330
1331
517k
      ht = obj->handlers->get_gc(obj, &table, &len);
1332
517k
      n = len;
1333
517k
      zv = table;
1334
517k
      if (UNEXPECTED(ht)) {
1335
506k
        if (GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1336
506k
          GC_REF_SET_COLOR(ht, GC_WHITE);
1337
506k
          GC_STACK_PUSH((zend_refcounted *) ht);
1338
679k
          for (; n != 0; n--) {
1339
173k
            if (Z_COLLECTABLE_P(zv)) {
1340
172k
              ref = Z_COUNTED_P(zv);
1341
172k
              if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1342
169k
                GC_REF_SET_COLOR(ref, GC_WHITE);
1343
169k
                GC_STACK_PUSH(ref);
1344
169k
              }
1345
172k
            }
1346
173k
            zv++;
1347
173k
          }
1348
506k
          goto handle_ht;
1349
506k
        }
1350
506k
      }
1351
1352
64.0k
handle_zvals:
1353
352k
      for (; n != 0; n--) {
1354
300k
        if (Z_COLLECTABLE_P(zv)) {
1355
84.7k
          ref = Z_COUNTED_P(zv);
1356
84.7k
          if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1357
12.1k
            GC_REF_SET_COLOR(ref, GC_WHITE);
1358
12.1k
            zv++;
1359
27.6k
            while (--n) {
1360
15.4k
              if (Z_COLLECTABLE_P(zv)) {
1361
10.3k
                zend_refcounted *ref = Z_COUNTED_P(zv);
1362
10.3k
                if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1363
7.15k
                  GC_REF_SET_COLOR(ref, GC_WHITE);
1364
7.15k
                  GC_STACK_PUSH(ref);
1365
7.15k
                }
1366
10.3k
              }
1367
15.4k
              zv++;
1368
15.4k
            }
1369
12.1k
            goto tail_call;
1370
12.1k
          }
1371
84.7k
        }
1372
288k
        zv++;
1373
288k
      }
1374
64.0k
    }
1375
934k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1376
920k
    ht = (HashTable *)ref;
1377
920k
    ZEND_ASSERT(ht != &EG(symbol_table));
1378
1379
1.42M
handle_ht:
1380
1.42M
    n = ht->nNumUsed;
1381
1.42M
    if (HT_IS_PACKED(ht)) {
1382
53.2k
            zv = ht->arPacked;
1383
53.2k
            goto handle_zvals;
1384
53.2k
    }
1385
1386
1.37M
    p = ht->arData;
1387
3.76M
    for (; n != 0; n--) {
1388
2.69M
      zv = &p->val;
1389
2.69M
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1390
1.39M
        zv = Z_INDIRECT_P(zv);
1391
1.39M
      }
1392
2.69M
      if (Z_COLLECTABLE_P(zv)) {
1393
1.09M
        ref = Z_COUNTED_P(zv);
1394
1.09M
        if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1395
305k
          GC_REF_SET_COLOR(ref, GC_WHITE);
1396
305k
          p++;
1397
1.15M
          while (--n) {
1398
852k
            zv = &p->val;
1399
852k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1400
113k
              zv = Z_INDIRECT_P(zv);
1401
113k
            }
1402
852k
            if (Z_COLLECTABLE_P(zv)) {
1403
682k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1404
682k
              if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1405
417k
                GC_REF_SET_COLOR(ref, GC_WHITE);
1406
417k
                GC_STACK_PUSH(ref);
1407
417k
              }
1408
682k
            }
1409
852k
            p++;
1410
852k
          }
1411
305k
          goto tail_call;
1412
305k
        }
1413
1.09M
      }
1414
2.39M
      p++;
1415
2.39M
    }
1416
1.37M
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1417
13.5k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1418
11.9k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1419
11.9k
      if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1420
1.82k
        GC_REF_SET_COLOR(ref, GC_WHITE);
1421
1.82k
        goto tail_call;
1422
1.82k
      }
1423
11.9k
    }
1424
13.5k
  }
1425
1426
1.16M
next:
1427
1.16M
  ref = GC_STACK_POP();
1428
1.16M
  if (ref) {
1429
1.10M
    goto tail_call;
1430
1.10M
  }
1431
1.16M
}
1432
1433
/* Scan all roots, coloring grey nodes black or white */
1434
static void gc_scan_roots(gc_stack *stack)
1435
22.8k
{
1436
22.8k
  uint32_t idx, end;
1437
22.8k
  gc_root_buffer *current;
1438
1439
  /* Root buffer might be reallocated during gc_scan,
1440
   * make sure to reload pointers. */
1441
22.8k
  idx = GC_FIRST_ROOT;
1442
22.8k
  end = GC_G(first_unused);
1443
229k
  while (idx != end) {
1444
206k
    current = GC_IDX2PTR(idx);
1445
206k
    if (GC_IS_ROOT(current->ref)) {
1446
206k
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1447
66.3k
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1448
66.3k
        gc_scan(current->ref, stack);
1449
66.3k
      }
1450
206k
    }
1451
206k
    idx++;
1452
206k
  }
1453
1454
  /* Scan extra roots added during gc_scan */
1455
22.8k
  while (idx != GC_G(first_unused)) {
1456
33
    current = GC_IDX2PTR(idx);
1457
33
    if (GC_IS_ROOT(current->ref)) {
1458
33
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1459
33
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1460
33
        gc_scan(current->ref, stack);
1461
33
      }
1462
33
    }
1463
33
    idx++;
1464
33
  }
1465
22.8k
}
1466
1467
/* Add a node to the buffer with the garbage flag, so that it will be
1468
 * destroyed and freed when the scan is complete. */
1469
static void gc_add_garbage(zend_refcounted *ref)
1470
755k
{
1471
755k
  uint32_t idx;
1472
755k
  gc_root_buffer *buf;
1473
1474
755k
  if (GC_HAS_UNUSED()) {
1475
0
    idx = GC_FETCH_UNUSED();
1476
755k
  } else if (GC_HAS_NEXT_UNUSED()) {
1477
755k
    idx = GC_FETCH_NEXT_UNUSED();
1478
755k
  } else {
1479
1
    gc_grow_root_buffer();
1480
1
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
1481
0
      return;
1482
0
    }
1483
1
    idx = GC_FETCH_NEXT_UNUSED();
1484
1
  }
1485
1486
755k
  buf = GC_IDX2PTR(idx);
1487
755k
  buf->ref = GC_MAKE_GARBAGE(ref);
1488
1489
755k
  idx = gc_compress(idx);
1490
755k
  GC_REF_SET_INFO(ref, idx | GC_BLACK);
1491
755k
  GC_G(num_roots)++;
1492
755k
}
1493
1494
/* Traverse the reference graph from ref, marking any white nodes as garbage. */
1495
static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
1496
24.5k
{
1497
24.5k
  int count = 0;
1498
24.5k
  HashTable *ht;
1499
24.5k
  Bucket *p;
1500
24.5k
  zval *zv;
1501
24.5k
  uint32_t n;
1502
24.5k
  GC_STACK_DCL(stack);
1503
1504
927k
tail_call:
1505
  /* don't count references for compatibility ??? */
1506
927k
  if (GC_TYPE(ref) != IS_REFERENCE) {
1507
914k
    count++;
1508
914k
  }
1509
1510
927k
  if (GC_TYPE(ref) == IS_OBJECT) {
1511
515k
    zend_object *obj = (zend_object*)ref;
1512
1513
515k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1514
515k
      int len;
1515
515k
      zval *table;
1516
1517
      /* optimization: color is GC_BLACK (0) */
1518
515k
      if (!GC_INFO(ref)) {
1519
361k
        gc_add_garbage(ref);
1520
361k
      }
1521
515k
      if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
1522
401k
       && (obj->handlers->dtor_obj != zend_objects_destroy_object
1523
401k
        || obj->ce->destructor != NULL)) {
1524
2.85k
        *flags |= GC_HAS_DESTRUCTORS;
1525
2.85k
      }
1526
1527
515k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1528
233
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1529
233
        n = len;
1530
233
        zv = table;
1531
337
        for (; n != 0; n--) {
1532
104
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1533
104
          zval *entry = (zval*) Z_PTR_P(zv);
1534
104
          if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) {
1535
67
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
1536
67
            GC_UNSET_FROM_WEAKMAP(entry);
1537
67
            ref = Z_COUNTED_P(entry);
1538
67
            GC_ADDREF(ref);
1539
67
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1540
18
              GC_REF_SET_BLACK(ref);
1541
18
              GC_STACK_PUSH(ref);
1542
18
            }
1543
67
          }
1544
104
          zv++;
1545
104
        }
1546
233
      }
1547
1548
515k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1549
57
        zend_weakmap_get_entry_gc(obj, &table, &len);
1550
57
        n = len;
1551
57
        zv = table;
1552
132
        for (; n != 0; n--) {
1553
75
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1554
75
          zval *entry = (zval*) Z_PTR_P(zv);
1555
75
          if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP(entry)) {
1556
47
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
1557
47
            GC_UNSET_FROM_WEAKMAP(entry);
1558
47
            ref = Z_COUNTED_P(entry);
1559
47
            GC_ADDREF(ref);
1560
47
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1561
37
              GC_REF_SET_BLACK(ref);
1562
37
              GC_STACK_PUSH(ref);
1563
37
            }
1564
47
          }
1565
75
          zv++;
1566
75
        }
1567
57
        goto next;
1568
57
      }
1569
1570
515k
      ht = obj->handlers->get_gc(obj, &table, &len);
1571
515k
      n = len;
1572
515k
      zv = table;
1573
515k
      if (UNEXPECTED(ht)) {
1574
505k
        GC_ADDREF(ht);
1575
505k
        if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) {
1576
505k
          GC_REF_SET_BLACK(ht);
1577
678k
          for (; n != 0; n--) {
1578
173k
            if (Z_COLLECTABLE_P(zv)) {
1579
172k
              ref = Z_COUNTED_P(zv);
1580
172k
              GC_ADDREF(ref);
1581
172k
              if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1582
169k
                GC_REF_SET_BLACK(ref);
1583
169k
                GC_STACK_PUSH(ref);
1584
169k
              }
1585
172k
            }
1586
173k
            zv++;
1587
173k
          }
1588
505k
          goto handle_ht;
1589
505k
        }
1590
505k
      }
1591
1592
31.7k
handle_zvals:
1593
175k
      for (; n != 0; n--) {
1594
154k
        if (Z_COLLECTABLE_P(zv)) {
1595
46.5k
          ref = Z_COUNTED_P(zv);
1596
46.5k
          GC_ADDREF(ref);
1597
46.5k
          if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1598
11.0k
            GC_REF_SET_BLACK(ref);
1599
11.0k
            zv++;
1600
18.3k
            while (--n) {
1601
7.26k
              if (Z_COLLECTABLE_P(zv)) {
1602
3.35k
                zend_refcounted *ref = Z_COUNTED_P(zv);
1603
3.35k
                GC_ADDREF(ref);
1604
3.35k
                if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1605
850
                  GC_REF_SET_BLACK(ref);
1606
850
                  GC_STACK_PUSH(ref);
1607
850
                }
1608
3.35k
              }
1609
7.26k
              zv++;
1610
7.26k
            }
1611
11.0k
            goto tail_call;
1612
11.0k
          }
1613
46.5k
        }
1614
143k
        zv++;
1615
143k
      }
1616
31.7k
    }
1617
515k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1618
    /* optimization: color is GC_BLACK (0) */
1619
399k
    if (!GC_INFO(ref)) {
1620
394k
      gc_add_garbage(ref);
1621
394k
    }
1622
399k
    ht = (zend_array*)ref;
1623
1624
904k
handle_ht:
1625
904k
    n = ht->nNumUsed;
1626
904k
    if (HT_IS_PACKED(ht)) {
1627
21.7k
      zv = ht->arPacked;
1628
21.7k
      goto handle_zvals;
1629
21.7k
    }
1630
1631
882k
    p = ht->arData;
1632
1.73M
    for (; n != 0; n--) {
1633
1.16M
      zv = &p->val;
1634
1.16M
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1635
639k
        zv = Z_INDIRECT_P(zv);
1636
639k
      }
1637
1.16M
      if (Z_COLLECTABLE_P(zv)) {
1638
416k
        ref = Z_COUNTED_P(zv);
1639
416k
        GC_ADDREF(ref);
1640
416k
        if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1641
304k
          GC_REF_SET_BLACK(ref);
1642
304k
          p++;
1643
1.15M
          while (--n) {
1644
849k
            zv = &p->val;
1645
849k
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1646
113k
              zv = Z_INDIRECT_P(zv);
1647
113k
            }
1648
849k
            if (Z_COLLECTABLE_P(zv)) {
1649
680k
              zend_refcounted *ref = Z_COUNTED_P(zv);
1650
680k
              GC_ADDREF(ref);
1651
680k
              if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1652
416k
                GC_REF_SET_BLACK(ref);
1653
416k
                GC_STACK_PUSH(ref);
1654
416k
              }
1655
680k
            }
1656
849k
            p++;
1657
849k
          }
1658
304k
          goto tail_call;
1659
304k
        }
1660
416k
      }
1661
856k
      p++;
1662
856k
    }
1663
882k
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1664
13.1k
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1665
11.7k
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1666
11.7k
      GC_ADDREF(ref);
1667
11.7k
      if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1668
1.54k
        GC_REF_SET_BLACK(ref);
1669
1.54k
        goto tail_call;
1670
1.54k
      }
1671
11.7k
    }
1672
13.1k
  }
1673
1674
610k
next:
1675
610k
  ref = GC_STACK_POP();
1676
610k
  if (ref) {
1677
586k
    goto tail_call;
1678
586k
  }
1679
1680
24.5k
  return count;
1681
610k
}
1682
1683
/* Traverse the reference graph from all roots, marking white nodes as garbage.
 *
 * Pass 1 drops roots that the scan phase re-colored black (still live) from
 * the buffer, then the buffer is compacted.  Pass 2 tags every remaining root
 * as garbage and lets gc_collect_white() process each still-white root.
 *
 * flags: out-parameter accumulating GC flags (e.g. destructor presence).
 * stack: explicit traversal stack shared with gc_collect_white().
 * Returns the total count reported by gc_collect_white() calls. */
static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
{
  uint32_t idx, end;
  zend_refcounted *ref;
  int count = 0;
  gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
  gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));

  /* remove non-garbage from the list */
  while (current != last) {
    if (GC_IS_ROOT(current->ref)) {
      if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
        GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
        gc_remove_from_roots(current);
      }
    }
    current++;
  }

  /* Close the holes left by the removals above. */
  gc_compact();

  /* Root buffer might be reallocated during gc_collect_white,
   * make sure to reload pointers. */
  idx = GC_FIRST_ROOT;
  end = GC_G(first_unused);
  while (idx != end) {
    current = GC_IDX2PTR(idx);
    ref = current->ref;
    ZEND_ASSERT(GC_IS_ROOT(ref));
    /* Everything still in the buffer at this point is garbage. */
    current->ref = GC_MAKE_GARBAGE(ref);
    if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
      GC_REF_SET_BLACK(ref);
      count += gc_collect_white(ref, flags, stack);
    }
    idx++;
  }

  return count;
}
1723
1724
/* Remove all data reachable from ref out of the root buffer.
 *
 * Used before calling destructors: nested data of a to-be-destructed object
 * must not be freed by this GC run, so buffered entries reachable from ref
 * are taken out of the buffer (only those that are buffered AND black).
 *
 * ref:   starting node of the traversal.
 * root:  non-NULL on the initial call; the root itself is counted but kept
 *        in the buffer (it was colored purple by the caller).
 * stack: explicit traversal stack; together with the tail_call label it
 *        emulates recursion without consuming the C stack.
 * Returns the number of entries counted/removed (subtracted from the
 * collection count by the caller). */
static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
{
  HashTable *ht;
  Bucket *p;
  zval *zv;
  uint32_t n;
  int count = 0;
  GC_STACK_DCL(stack);

tail_call:
  if (root) {
    /* First iteration: count the root but do not remove it. */
    root = NULL;
    count++;
  } else if (GC_REF_ADDRESS(ref) != 0
   && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
    /* Buffered and live: drop it from the root buffer. */
    GC_TRACE_REF(ref, "removing from buffer");
    GC_REMOVE_FROM_BUFFER(ref);
    count++;
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
    /* Follow the reference's payload if it is collectable. */
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
      ref = Z_COUNTED(((zend_reference*)ref)->val);
      goto tail_call;
    }
    goto next;
  } else {
    goto next;
  }

  if (GC_TYPE(ref) == IS_OBJECT) {
    zend_object *obj = (zend_object*)ref;

    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
      int len;
      zval *table;

      /* Weakmap entries referencing this object are traversed too. */
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
        n = len;
        zv = table;
        for (; n != 0; n--) {
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
          zval *entry = (zval*) Z_PTR_P(zv);
          if (Z_OPT_COLLECTABLE_P(entry)) {
            ref = Z_COUNTED_P(entry);
            GC_STACK_PUSH(ref);
          }
          zv++;
        }
      }

      /* Ask the object's handler for its GC children. */
      ht = obj->handlers->get_gc(obj, &table, &len);
      n = len;
      zv = table;
      if (UNEXPECTED(ht)) {
        /* Handler returned both a zval table and a hash table:
         * push the table entries, then continue with the ht. */
        for (; n != 0; n--) {
          if (Z_COLLECTABLE_P(zv)) {
            ref = Z_COUNTED_P(zv);
            GC_STACK_PUSH(ref);
          }
          zv++;
        }
        if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
          GC_TRACE_REF(ht, "removing from buffer");
          GC_REMOVE_FROM_BUFFER(ht);
        }
        goto handle_ht;
      }

handle_zvals:
      /* Walk a flat zval array; descend into the first collectable
       * entry directly (tail call), push the rest on the stack. */
      for (; n != 0; n--) {
        if (Z_COLLECTABLE_P(zv)) {
          ref = Z_COUNTED_P(zv);
          zv++;
          while (--n) {
            if (Z_COLLECTABLE_P(zv)) {
              zend_refcounted *ref = Z_COUNTED_P(zv);
              GC_STACK_PUSH(ref);
            }
            zv++;
          }
          goto tail_call;
        }
        zv++;
      }
    }
  } else if (GC_TYPE(ref) == IS_ARRAY) {
    ht = (zend_array*)ref;

handle_ht:
    n = ht->nNumUsed;
    if (HT_IS_PACKED(ht)) {
      zv = ht->arPacked;
      goto handle_zvals;
    }

    /* Bucket-based hash table: like handle_zvals, but values may be
     * IS_INDIRECT and must be dereferenced first. */
    p = ht->arData;
    for (; n != 0; n--) {
      zv = &p->val;
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
        zv = Z_INDIRECT_P(zv);
      }
      if (Z_COLLECTABLE_P(zv)) {
        ref = Z_COUNTED_P(zv);
        p++;
        while (--n) {
          zv = &p->val;
          if (Z_TYPE_P(zv) == IS_INDIRECT) {
            zv = Z_INDIRECT_P(zv);
          }
          if (Z_COLLECTABLE_P(zv)) {
            zend_refcounted *ref = Z_COUNTED_P(zv);
            GC_STACK_PUSH(ref);
          }
          p++;
        }
        goto tail_call;
      }
      p++;
    }
  }

next:
  /* Continue with the next pending node, if any. */
  ref = GC_STACK_POP();
  if (ref) {
    goto tail_call;
  }

  return count;
}
1853
1854
static void zend_get_gc_buffer_release(void);
1855
static void zend_gc_check_root_tmpvars(void);
1856
static void zend_gc_remove_root_tmpvars(void);
1857
1858
static zend_internal_function gc_destructor_fiber;
1859
1860
/* Fatal-error helper: allocation of the GC destructor fiber failed. */
static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void)
{
  zend_error_noreturn(E_ERROR, "Unable to create destructor fiber");
}
1864
1865
/* Fatal-error helper: starting the GC destructor fiber failed. */
static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void)
{
  zend_error_noreturn(E_ERROR, "Unable to start destructor fiber");
}
1869
1870
/* Call destructors for garbage in the buffer.
 *
 * idx/end: root buffer index range to process.
 * fiber:   non-NULL when running inside the destructor fiber; in that case
 *          progress is recorded in GC_G(dtor_idx) so a replacement fiber can
 *          resume where this one was suspended.
 * Returns FAILURE if (running in a fiber) a destructor suspended the fiber
 * and we were resumed from elsewhere; SUCCESS otherwise. */
static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber)
{
  gc_root_buffer *current;
  zend_refcounted *p;

  /* The root buffer might be reallocated during destructors calls,
   * make sure to reload pointers as necessary. */
  while (idx != end) {
    current = GC_IDX2PTR(idx);
    if (GC_IS_DTOR_GARBAGE(current->ref)) {
      p = GC_GET_PTR(current->ref);
      /* Mark this is as a normal root for the next GC run */
      current->ref = p;
      /* Double check that the destructor hasn't been called yet. It
       * could have already been invoked indirectly by some other
       * destructor. */
      if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
        if (fiber != NULL) {
          /* Remember progress in case the fiber gets suspended. */
          GC_G(dtor_idx) = idx;
        }
        zend_object *obj = (zend_object*)p;
        GC_TRACE_REF(obj, "calling destructor");
        GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
        /* Hold a reference across the dtor call so the object
         * cannot be freed underneath us. */
        GC_ADDREF(obj);
        obj->handlers->dtor_obj(obj);
        GC_TRACE_REF(obj, "returned from destructor");
        GC_DELREF(obj);
        if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) {
          /* We resumed after suspension */
          gc_check_possible_root((zend_refcounted*)&obj->gc);
          return FAILURE;
        }
      }
    }
    idx++;
  }

  return SUCCESS;
}
1910
1911
/* Create and start the fiber that runs GC destructors (entry point:
 * gc_destructor_fiber()).  Aborts with a fatal error on failure.
 * The new fiber is stored in GC_G(dtor_fiber) and returned. */
static zend_fiber *gc_create_destructor_fiber(void)
{
  zval zobj;
  zend_fiber *fiber;

  GC_TRACE("starting destructor fiber");

  if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) {
    gc_create_destructor_fiber_error();
  }

  fiber = (zend_fiber *)Z_OBJ(zobj);
  fiber->fci.size = sizeof(fiber->fci);
  /* The fiber's callable is the internal gc_destructor_fiber function. */
  fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber;

  /* Publish before starting: the fiber body reads GC_G(dtor_fiber). */
  GC_G(dtor_fiber) = fiber;

  if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) {
    gc_start_destructor_fiber_error();
  }

  return fiber;
}
1934
1935
/* Run the pending GC destructors inside a dedicated fiber.  If a destructor
 * suspends that fiber, ownership of it is released and a fresh fiber is
 * started to continue with the remaining destructors.
 * NOTE(review): the `end` parameter appears unused here — dtor_end is taken
 * from GC_G(first_unused) instead (equal at the call site); confirm upstream. */
static zend_never_inline void gc_call_destructors_in_fiber(uint32_t end)
{
  ZEND_ASSERT(!GC_G(dtor_fiber_running));

  zend_fiber *fiber = GC_G(dtor_fiber);

  /* Hand the work range to the fiber via globals. */
  GC_G(dtor_idx) = GC_FIRST_ROOT;
  GC_G(dtor_end) = GC_G(first_unused);

  if (UNEXPECTED(!fiber)) {
    fiber = gc_create_destructor_fiber();
  } else {
    /* Reuse the fiber left suspended by a previous GC run. */
    zend_fiber_resume(fiber, NULL, NULL);
  }

  for (;;) {
    /* At this point, fiber has executed until suspension */
    GC_TRACE("resumed from destructor fiber");

    if (UNEXPECTED(GC_G(dtor_fiber_running))) {
      /* Fiber was suspended by a destructor. Start a new one for the
       * remaining destructors. */
      GC_TRACE("destructor fiber suspended by destructor");
      GC_G(dtor_fiber) = NULL;
      GC_G(dtor_idx)++;
      /* We do not own the fiber anymore. It may be collected if the
       * application does not reference it. */
      zend_object_release(&fiber->std);
      fiber = gc_create_destructor_fiber();
      continue;
    } else {
      /* Fiber suspended itself after calling all destructors */
      GC_TRACE("destructor fiber suspended itself");
      break;
    }
  }
}
1972
1973
/* Perform a garbage collection run. The default implementation of gc_collect_cycles.
 *
 * Phases: remove rooted TMPVARs, mark/scan/collect roots, optionally call
 * destructors (which forces one automatic re-run), destroy garbage zvals,
 * free the memory, compact the buffer, and finally re-root live TMPVARs.
 * Returns the total number of collected cycle members. */
ZEND_API int zend_gc_collect_cycles(void)
{
  int total_count = 0;
  bool should_rerun_gc = false;
  bool did_rerun_gc = false;

  zend_hrtime_t start_time = zend_hrtime();
  if (GC_G(num_roots) && !GC_G(gc_active)) {
    zend_gc_remove_root_tmpvars();
  }

rerun_gc:
  if (GC_G(num_roots)) {
    int count;
    gc_root_buffer *current, *last;
    zend_refcounted *p;
    uint32_t gc_flags = 0;
    uint32_t idx, end;
    gc_stack stack;

    stack.prev = NULL;
    stack.next = NULL;

    /* A collection is already in progress: do not re-enter. */
    if (GC_G(gc_active)) {
      GC_G(collector_time) += zend_hrtime() - start_time;
      return 0;
    }

    GC_TRACE("Collecting cycles");
    GC_G(gc_runs)++;
    GC_G(gc_active) = 1;

    GC_TRACE("Marking roots");
    gc_mark_roots(&stack);
    GC_TRACE("Scanning roots");
    gc_scan_roots(&stack);

    GC_TRACE("Collecting roots");
    count = gc_collect_roots(&gc_flags, &stack);

    if (!GC_G(num_roots)) {
      /* nothing to free */
      GC_TRACE("Nothing to free");
      gc_stack_free(&stack);
      GC_G(gc_active) = 0;
      goto finish;
    }

    end = GC_G(first_unused);

    if (gc_flags & GC_HAS_DESTRUCTORS) {
      GC_TRACE("Calling destructors");

      /* During a destructor call, new externally visible references to nested data may
       * be introduced. These references can be introduced in a way that does not
       * modify any refcounts, so we have no real way to detect this situation
       * short of rerunning full GC tracing. What we do instead is to only run
       * destructors at this point and automatically re-run GC afterwards. */
      should_rerun_gc = true;

      /* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
       * color them purple. This serves a double purpose: First, they should be
       * considered new potential roots for the next GC run. Second, it will prevent
       * their removal from the root buffer by nested data removal. */
      idx = GC_FIRST_ROOT;
      current = GC_IDX2PTR(GC_FIRST_ROOT);
      while (idx != end) {
        if (GC_IS_GARBAGE(current->ref)) {
          p = GC_GET_PTR(current->ref);
          if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
            zend_object *obj = (zend_object *) p;
            if (obj->handlers->dtor_obj != zend_objects_destroy_object
              || obj->ce->destructor) {
              current->ref = GC_MAKE_DTOR_GARBAGE(obj);
              GC_REF_SET_COLOR(obj, GC_PURPLE);
            } else {
              /* No user-visible dtor: mark it called so the object
               * can be freed in this run. */
              GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
            }
          }
        }
        current++;
        idx++;
      }

      /* Remove nested data for objects on which a destructor will be called.
       * This will not remove the objects themselves, as they have been colored
       * purple. */
      idx = GC_FIRST_ROOT;
      current = GC_IDX2PTR(GC_FIRST_ROOT);
      while (idx != end) {
        if (GC_IS_DTOR_GARBAGE(current->ref)) {
          p = GC_GET_PTR(current->ref);
          count -= gc_remove_nested_data_from_buffer(p, current, &stack);
        }
        current++;
        idx++;
      }

      /* Actually call destructors. */
      zend_hrtime_t dtor_start_time = zend_hrtime();
      if (EXPECTED(!EG(active_fiber))) {
        gc_call_destructors(GC_FIRST_ROOT, end, NULL);
      } else {
        /* Inside a fiber: run dtors in a dedicated destructor fiber. */
        gc_call_destructors_in_fiber(end);
      }
      GC_G(dtor_time) += zend_hrtime() - dtor_start_time;

      if (GC_G(gc_protected)) {
        /* something went wrong */
        zend_get_gc_buffer_release();
        GC_G(collector_time) += zend_hrtime() - start_time;
        return 0;
      }
    }

    gc_stack_free(&stack);

    /* Destroy zvals. The root buffer may be reallocated. */
    GC_TRACE("Destroying zvals");
    zend_hrtime_t free_start_time = zend_hrtime();
    idx = GC_FIRST_ROOT;
    while (idx != end) {
      current = GC_IDX2PTR(idx);
      if (GC_IS_GARBAGE(current->ref)) {
        p = GC_GET_PTR(current->ref);
        GC_TRACE_REF(p, "destroying");
        if (GC_TYPE(p) == IS_OBJECT) {
          zend_object *obj = (zend_object*)p;

          EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
          GC_TYPE_INFO(obj) = GC_NULL |
            (GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
          /* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
          current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
          if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
            GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
            GC_ADDREF(obj);
            obj->handlers->free_obj(obj);
            GC_DELREF(obj);
          }

          ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
        } else if (GC_TYPE(p) == IS_ARRAY) {
          zend_array *arr = (zend_array*)p;

          GC_TYPE_INFO(arr) = GC_NULL |
            (GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);

          /* GC may destroy arrays with rc>1. This is valid and safe. */
          HT_ALLOW_COW_VIOLATION(arr);

          zend_hash_destroy(arr);
        }
      }
      idx++;
    }

    /* Free objects */
    current = GC_IDX2PTR(GC_FIRST_ROOT);
    last = GC_IDX2PTR(end);
    while (current != last) {
      if (GC_IS_GARBAGE(current->ref)) {
        p = GC_GET_PTR(current->ref);
        GC_LINK_UNUSED(current);
        GC_G(num_roots)--;
        efree(p);
      }
      current++;
    }

    GC_G(free_time) += zend_hrtime() - free_start_time;

    GC_TRACE("Collection finished");
    GC_G(collected) += count;
    total_count += count;
    GC_G(gc_active) = 0;
  }

  gc_compact();

  /* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
   * up. We do this only once: If we encounter more destructors on the second run, we'll not
   * run GC another time. */
  if (should_rerun_gc && !did_rerun_gc) {
    did_rerun_gc = true;
    goto rerun_gc;
  }

finish:
  zend_get_gc_buffer_release();

  /* Prevent GC from running during zend_gc_check_root_tmpvars, before
   * gc_threshold is adjusted, as this may result in unbounded recursion */
  GC_G(gc_active) = 1;
  zend_gc_check_root_tmpvars();
  GC_G(gc_active) = 0;

  GC_G(collector_time) += zend_hrtime() - start_time;
  return total_count;
}
2174
2175
/* Fill *status with a snapshot of the GC state, statistics and timing
 * counters.  application_time is measured from GC_G(activated_at) to now. */
ZEND_API void zend_gc_get_status(zend_gc_status *status)
{
  status->active = GC_G(gc_active);
  status->gc_protected = GC_G(gc_protected);
  status->full = GC_G(gc_full);
  status->runs = GC_G(gc_runs);
  status->collected = GC_G(collected);
  status->threshold = GC_G(gc_threshold);
  status->buf_size = GC_G(buf_size);
  status->num_roots = GC_G(num_roots);
  status->application_time = zend_hrtime() - GC_G(activated_at);
  status->collector_time = GC_G(collector_time);
  status->dtor_time = GC_G(dtor_time);
  status->free_time = GC_G(free_time);
}
2190
2191
94.4k
/* Reset and hand out the shared get_gc() scratch buffer. */
ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
  /* There can only be one get_gc() call active at a time,
   * so there only needs to be one buffer. */
  zend_get_gc_buffer *buf = &EG(get_gc_buffer);
  buf->cur = buf->start;
  return buf;
}
2198
2199
2.10k
/* Double the capacity of the shared get_gc() buffer (64 zvals initially),
 * preserving its contents and the current write position. */
ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
  size_t capacity = gc_buffer->end - gc_buffer->start;
  size_t doubled;
  if (capacity == 0) {
    doubled = 64;
  } else {
    doubled = capacity * 2;
  }
  gc_buffer->start = erealloc(gc_buffer->start, doubled * sizeof(zval));
  gc_buffer->end = gc_buffer->start + doubled;
  gc_buffer->cur = gc_buffer->start + capacity;
}
2206
2207
875k
static void zend_get_gc_buffer_release(void) {
2208
875k
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2209
875k
  efree(gc_buffer->start);
2210
875k
  gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
2211
875k
}
2212
2213
/* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
2214
 * cycles. However, there are some rare exceptions where this is possible, in which case we rely
2215
 * on the producing code to root the value. If a GC run occurs between the rooting and consumption
2216
 * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
2217
875k
static void zend_gc_check_root_tmpvars(void) {
2218
875k
  zend_execute_data *ex = EG(current_execute_data);
2219
1.07M
  for (; ex; ex = ex->prev_execute_data) {
2220
200k
    zend_function *func = ex->func;
2221
200k
    if (!func || !ZEND_USER_CODE(func->type)) {
2222
198k
      continue;
2223
198k
    }
2224
2225
2.26k
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2226
8.82k
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2227
6.92k
      const zend_live_range *range = &func->op_array.live_range[i];
2228
6.92k
      if (range->start > op_num) {
2229
373
        break;
2230
373
      }
2231
6.55k
      if (range->end <= op_num) {
2232
6.26k
        continue;
2233
6.26k
      }
2234
2235
291
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2236
291
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2237
232
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2238
232
        zval *var = ZEND_CALL_VAR(ex, var_num);
2239
232
        if (Z_COLLECTABLE_P(var)) {
2240
220
          gc_check_possible_root(Z_COUNTED_P(var));
2241
220
        }
2242
232
      }
2243
291
    }
2244
2.26k
  }
2245
875k
}
2246
2247
22.3k
static void zend_gc_remove_root_tmpvars(void) {
2248
22.3k
  zend_execute_data *ex = EG(current_execute_data);
2249
27.9k
  for (; ex; ex = ex->prev_execute_data) {
2250
5.58k
    zend_function *func = ex->func;
2251
5.58k
    if (!func || !ZEND_USER_CODE(func->type)) {
2252
3.64k
      continue;
2253
3.64k
    }
2254
2255
1.94k
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2256
8.11k
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2257
6.46k
      const zend_live_range *range = &func->op_array.live_range[i];
2258
6.46k
      if (range->start > op_num) {
2259
290
        break;
2260
290
      }
2261
6.17k
      if (range->end <= op_num) {
2262
5.95k
        continue;
2263
5.95k
      }
2264
2265
220
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2266
220
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2267
220
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2268
220
        zval *var = ZEND_CALL_VAR(ex, var_num);
2269
220
        if (Z_COLLECTABLE_P(var)) {
2270
220
          GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var));
2271
220
        }
2272
220
      }
2273
220
    }
2274
1.94k
  }
2275
22.3k
}
2276
2277
#if GC_BENCH
2278
void gc_bench_print(void)
2279
{
2280
  fprintf(stderr, "GC Statistics\n");
2281
  fprintf(stderr, "-------------\n");
2282
  fprintf(stderr, "Runs:               %d\n", GC_G(gc_runs));
2283
  fprintf(stderr, "Collected:          %d\n", GC_G(collected));
2284
  fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length));
2285
  fprintf(stderr, "Root buffer peak:   %d\n\n", GC_G(root_buf_peak));
2286
  fprintf(stderr, "      Possible            Remove from  Marked\n");
2287
  fprintf(stderr, "        Root    Buffered     buffer     grey\n");
2288
  fprintf(stderr, "      --------  --------  -----------  ------\n");
2289
  fprintf(stderr, "ZVAL  %8d  %8d  %9d  %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey));
2290
}
2291
#endif
2292
2293
#ifdef ZTS
2294
size_t zend_gc_globals_size(void)
2295
{
2296
  return sizeof(zend_gc_globals);
2297
}
2298
#endif
2299
2300
/* Body of the destructor fiber.  Loops forever: run all pending destructors
 * for the range [GC_G(dtor_idx), GC_G(dtor_end)), then suspend until the
 * next GC run resumes us.  Returns only when a destructor suspended the
 * fiber (ownership was handed off) or the fiber is destroyed at shutdown. */
static ZEND_FUNCTION(gc_destructor_fiber)
{
  uint32_t idx, end;

  zend_fiber *fiber = GC_G(dtor_fiber);
  ZEND_ASSERT(fiber != NULL);
  ZEND_ASSERT(fiber == EG(active_fiber));

  for (;;) {
    GC_G(dtor_fiber_running) = true;

    idx = GC_G(dtor_idx);
    end = GC_G(dtor_end);
    if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) {
      /* We resumed after being suspended by a destructor */
      return;
    }

    /* We have called all destructors. Suspend fiber until the next GC run
     */
    GC_G(dtor_fiber_running) = false;
    zend_fiber_suspend(fiber, NULL, NULL);

    if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) {
      /* Fiber is being destroyed by shutdown sequence */
      if (GC_G(dtor_fiber) == fiber) {
        GC_G(dtor_fiber) = NULL;
      }
      /* Drop our reference and let GC handle a possible cycle. */
      GC_DELREF(&fiber->std);
      gc_check_possible_root((zend_refcounted*)&fiber->std.gc);
      return;
    }
  }
}
2334
2335
/* Internal-function descriptor used as the destructor fiber's callable
 * (see gc_create_destructor_fiber()); its name is set in gc_init(). */
static zend_internal_function gc_destructor_fiber = {
  .type = ZEND_INTERNAL_FUNCTION,
  .fn_flags = ZEND_ACC_PUBLIC,
  .handler = ZEND_FN(gc_destructor_fiber),
};
2340
2341
void gc_init(void)
2342
16
{
2343
16
  gc_destructor_fiber.function_name = zend_string_init_interned(
2344
16
      "gc_destructor_fiber",
2345
16
      strlen("gc_destructor_fiber"),
2346
      true);
2347
16
}