Coverage Report

Created: 2026-02-14 06:52

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/php-src/Zend/zend_gc.c
Line
Count
Source
1
/*
2
   +----------------------------------------------------------------------+
3
   | Zend Engine                                                          |
4
   +----------------------------------------------------------------------+
5
   | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
6
   +----------------------------------------------------------------------+
7
   | This source file is subject to version 2.00 of the Zend license,     |
8
   | that is bundled with this package in the file LICENSE, and is        |
9
   | available through the world-wide-web at the following url:           |
10
   | http://www.zend.com/license/2_00.txt.                                |
11
   | If you did not receive a copy of the Zend license and are unable to  |
12
   | obtain it through the world-wide-web, please send a note to          |
13
   | license@zend.com so we can mail you a copy immediately.              |
14
   +----------------------------------------------------------------------+
15
   | Authors: David Wang <planetbeing@gmail.com>                          |
16
   |          Dmitry Stogov <dmitry@php.net>                              |
17
   +----------------------------------------------------------------------+
18
*/
19
20
/**
21
 * zend_gc_collect_cycles
22
 * ======================
23
 *
24
 * Colors and their meanings
25
 * ----------------------
26
 *
27
 * BLACK  (GC_BLACK)   - In use or free.
28
 * GREY   (GC_GREY)    - Possible member of cycle.
29
 * WHITE  (GC_WHITE)   - Member of garbage cycle.
30
 * PURPLE (GC_PURPLE)  - Possible root of cycle.
31
 *
32
 * Colors described in the paper but not used
33
 * ------------------------------------------
34
 *
35
 * GREEN - Acyclic
36
 * RED   - Candidate cycle undergoing Σ-computation.
37
 * ORANGE - Candidate cycle awaiting epoch boundary.
38
 *
39
 *
40
 * Flow
41
 * =====
42
 *
43
 * The garbage collect cycle starts from 'gc_mark_roots', which traverses the
44
 * possible roots, and calls mark_grey for roots that are marked purple,
45
 * using a depth-first traversal.
46
 *
47
 * After all possible roots are traversed and marked,
48
 * gc_scan_roots will be called, and each root will be called with
49
 * gc_scan(root->ref)
50
 *
51
 * gc_scan checks the colors of possible members.
52
 *
53
 * If the node is marked as grey and the refcount > 0
54
 *    gc_scan_black will be called on that node to scan its subgraph.
55
 * otherwise (refcount == 0), it marks the node white.
56
 *
57
 * A node MAY be added to possible roots when ZEND_UNSET_VAR happens or
58
 * zend_assign_to_variable is called, but only when a possible garbage node is
59
 * produced.
60
 * gc_possible_root() will be called to add the nodes to possible roots.
61
 *
62
 *
63
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64
 * get the object properties to scan.
65
 *
66
 *
67
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68
 */
69
#include "zend.h"
70
#include "zend_API.h"
71
#include "zend_compile.h"
72
#include "zend_errors.h"
73
#include "zend_fibers.h"
74
#include "zend_hrtime.h"
75
#include "zend_portability.h"
76
#include "zend_types.h"
77
#include "zend_weakrefs.h"
78
#include "zend_string.h"
79
#include "zend_exceptions.h"
80
81
#ifndef GC_BENCH
82
# define GC_BENCH 0
83
#endif
84
85
#ifndef ZEND_GC_DEBUG
86
# define ZEND_GC_DEBUG 0
87
#endif
88
89
/* GC_INFO layout */
90
2.11M
#define GC_ADDRESS  0x0fffffu
91
11.2M
#define GC_COLOR    0x300000u
92
93
#define GC_BLACK    0x000000u /* must be zero */
94
#define GC_WHITE    0x100000u
95
#define GC_GREY     0x200000u
96
#define GC_PURPLE   0x300000u
97
98
/* Debug tracing */
99
#if ZEND_GC_DEBUG > 1
100
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
101
# define GC_TRACE_REF(ref, format, ...) \
102
  do { \
103
    gc_trace_ref((zend_refcounted *) ref); \
104
    fprintf(stderr, format "\n", ##__VA_ARGS__); \
105
  } while (0)
106
# define GC_TRACE_SET_COLOR(ref, color) \
107
  GC_TRACE_REF(ref, "->%s", gc_color_name(color))
108
#else
109
# define GC_TRACE_REF(ref, format, ...)
110
# define GC_TRACE_SET_COLOR(ref, new_color)
111
# define GC_TRACE(str)
112
#endif
113
114
/* GC_INFO access */
115
#define GC_REF_ADDRESS(ref) \
116
2.11M
  (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
117
118
#define GC_REF_COLOR(ref) \
119
  (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
120
121
#define GC_REF_CHECK_COLOR(ref, color) \
122
8.02M
  ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
123
124
4.33M
#define GC_REF_SET_INFO(ref, info) do { \
125
4.33M
    GC_TYPE_INFO(ref) = \
126
4.33M
      (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
127
4.33M
      ((info) << GC_INFO_SHIFT); \
128
4.33M
  } while (0)
129
130
2.07M
#define GC_REF_SET_COLOR(ref, c) do { \
131
2.07M
    GC_TRACE_SET_COLOR(ref, c); \
132
2.07M
    GC_TYPE_INFO(ref) = \
133
2.07M
      (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
134
2.07M
      ((c) << GC_INFO_SHIFT); \
135
2.07M
  } while (0)
136
137
1.11M
#define GC_REF_SET_BLACK(ref) do { \
138
1.11M
    GC_TRACE_SET_COLOR(ref, GC_BLACK); \
139
1.11M
    GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
140
1.11M
  } while (0)
141
142
#define GC_REF_SET_PURPLE(ref) do { \
143
    GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
144
    GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
145
  } while (0)
146
147
/* bit stealing tags for gc_root_buffer.ref */
148
1.79M
#define GC_BITS    0x3
149
150
450k
#define GC_ROOT    0x0 /* possible root of circular garbage     */
151
2.27M
#define GC_UNUSED  0x1 /* part of linked list of unused buffers */
152
1.77M
#define GC_GARBAGE 0x2 /* garbage to delete                     */
153
11.9k
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */
154
155
#define GC_GET_PTR(ptr) \
156
75.9k
  ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
157
158
#define GC_IS_ROOT(ptr) \
159
450k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
160
#define GC_IS_UNUSED(ptr) \
161
96.7k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
162
#define GC_IS_GARBAGE(ptr) \
163
1.16M
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
164
#define GC_IS_DTOR_GARBAGE(ptr) \
165
9.27k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)
166
167
#define GC_MAKE_GARBAGE(ptr) \
168
608k
  ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
169
#define GC_MAKE_DTOR_GARBAGE(ptr) \
170
2.68k
  ((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
171
172
/* GC address conversion */
173
5.93M
#define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
174
2.18M
#define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))
175
176
/* Get the value to be placed in an unused buffer entry with the specified next unused list index */
177
2.17M
#define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
178
/* Get the index of the next item in the unused list from the given root buffer entry. */
179
687k
#define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
180
181
/* GC buffers */
182
935k
#define GC_INVALID           0
183
1.26M
#define GC_FIRST_ROOT        1
184
185
16
#define GC_DEFAULT_BUF_SIZE  (16 * 1024)
186
1
#define GC_BUF_GROW_STEP     (128 * 1024)
187
188
0
#define GC_MAX_UNCOMPRESSED  (512 * 1024)
189
2
#define GC_MAX_BUF_SIZE      0x40000000
190
191
16
#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
192
0
#define GC_THRESHOLD_STEP    10000
193
0
#define GC_THRESHOLD_MAX     1000000000
194
0
#define GC_THRESHOLD_TRIGGER 100
195
196
/* GC flags */
197
6.14k
#define GC_HAS_DESTRUCTORS  (1<<0)
198
199
/* Weak maps */
200
1.56k
#define Z_FROM_WEAKMAP_KEY    (1<<0)
201
1.52k
#define Z_FROM_WEAKMAP      (1<<1)
202
203
/* The WeakMap entry zv is reachable from roots by following the virtual
204
 * reference from a WeakMap key to the entry */
205
#define GC_FROM_WEAKMAP_KEY(zv) \
206
389
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT))
207
208
580
#define GC_SET_FROM_WEAKMAP_KEY(zv) do {                    \
209
580
  zval *_z = (zv);                               \
210
580
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
211
580
} while (0)
212
213
593
#define GC_UNSET_FROM_WEAKMAP_KEY(zv) do {                    \
214
593
  zval *_z = (zv);                               \
215
593
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
216
593
} while (0)
217
218
/* The WeakMap entry zv is reachable from roots by following the reference from
219
 * the WeakMap */
220
#define GC_FROM_WEAKMAP(zv) \
221
1.08k
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT))
222
223
198
#define GC_SET_FROM_WEAKMAP(zv) do {                        \
224
198
  zval *_z = (zv);                               \
225
198
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
226
198
} while (0)
227
228
237
#define GC_UNSET_FROM_WEAKMAP(zv) do {                      \
229
237
  zval *_z = (zv);                               \
230
237
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
231
237
} while (0)
232
233
/* unused buffers */
234
235
/* Are there any unused root buffer entries? */
236
#define GC_HAS_UNUSED() \
237
472k
  (GC_G(unused) != GC_INVALID)
238
239
/* Get the next unused entry and remove it from the list */
240
#define GC_FETCH_UNUSED() \
241
687k
  gc_fetch_unused()
242
243
/* Add a root buffer entry to the unused list */
244
#define GC_LINK_UNUSED(root) \
245
2.17M
  gc_link_unused(root)
246
247
#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
248
  (GC_G(first_unused) < GC_G(gc_threshold))
249
#define GC_HAS_NEXT_UNUSED() \
250
472k
  (GC_G(first_unused) != GC_G(buf_size))
251
#define GC_FETCH_NEXT_UNUSED() \
252
1.49M
  gc_fetch_next_unused()
253
254
ZEND_API int (*gc_collect_cycles)(void);
255
256
/* The type of a root buffer entry.
257
 *
258
 * The lower two bits are used for flags and need to be masked out to
259
 * reconstruct a pointer.
260
 *
261
 * When a node in the root buffer is removed, the non-flag bits of the
262
 * unused entry are used to store the index of the next entry in the unused
263
 * list. */
264
typedef struct _gc_root_buffer {
265
  zend_refcounted  *ref;
266
} gc_root_buffer;
267
268
typedef struct _zend_gc_globals {
269
  /* The root buffer, which stores possible roots of reference cycles. It is
270
   * also used to store garbage to be collected at the end of a run.
271
   * A single array which is reallocated as necessary. */
272
  gc_root_buffer   *buf;
273
274
  bool         gc_enabled;
275
  bool         gc_active;        /* GC currently running, forbid nested GC */
276
  bool         gc_protected;     /* GC protected, forbid root additions */
277
  bool         gc_full;
278
279
  uint32_t          unused;     /* linked list of unused buffers    */
280
  uint32_t          first_unused;   /* first unused buffer              */
281
  uint32_t          gc_threshold;     /* GC collection threshold          */
282
  uint32_t          buf_size;     /* size of the GC buffer            */
283
  uint32_t          num_roots;    /* number of roots in GC buffer     */
284
285
  uint32_t gc_runs;         /* number of GC runs since reset */
286
  uint32_t collected;         /* number of collected nodes since reset */
287
288
  zend_hrtime_t activated_at;     /* the timestamp of the last reset */
289
  zend_hrtime_t collector_time;   /* time spent running GC (ns) */
290
  zend_hrtime_t dtor_time;      /* time spent calling destructors (ns) */
291
  zend_hrtime_t free_time;      /* time spent destroying nodes and freeing memory (ns) */
292
293
  uint32_t dtor_idx;      /* root buffer index */
294
  uint32_t dtor_end;
295
  zend_fiber *dtor_fiber;
296
  bool dtor_fiber_running;
297
298
#if GC_BENCH
299
  uint32_t root_buf_length;
300
  uint32_t root_buf_peak;
301
  uint32_t zval_possible_root;
302
  uint32_t zval_buffered;
303
  uint32_t zval_remove_from_buffer;
304
  uint32_t zval_marked_grey;
305
#endif
306
} zend_gc_globals;
307
308
#ifdef ZTS
309
static int gc_globals_id;
310
static size_t gc_globals_offset;
311
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
312
#else
313
31.1M
#define GC_G(v) (gc_globals.v)
314
static zend_gc_globals gc_globals;
315
#endif
316
317
#if GC_BENCH
318
# define GC_BENCH_INC(counter) GC_G(counter)++
319
# define GC_BENCH_DEC(counter) GC_G(counter)--
320
# define GC_BENCH_PEAK(peak, counter) do {    \
321
    if (GC_G(counter) > GC_G(peak)) {   \
322
      GC_G(peak) = GC_G(counter);     \
323
    }                   \
324
  } while (0)
325
#else
326
# define GC_BENCH_INC(counter)
327
# define GC_BENCH_DEC(counter)
328
# define GC_BENCH_PEAK(peak, counter)
329
#endif
330
331
332
2.27k
#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
333
334
typedef struct _gc_stack gc_stack;
335
336
/* The stack used for graph traversal is stored as a linked list of segments */
337
struct _gc_stack {
338
  gc_stack        *prev;
339
  gc_stack        *next;
340
  zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
341
};
342
343
#define GC_STACK_DCL(init) \
344
169k
  gc_stack *_stack = init; \
345
169k
  size_t    _top = 0;
346
347
#define GC_STACK_PUSH(ref) \
348
1.56M
  gc_stack_push(&_stack, &_top, ref);
349
350
#define GC_STACK_POP() \
351
1.73M
  gc_stack_pop(&_stack, &_top)
352
353
static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
354
18.9k
{
355
18.9k
  if (UNEXPECTED(!stack->next)) {
356
18.1k
    gc_stack *segment = emalloc(sizeof(gc_stack));
357
18.1k
    segment->prev = stack;
358
18.1k
    segment->next = NULL;
359
18.1k
    stack->next = segment;
360
18.1k
  }
361
18.9k
  return stack->next;
362
18.9k
}
363
364
static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
365
1.56M
{
366
1.56M
  if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
367
1.13k
    (*stack) = gc_stack_next(*stack);
368
1.13k
    (*top) = 0;
369
1.13k
  }
370
1.56M
  (*stack)->data[(*top)++] = ref;
371
1.56M
}
372
373
static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
374
1.73M
{
375
1.73M
  if (UNEXPECTED((*top) == 0)) {
376
170k
    if (!(*stack)->prev) {
377
169k
      return NULL;
378
169k
    } else {
379
1.13k
      (*stack) = (*stack)->prev;
380
1.13k
      (*top) = GC_STACK_SEGMENT_SIZE - 1;
381
1.13k
      return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
382
1.13k
    }
383
1.56M
  } else {
384
1.56M
    return (*stack)->data[--(*top)];
385
1.56M
  }
386
1.73M
}
387
388
static void gc_stack_free(gc_stack *stack)
389
20.5k
{
390
20.5k
  gc_stack *p = stack->next;
391
392
38.7k
  while (p) {
393
18.1k
    stack = p->next;
394
18.1k
    efree(p);
395
18.1k
    p = stack;
396
18.1k
  }
397
20.5k
}
398
399
/* Map a full index to a compressed index.
400
 *
401
 * The root buffer can have up to 2^30 entries, but we only have 20 bits to
402
 * store the index. So we use the 1<<19 bit as a compression flag and use the
403
 * other 19 bits to store the index modulo 2^19. */
404
static zend_always_inline uint32_t gc_compress(uint32_t idx)
405
2.18M
{
406
2.18M
  if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
407
2.18M
    return idx;
408
2.18M
  }
409
0
  return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
410
2.18M
}
411
412
/* Find the root buffer entry given a pointer and a compressed index.
413
 * Iterate through the root buffer in steps of 2^19 until the pointer
414
 * matches. */
415
static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
416
0
{
417
0
  gc_root_buffer *root = GC_IDX2PTR(idx);
418
419
0
  if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
420
0
    return root;
421
0
  }
422
423
0
  while (1) {
424
0
    idx += GC_MAX_UNCOMPRESSED;
425
0
    ZEND_ASSERT(idx < GC_G(first_unused));
426
0
    root = GC_IDX2PTR(idx);
427
0
    if (GC_GET_PTR(root->ref) == ref) {
428
0
      return root;
429
0
    }
430
0
  }
431
0
}
432
433
/* Get the index of the next unused root buffer entry, and remove it from the
434
 * unused list. GC_HAS_UNUSED() must be true before calling this. */
435
static zend_always_inline uint32_t gc_fetch_unused(void)
436
687k
{
437
687k
  uint32_t idx;
438
687k
  gc_root_buffer *root;
439
440
687k
  ZEND_ASSERT(GC_HAS_UNUSED());
441
687k
  idx = GC_G(unused);
442
687k
  root = GC_IDX2PTR(idx);
443
687k
  ZEND_ASSERT(GC_IS_UNUSED(root->ref));
444
687k
  GC_G(unused) = GC_LIST2IDX(root->ref);
445
687k
  return idx;
446
687k
}
447
448
/* Add a root buffer entry to the unused list */
449
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
450
2.17M
{
451
2.17M
  root->ref = GC_IDX2LIST(GC_G(unused));
452
2.17M
  GC_G(unused) = GC_PTR2IDX(root);
453
2.17M
}
454
455
static zend_always_inline uint32_t gc_fetch_next_unused(void)
456
1.49M
{
457
1.49M
  uint32_t idx;
458
459
1.49M
  ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
460
1.49M
  idx = GC_G(first_unused);
461
1.49M
  GC_G(first_unused) = GC_G(first_unused) + 1;
462
1.49M
  return idx;
463
1.49M
}
464
465
#if ZEND_GC_DEBUG > 1
466
static const char *gc_color_name(uint32_t color) {
467
  switch (color) {
468
    case GC_BLACK: return "black";
469
    case GC_WHITE: return "white";
470
    case GC_GREY: return "grey";
471
    case GC_PURPLE: return "purple";
472
    default: return "unknown";
473
  }
474
}
475
static void gc_trace_ref(zend_refcounted *ref) {
476
  if (GC_TYPE(ref) == IS_OBJECT) {
477
    zend_object *obj = (zend_object *) ref;
478
    fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
479
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
480
      gc_color_name(GC_REF_COLOR(ref)),
481
      obj->ce->name->val, obj->handle);
482
  } else if (GC_TYPE(ref) == IS_ARRAY) {
483
    zend_array *arr = (zend_array *) ref;
484
    fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
485
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
486
      gc_color_name(GC_REF_COLOR(ref)),
487
      zend_hash_num_elements(arr));
488
  } else {
489
    fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
490
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
491
      gc_color_name(GC_REF_COLOR(ref)),
492
      GC_TYPE(ref) == IS_REFERENCE
493
        ? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
494
  }
495
}
496
#endif
497
498
/* Mark a root buffer entry unused */
499
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
500
2.14M
{
501
2.14M
  GC_LINK_UNUSED(root);
502
2.14M
  GC_G(num_roots)--;
503
2.14M
  GC_BENCH_DEC(root_buf_length);
504
2.14M
}
505
506
static void root_buffer_dtor(zend_gc_globals *gc_globals)
507
0
{
508
0
  if (gc_globals->buf) {
509
0
    free(gc_globals->buf);
510
0
    gc_globals->buf = NULL;
511
0
  }
512
0
}
513
514
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
515
16
{
516
16
  gc_globals->gc_enabled = false;
517
16
  gc_globals->gc_active = false;
518
16
  gc_globals->gc_protected = true;
519
16
  gc_globals->gc_full = false;
520
521
16
  gc_globals->buf = NULL;
522
16
  gc_globals->unused = GC_INVALID;
523
16
  gc_globals->first_unused = GC_INVALID;
524
16
  gc_globals->gc_threshold = GC_INVALID;
525
16
  gc_globals->buf_size = GC_INVALID;
526
16
  gc_globals->num_roots = 0;
527
528
16
  gc_globals->gc_runs = 0;
529
16
  gc_globals->collected = 0;
530
16
  gc_globals->collector_time = 0;
531
16
  gc_globals->dtor_time = 0;
532
16
  gc_globals->free_time = 0;
533
16
  gc_globals->activated_at = 0;
534
535
16
  gc_globals->dtor_idx = GC_FIRST_ROOT;
536
16
  gc_globals->dtor_end = 0;
537
16
  gc_globals->dtor_fiber = NULL;
538
16
  gc_globals->dtor_fiber_running = false;
539
540
#if GC_BENCH
541
  gc_globals->root_buf_length = 0;
542
  gc_globals->root_buf_peak = 0;
543
  gc_globals->zval_possible_root = 0;
544
  gc_globals->zval_buffered = 0;
545
  gc_globals->zval_remove_from_buffer = 0;
546
  gc_globals->zval_marked_grey = 0;
547
#endif
548
16
}
549
550
void gc_globals_ctor(void)
551
16
{
552
#ifdef ZTS
553
  ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
554
#else
555
16
  gc_globals_ctor_ex(&gc_globals);
556
16
#endif
557
16
}
558
559
void gc_globals_dtor(void)
560
0
{
561
0
#ifndef ZTS
562
0
  root_buffer_dtor(&gc_globals);
563
0
#endif
564
0
}
565
566
void gc_reset(void)
567
191k
{
568
191k
  if (GC_G(buf)) {
569
191k
    GC_G(gc_active) = 0;
570
191k
    GC_G(gc_protected) = 0;
571
191k
    GC_G(gc_full) = 0;
572
191k
    GC_G(unused) = GC_INVALID;
573
191k
    GC_G(first_unused) = GC_FIRST_ROOT;
574
191k
    GC_G(num_roots) = 0;
575
576
191k
    GC_G(gc_runs) = 0;
577
191k
    GC_G(collected) = 0;
578
579
191k
    GC_G(collector_time) = 0;
580
191k
    GC_G(dtor_time) = 0;
581
191k
    GC_G(free_time) = 0;
582
583
191k
    GC_G(dtor_idx) = GC_FIRST_ROOT;
584
191k
    GC_G(dtor_end) = 0;
585
191k
    GC_G(dtor_fiber) = NULL;
586
191k
    GC_G(dtor_fiber_running) = false;
587
588
#if GC_BENCH
589
    GC_G(root_buf_length) = 0;
590
    GC_G(root_buf_peak) = 0;
591
    GC_G(zval_possible_root) = 0;
592
    GC_G(zval_buffered) = 0;
593
    GC_G(zval_remove_from_buffer) = 0;
594
    GC_G(zval_marked_grey) = 0;
595
#endif
596
191k
  }
597
598
191k
  GC_G(activated_at) = zend_hrtime();
599
191k
}
600
601
/* Enable/disable the garbage collector.
602
 * Initialize globals if necessary. */
603
ZEND_API bool gc_enable(bool enable)
604
91.2k
{
605
91.2k
  bool old_enabled = GC_G(gc_enabled);
606
91.2k
  GC_G(gc_enabled) = enable;
607
91.2k
  if (enable && !old_enabled && GC_G(buf) == NULL) {
608
16
    GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
609
16
    GC_G(buf)[0].ref = NULL;
610
16
    GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
611
16
    GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
612
16
    gc_reset();
613
16
  }
614
91.2k
  return old_enabled;
615
91.2k
}
616
617
ZEND_API bool gc_enabled(void)
618
96
{
619
96
  return GC_G(gc_enabled);
620
96
}
621
622
/* Protect the GC root buffer (prevent additions) */
623
ZEND_API bool gc_protect(bool protect)
624
24.3k
{
625
24.3k
  bool old_protected = GC_G(gc_protected);
626
24.3k
  GC_G(gc_protected) = protect;
627
24.3k
  return old_protected;
628
24.3k
}
629
630
ZEND_API bool gc_protected(void)
631
0
{
632
0
  return GC_G(gc_protected);
633
0
}
634
635
static void gc_grow_root_buffer(void)
636
1
{
637
1
  size_t new_size;
638
639
1
  if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
640
0
    if (!GC_G(gc_full)) {
641
0
      zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
642
0
      GC_G(gc_active) = 1;
643
0
      GC_G(gc_protected) = 1;
644
0
      GC_G(gc_full) = 1;
645
0
      return;
646
0
    }
647
0
  }
648
1
  if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
649
1
    new_size = GC_G(buf_size) * 2;
650
1
  } else {
651
0
    new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
652
0
  }
653
1
  if (new_size > GC_MAX_BUF_SIZE) {
654
0
    new_size = GC_MAX_BUF_SIZE;
655
0
  }
656
1
  GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
657
1
  GC_G(buf_size) = new_size;
658
1
}
659
660
/* Adjust the GC activation threshold given the number of nodes collected by the last run */
661
static void gc_adjust_threshold(int count)
662
0
{
663
0
  uint32_t new_threshold;
664
665
  /* TODO Very simple heuristic for dynamic GC buffer resizing:
666
   * If there are "too few" collections, increase the collection threshold
667
   * by a fixed step */
668
0
  if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
669
    /* increase */
670
0
    if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
671
0
      new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
672
0
      if (new_threshold > GC_THRESHOLD_MAX) {
673
0
        new_threshold = GC_THRESHOLD_MAX;
674
0
      }
675
0
      if (new_threshold > GC_G(buf_size)) {
676
0
        gc_grow_root_buffer();
677
0
      }
678
0
      if (new_threshold <= GC_G(buf_size)) {
679
0
        GC_G(gc_threshold) = new_threshold;
680
0
      }
681
0
    }
682
0
  } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
683
0
    new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
684
0
    if (new_threshold < GC_THRESHOLD_DEFAULT) {
685
0
      new_threshold = GC_THRESHOLD_DEFAULT;
686
0
    }
687
0
    GC_G(gc_threshold) = new_threshold;
688
0
  }
689
0
}
690
691
/* Perform a GC run and then add a node as a possible root. */
692
static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
693
0
{
694
0
  uint32_t idx;
695
0
  gc_root_buffer *newRoot;
696
697
0
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
698
0
  ZEND_ASSERT(GC_INFO(ref) == 0);
699
700
0
  if (GC_G(gc_enabled) && !GC_G(gc_active)) {
701
0
    GC_ADDREF(ref);
702
0
    gc_adjust_threshold(gc_collect_cycles());
703
0
    if (UNEXPECTED(GC_DELREF(ref) == 0)) {
704
0
      rc_dtor_func(ref);
705
0
      return;
706
0
    } else if (UNEXPECTED(GC_INFO(ref))) {
707
0
      return;
708
0
    }
709
0
  }
710
711
0
  if (GC_HAS_UNUSED()) {
712
0
    idx = GC_FETCH_UNUSED();
713
0
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
714
0
    idx = GC_FETCH_NEXT_UNUSED();
715
0
  } else {
716
0
    gc_grow_root_buffer();
717
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
718
0
      return;
719
0
    }
720
0
    idx = GC_FETCH_NEXT_UNUSED();
721
0
  }
722
723
0
  newRoot = GC_IDX2PTR(idx);
724
0
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
725
0
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
726
727
0
  idx = gc_compress(idx);
728
0
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
729
0
  GC_G(num_roots)++;
730
731
0
  GC_BENCH_INC(zval_buffered);
732
0
  GC_BENCH_INC(root_buf_length);
733
0
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
734
0
}
735
736
/* Add a possible root node to the buffer.
737
 * Maybe perform a GC run. */
738
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
739
1.80M
{
740
1.80M
  uint32_t idx;
741
1.80M
  gc_root_buffer *newRoot;
742
743
1.80M
  if (UNEXPECTED(GC_G(gc_protected))) {
744
101k
    return;
745
101k
  }
746
747
1.70M
  GC_BENCH_INC(zval_possible_root);
748
749
1.70M
  if (EXPECTED(GC_HAS_UNUSED())) {
750
687k
    idx = GC_FETCH_UNUSED();
751
1.02M
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
752
1.02M
    idx = GC_FETCH_NEXT_UNUSED();
753
1.02M
  } else {
754
0
    gc_possible_root_when_full(ref);
755
0
    return;
756
0
  }
757
758
1.70M
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
759
1.70M
  ZEND_ASSERT(GC_INFO(ref) == 0);
760
761
1.70M
  newRoot = GC_IDX2PTR(idx);
762
1.70M
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
763
1.70M
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
764
765
1.70M
  idx = gc_compress(idx);
766
1.70M
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
767
1.70M
  GC_G(num_roots)++;
768
769
1.70M
  GC_BENCH_INC(zval_buffered);
770
1.70M
  GC_BENCH_INC(root_buf_length);
771
1.70M
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
772
1.70M
}
773
774
/* Add an extra root during a GC run */
775
static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref)
776
33
{
777
33
  uint32_t idx;
778
33
  gc_root_buffer *newRoot;
779
780
33
  if (EXPECTED(GC_HAS_UNUSED())) {
781
0
    idx = GC_FETCH_UNUSED();
782
33
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
783
33
    idx = GC_FETCH_NEXT_UNUSED();
784
33
  } else {
785
0
    gc_grow_root_buffer();
786
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
787
      /* TODO: can this really happen? */
788
0
      return;
789
0
    }
790
0
    idx = GC_FETCH_NEXT_UNUSED();
791
0
  }
792
793
33
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
794
33
  ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0);
795
796
33
  newRoot = GC_IDX2PTR(idx);
797
33
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
798
799
33
  idx = gc_compress(idx);
800
33
  GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref));
801
33
  GC_G(num_roots)++;
802
803
33
  GC_BENCH_INC(zval_buffered);
804
33
  GC_BENCH_INC(root_buf_length);
805
33
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
806
33
}
807
808
/* Remove a node from the root buffer given its compressed index */
809
static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
810
0
{
811
0
  gc_root_buffer *root = gc_decompress(ref, idx);
812
0
  gc_remove_from_roots(root);
813
0
}
814
815
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
816
2.10M
{
817
2.10M
  gc_root_buffer *root;
818
2.10M
  uint32_t idx = GC_REF_ADDRESS(ref);
819
820
2.10M
  GC_BENCH_INC(zval_remove_from_buffer);
821
822
2.10M
  if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
823
1.55M
    GC_TRACE_SET_COLOR(ref, GC_BLACK);
824
1.55M
  }
825
2.10M
  GC_REF_SET_INFO(ref, 0);
826
827
  /* Perform decompression only in case of large buffers */
828
2.10M
  if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
829
0
    gc_remove_compressed(ref, idx);
830
0
    return;
831
0
  }
832
833
2.10M
  ZEND_ASSERT(idx);
834
2.10M
  root = GC_IDX2PTR(idx);
835
2.10M
  gc_remove_from_roots(root);
836
2.10M
}
837
838
/* Mark all nodes reachable from ref as black (live). Restore the reference
839
 * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan.
840
 * To implement a depth-first search, discovered nodes are added to a stack
841
 * which is processed iteratively. */
842
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	/* Dispatch on node type; discovered children are pushed on the stack
	 * and processed via the "next" label below. */
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			/* Handle (key, weakmap) pairs for objects used as weakmap keys.
			 * Entries come in pairs, hence the n -= 2 / zv += 2 stride. */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					zval *weakmap = zv+1;
					ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
					if (Z_OPT_COLLECTABLE_P(entry)) {
						GC_UNSET_FROM_WEAKMAP_KEY(entry);
						if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) {
							/* Weakmap was scanned in gc_mark_roots, we must
							 * ensure that it's eventually scanned in
							 * gc_scan_roots as well. */
							if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) {
								gc_extra_root(Z_COUNTED_P(weakmap));
							}
						} else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) {
							/* Both the entry weakmap and key are BLACK, so we
							 * can mark the entry BLACK as well.
							 * !GC_FROM_WEAKMAP(entry) means that the weakmap
							 * was already scanned black (or will not be
							 * scanned), so it's our responsibility to mark the
							 * entry */
							ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK));
							ref = Z_COUNTED_P(entry);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
					}
					zv+=2;
				}
			}

			/* Weakmap objects expose (key, entry) pairs instead of a regular
			 * property table; handle them here and skip the generic path. */
			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				zend_weakmap_get_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR);
					zval *key = zv;
					zval *entry = (zval*) Z_PTR_P(zv+1);
					if (Z_OPT_COLLECTABLE_P(entry)) {
						GC_UNSET_FROM_WEAKMAP(entry);
						if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) {
							/* Key was scanned in gc_mark_roots, we must
							 * ensure that it's eventually scanned in
							 * gc_scan_roots as well. */
							if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) {
								gc_extra_root(Z_COUNTED_P(key));
							}
						} else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) {
							/* Both the entry weakmap and key are BLACK, so we
							 * can mark the entry BLACK as well.
							 * !GC_FROM_WEAKMAP_KEY(entry) means that the key
							 * was already scanned black (or will not be
							 * scanned), so it's our responsibility to mark the
							 * entry */
							ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK));
							ref = Z_COUNTED_P(entry);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
					}
					zv += 2;
				}
				goto next;
			}

			/* Generic object: get_gc() yields a zval table and optionally a
			 * HashTable of additional children. */
			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				GC_ADDREF(ht);
				if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
					GC_REF_SET_BLACK(ht);
					/* Push the zval table's children first, then fall
					 * through to the HashTable via handle_ht. */
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}

handle_zvals:
			/* Walk a flat zval array. The first not-yet-black child is
			 * continued into directly (goto tail_call) instead of being
			 * pushed, which keeps the stack shallow. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_ADDREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
						GC_REF_SET_BLACK(ref);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_ADDREF(ref);
								if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
									GC_REF_SET_BLACK(ref);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
		ht = (zend_array*)ref;
handle_ht:
		n = ht->nNumUsed;
		zv = ht->arPacked;
		if (HT_IS_PACKED(ht)) {
			/* Packed arrays are plain zval arrays. */
			goto handle_zvals;
		}

		/* Bucket-based hash table; values may be IS_INDIRECT slots. */
		p = (Bucket*)zv;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_ADDREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
					GC_REF_SET_BLACK(ref);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A PHP reference has exactly one child: follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_ADDREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
				GC_REF_SET_BLACK(ref);
				goto tail_call;
			}
		}
	}

next:
	/* Pop the next discovered node; NULL terminates the traversal. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1040
1041
/* Traverse the graph of nodes referred to by ref. Decrement the reference
1042
 * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan. */
1043
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	GC_BENCH_INC(zval_marked_grey);

	/* Mirror of gc_scan_black, but decrementing refcounts and coloring
	 * GREY. The DELREF/color ordering below is load-bearing. */
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			/* (entry, weakmap) pairs for objects used as weakmap keys;
			 * pairs explain the n -= 2 / zv += 2 stride. */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					zval *weakmap = zv+1;
					ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
					if (Z_COLLECTABLE_P(entry)) {
						GC_SET_FROM_WEAKMAP_KEY(entry);
						ref = Z_COUNTED_P(entry);
						/* Only DELREF if the contribution from the weakmap has
						 * not been cancelled yet */
						if (!GC_FROM_WEAKMAP(entry)) {
							GC_DELREF(ref);
						}
						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_GREY);
							GC_STACK_PUSH(ref);
						}
					}
					zv+=2;
				}
			}

			/* Weakmap objects: iterate entries only (keys are weak). */
			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				zend_weakmap_get_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_COLLECTABLE_P(entry)) {
						GC_SET_FROM_WEAKMAP(entry);
						ref = Z_COUNTED_P(entry);
						/* Only DELREF if the contribution from the weakmap key
						 * has not been cancelled yet */
						if (!GC_FROM_WEAKMAP_KEY(entry)) {
							GC_DELREF(ref);
						}
						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_GREY);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
				goto next;
			}

			/* Generic object: zval table plus optional extra HashTable. */
			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				GC_DELREF(ht);
				if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
					GC_REF_SET_COLOR(ht, GC_GREY);
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_DELREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_GREY);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}
handle_zvals:
			/* Flat zval array; the first new GREY child is continued into
			 * directly rather than pushed. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_DELREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
						GC_REF_SET_COLOR(ref, GC_GREY);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_DELREF(ref);
								if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
									GC_REF_SET_COLOR(ref, GC_GREY);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
		ht = (zend_array*)ref;
handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			zv = ht->arPacked;
			goto handle_zvals;
		}

		/* Bucket-based hash table; values may be IS_INDIRECT slots. */
		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_DELREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
					GC_REF_SET_COLOR(ref, GC_GREY);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_DELREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_GREY);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A PHP reference has a single child: follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_DELREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
				GC_REF_SET_COLOR(ref, GC_GREY);
				goto tail_call;
			}
		}
	}

next:
	/* Pop the next discovered node; NULL terminates the traversal. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1217
1218
/* Two-Finger compaction algorithm */
1219
static void gc_compact(void)
{
	/* Nothing to do when the buffer is already dense. */
	if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
		if (GC_G(num_roots)) {
			/* Two fingers: `free` walks forward to holes, `scan` walks
			 * backward to live entries; live entries are moved into holes
			 * until the fingers meet. */
			gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
			gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
			gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
			uint32_t idx;
			zend_refcounted *p;

			while (free < scan) {
				while (!GC_IS_UNUSED(free->ref)) {
					free++;
				}
				while (GC_IS_UNUSED(scan->ref)) {
					scan--;
				}
				if (scan > free) {
					p = scan->ref;
					free->ref = p;
					p = GC_GET_PTR(p);
					/* The moved node must learn its new (compressed)
					 * buffer address while keeping its color bits. */
					idx = gc_compress(GC_PTR2IDX(free));
					GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
					free++;
					scan--;
					/* Once scan passes the last possible live slot the
					 * buffer is compacted. */
					if (scan <= end) {
						break;
					}
				}
			}
		}
		/* All slots [GC_FIRST_ROOT, num_roots + GC_FIRST_ROOT) are now in
		 * use; reset the free list and the high-water mark. */
		GC_G(unused) = GC_INVALID;
		GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
	}
}
1254
1255
/* For all roots marked purple, traverse the graph, decrementing the reference
1256
 * count of their child nodes. Mark visited nodes grey so that they are not
1257
 * visited again. See MarkRoots() in Bacon & Rajan. */
1258
static void gc_mark_roots(gc_stack *stack)
{
	/* Compact first so the root buffer is a dense range we can walk. */
	gc_compact();

	gc_root_buffer *buf = GC_IDX2PTR(GC_FIRST_ROOT);
	gc_root_buffer *const end = GC_IDX2PTR(GC_G(first_unused));

	for (; buf != end; buf++) {
		zend_refcounted *ref = buf->ref;
		/* Every PURPLE root is a candidate cycle root: turn it GREY and
		 * traverse its subgraph, decrementing child refcounts. */
		if (GC_IS_ROOT(ref) && GC_REF_CHECK_COLOR(ref, GC_PURPLE)) {
			GC_REF_SET_COLOR(ref, GC_GREY);
			gc_mark_grey(ref, stack);
		}
	}
}
1276
1277
/* Traverse the reference graph of ref. Evaluate grey nodes and mark them
1278
 * black (to keep) or white (to free). Note that nodes initially marked white
1279
 * may later become black if they are visited from a live node.
1280
 * See Scan() in Bacon & Rajan. */
1281
static void gc_scan(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	/* Only WHITE nodes need evaluation; anything else was already decided. */
	if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
		goto next;
	}

	/* A surviving refcount > 0 means external references exist: the node
	 * (and everything reachable from it) is live. */
	if (GC_REFCOUNT(ref) > 0) {
		if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
			GC_REF_SET_BLACK(ref);
			if (UNEXPECTED(!_stack->next)) {
				gc_stack_next(_stack);
			}
			/* Split stack and reuse the tail */
			_stack->next->prev = NULL;
			gc_scan_black(ref, _stack->next);
			_stack->next->prev = _stack;
		}
		goto next;
	}

	/* Refcount dropped to 0 during gc_mark_grey: keep it WHITE and scan
	 * its children. */
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;
		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			/* Weakmap entries keyed by this object. */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_OPT_COLLECTABLE_P(entry)) {
						ref = Z_COUNTED_P(entry);
						if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_WHITE);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
			}

			/* Generic object: zval table plus optional extra HashTable. */
			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				if (GC_REF_CHECK_COLOR(ht, GC_GREY)) {
					GC_REF_SET_COLOR(ht, GC_WHITE);
					GC_STACK_PUSH((zend_refcounted *) ht);
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_WHITE);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}

handle_zvals:
			/* Flat zval array; first GREY child is continued into directly. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
						GC_REF_SET_COLOR(ref, GC_WHITE);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
									GC_REF_SET_COLOR(ref, GC_WHITE);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ht = (HashTable *)ref;
		ZEND_ASSERT(ht != &EG(symbol_table));

handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			zv = ht->arPacked;
			goto handle_zvals;
		}

		/* Bucket-based hash table; values may be IS_INDIRECT slots. */
		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
					GC_REF_SET_COLOR(ref, GC_WHITE);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_WHITE);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A PHP reference has a single child: follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
				GC_REF_SET_COLOR(ref, GC_WHITE);
				goto tail_call;
			}
		}
	}

next:
	/* Pop the next discovered node; NULL terminates the traversal. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1433
1434
/* Scan all roots, coloring grey nodes black or white */
1435
static void gc_scan_roots(gc_stack *stack)
{
	uint32_t idx;
	uint32_t fixed_end;
	gc_root_buffer *buf;

	/* The root buffer may be reallocated while gc_scan runs, so iterate by
	 * index and translate to a pointer on every iteration. */
	fixed_end = GC_G(first_unused);
	for (idx = GC_FIRST_ROOT; idx != fixed_end; idx++) {
		buf = GC_IDX2PTR(idx);
		if (GC_IS_ROOT(buf->ref) && GC_REF_CHECK_COLOR(buf->ref, GC_GREY)) {
			GC_REF_SET_COLOR(buf->ref, GC_WHITE);
			gc_scan(buf->ref, stack);
		}
	}

	/* gc_scan may have appended extra roots; drain them too, re-reading
	 * first_unused each pass because it can keep growing. */
	for (; idx != GC_G(first_unused); idx++) {
		buf = GC_IDX2PTR(idx);
		if (GC_IS_ROOT(buf->ref) && GC_REF_CHECK_COLOR(buf->ref, GC_GREY)) {
			GC_REF_SET_COLOR(buf->ref, GC_WHITE);
			gc_scan(buf->ref, stack);
		}
	}
}
1467
1468
/* Add a node to the buffer with the garbage flag, so that it will be
1469
 * destroyed and freed when the scan is complete. */
1470
static void gc_add_garbage(zend_refcounted *ref)
{
	uint32_t slot;

	/* Acquire a buffer slot: prefer a recycled one, then the next
	 * never-used one; grow the buffer only as a last resort. */
	if (GC_HAS_UNUSED()) {
		slot = GC_FETCH_UNUSED();
	} else if (GC_HAS_NEXT_UNUSED()) {
		slot = GC_FETCH_NEXT_UNUSED();
	} else {
		gc_grow_root_buffer();
		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
			/* Growing failed; silently drop the node. */
			return;
		}
		slot = GC_FETCH_NEXT_UNUSED();
	}

	/* Store the node tagged as garbage and record its compressed address
	 * (with BLACK color) in the node itself. */
	GC_IDX2PTR(slot)->ref = GC_MAKE_GARBAGE(ref);
	GC_REF_SET_INFO(ref, gc_compress(slot) | GC_BLACK);
	GC_G(num_roots)++;
}
1494
1495
/* Traverse the reference graph from ref, marking any white nodes as garbage. */
1496
static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
{
	int count = 0;
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	/* don't count references for compatibility ??? */
	if (GC_TYPE(ref) != IS_REFERENCE) {
		count++;
	}

	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			int len;
			zval *table;

			/* optimization: color is GC_BLACK (0) */
			if (!GC_INFO(ref)) {
				gc_add_garbage(ref);
			}
			/* Record whether any garbage object still needs its destructor
			 * called; the caller uses this to schedule dtor handling. */
			if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
			 && (obj->handlers->dtor_obj != zend_objects_destroy_object
			  || obj->ce->destructor != NULL)) {
				*flags |= GC_HAS_DESTRUCTORS;
			}

			/* Undo weakmap refcount accounting for entries keyed by this
			 * object, restoring the refs gc_mark_grey removed. */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) {
						GC_UNSET_FROM_WEAKMAP_KEY(entry);
						GC_UNSET_FROM_WEAKMAP(entry);
						ref = Z_COUNTED_P(entry);
						GC_ADDREF(ref);
						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
							GC_REF_SET_BLACK(ref);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
			}

			/* Weakmap objects: restore the entry refs this map removed. */
			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				zend_weakmap_get_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP(entry)) {
						GC_UNSET_FROM_WEAKMAP_KEY(entry);
						GC_UNSET_FROM_WEAKMAP(entry);
						ref = Z_COUNTED_P(entry);
						GC_ADDREF(ref);
						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
							GC_REF_SET_BLACK(ref);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
				goto next;
			}

			/* Generic object: zval table plus optional extra HashTable. */
			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				GC_ADDREF(ht);
				if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) {
					GC_REF_SET_BLACK(ht);
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}

handle_zvals:
			/* Flat zval array; first WHITE child is continued into directly. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_ADDREF(ref);
					if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
						GC_REF_SET_BLACK(ref);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_ADDREF(ref);
								if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
									GC_REF_SET_BLACK(ref);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		/* optimization: color is GC_BLACK (0) */
		if (!GC_INFO(ref)) {
			gc_add_garbage(ref);
		}
		ht = (zend_array*)ref;

handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			zv = ht->arPacked;
			goto handle_zvals;
		}

		/* Bucket-based hash table; values may be IS_INDIRECT slots. */
		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_ADDREF(ref);
				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
					GC_REF_SET_BLACK(ref);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A PHP reference has a single child: follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_ADDREF(ref);
			if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
				GC_REF_SET_BLACK(ref);
				goto tail_call;
			}
		}
	}

next:
	/* Pop the next discovered node; NULL terminates the traversal. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}

	/* Number of non-reference nodes marked as garbage from this root. */
	return count;
}
1683
1684
/* Traverse the reference graph from all roots, marking white nodes as garbage.
 *
 * Returns the number of garbage values found (accumulated via
 * gc_collect_white()). */
static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
{
	uint32_t idx, end;
	zend_refcounted *ref;
	int count = 0;
	gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
	gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));

	/* remove non-garbage from the list: roots that ended up BLACK after the
	 * scan phase are externally reachable and must not be collected */
	while (current != last) {
		if (GC_IS_ROOT(current->ref)) {
			if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
				GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
				gc_remove_from_roots(current);
			}
		}
		current++;
	}

	/* squeeze out the slots freed above so the remaining roots are dense */
	gc_compact();

	/* Every surviving root is garbage: tag the slot as such, then collect
	 * its WHITE subgraph (recoloring BLACK to avoid revisiting).
	 * Root buffer might be reallocated during gc_collect_white,
	 * make sure to reload pointers. */
	idx = GC_FIRST_ROOT;
	end = GC_G(first_unused);
	while (idx != end) {
		current = GC_IDX2PTR(idx);
		ref = current->ref;
		ZEND_ASSERT(GC_IS_ROOT(ref));
		current->ref = GC_MAKE_GARBAGE(ref);
		if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
			GC_REF_SET_BLACK(ref);
			count += gc_collect_white(ref, flags, stack);
		}
		idx++;
	}

	return count;
}
1724
1725
/* Remove data nested inside (and transitively reachable from) `ref` from the
 * root buffer, so that entries about to be freed are not also treated as
 * independent roots.  On the initial call `root` is the root-buffer slot of
 * `ref` itself; it is counted but not removed (the caller keeps it, colored
 * purple, as a root for the next run).  Recursion is flattened into an
 * explicit `stack` via the tail_call/next labels.
 *
 * Returns the number of buffer entries accounted as removed; the caller
 * subtracts this from its garbage count. */
static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	int count = 0;
	GC_STACK_DCL(stack);

tail_call:
	if (root) {
		/* initial entry: count the root slot itself, only once */
		root = NULL;
		count++;
	} else if (GC_REF_ADDRESS(ref) != 0
	 && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
		/* nested value that is still buffered: drop it from the buffer */
		GC_TRACE_REF(ref, "removing from buffer");
		GC_REMOVE_FROM_BUFFER(ref);
		count++;
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* follow the reference's payload if it is collectable */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			goto tail_call;
		}
		goto next;
	} else {
		goto next;
	}

	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			int len;
			zval *table;

			/* weakmap entries referencing this object are traversed too */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_OPT_COLLECTABLE_P(entry)) {
						ref = Z_COUNTED_P(entry);
						GC_STACK_PUSH(ref);
					}
					zv++;
				}
			}

			/* enumerate the object's GC children; get_gc may additionally
			 * return a HashTable of further children */
			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				for (; n != 0; n--) {
					if (Z_COLLECTABLE_P(zv)) {
						ref = Z_COUNTED_P(zv);
						GC_STACK_PUSH(ref);
					}
					zv++;
				}
				if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
					GC_TRACE_REF(ht, "removing from buffer");
					GC_REMOVE_FROM_BUFFER(ht);
				}
				goto handle_ht;
			}

/* scan a plain zval array; the first collectable child is followed directly
 * (tail call), the rest are pushed on the stack */
handle_zvals:
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					zv++;
					while (--n) {
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_STACK_PUSH(ref);
						}
						zv++;
					}
					goto tail_call;
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ht = (zend_array*)ref;

/* scan a HashTable: packed tables reduce to the zval loop above, bucket
 * tables must also resolve IS_INDIRECT slots */
handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			zv = ht->arPacked;
			goto handle_zvals;
		}

		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				p++;
				while (--n) {
					zv = &p->val;
					if (Z_TYPE_P(zv) == IS_INDIRECT) {
						zv = Z_INDIRECT_P(zv);
					}
					if (Z_COLLECTABLE_P(zv)) {
						zend_refcounted *ref = Z_COUNTED_P(zv);
						GC_STACK_PUSH(ref);
					}
					p++;
				}
				goto tail_call;
			}
			p++;
		}
	}

/* current node exhausted: continue with the next pending child, if any */
next:
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}

	return count;
}
1854
1855
static void zend_get_gc_buffer_release(void);
1856
static void zend_gc_check_root_tmpvars(void);
1857
static void zend_gc_remove_root_tmpvars(void);
1858
1859
static zend_internal_function gc_destructor_fiber;
1860
1861
/* Raise a fatal error when the destructor fiber object cannot be created.
 * Does not return. */
static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void)
{
	zend_error_noreturn(E_ERROR, "Unable to create destructor fiber");
}
1865
1866
/* Raise a fatal error when the destructor fiber cannot be started.
 * Does not return. */
static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void)
{
	zend_error_noreturn(E_ERROR, "Unable to start destructor fiber");
}
1870
1871
/* Call destructors for garbage in the buffer.
 *
 * Walks root-buffer slots [idx, end) and invokes dtor_obj on every
 * DTOR_GARBAGE entry whose destructor has not run yet.  When executing
 * inside the destructor fiber (fiber != NULL), GC_G(dtor_idx) records the
 * current position so a replacement fiber can continue if a destructor
 * suspends this one; in that case FAILURE is returned.  Returns SUCCESS
 * once all slots have been visited. */
static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber)
{
	gc_root_buffer *current;
	zend_refcounted *p;

	/* The root buffer might be reallocated during destructors calls,
	 * make sure to reload pointers as necessary. */
	while (idx != end) {
		current = GC_IDX2PTR(idx);
		if (GC_IS_DTOR_GARBAGE(current->ref)) {
			p = GC_GET_PTR(current->ref);
			/* Mark this as a normal root for the next GC run */
			current->ref = p;
			/* Double check that the destructor hasn't been called yet. It
			 * could have already been invoked indirectly by some other
			 * destructor. */
			if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
				if (fiber != NULL) {
					/* remember progress in case this fiber gets suspended */
					GC_G(dtor_idx) = idx;
				}
				zend_object *obj = (zend_object*)p;
				GC_TRACE_REF(obj, "calling destructor");
				GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
				/* keep the object alive across the user-level destructor */
				GC_ADDREF(obj);
				obj->handlers->dtor_obj(obj);
				GC_TRACE_REF(obj, "returned from destructor");
				GC_DELREF(obj);
				if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) {
					/* We resumed after suspension */
					gc_check_possible_root((zend_refcounted*)&obj->gc);
					return FAILURE;
				}
			}
		}
		idx++;
	}

	return SUCCESS;
}
1911
1912
static zend_fiber *gc_create_destructor_fiber(void)
1913
150
{
1914
150
  zval zobj;
1915
150
  zend_fiber *fiber;
1916
1917
150
  GC_TRACE("starting destructor fiber");
1918
1919
150
  if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) {
1920
0
    gc_create_destructor_fiber_error();
1921
0
  }
1922
1923
150
  fiber = (zend_fiber *)Z_OBJ(zobj);
1924
150
  fiber->fci.size = sizeof(fiber->fci);
1925
150
  fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber;
1926
1927
150
  GC_G(dtor_fiber) = fiber;
1928
1929
150
  if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) {
1930
0
    gc_start_destructor_fiber_error();
1931
0
  }
1932
1933
150
  return fiber;
1934
150
}
1935
1936
/* Stash the currently pending exception (if any) into *prev_exception and
 * clear EG(exception) so further destructors can run.  If an exception was
 * already stashed, chain it as the "previous" exception of the new one. */
static void remember_prev_exception(zend_object **prev_exception)
{
	if (EG(exception) == NULL) {
		return;
	}

	if (*prev_exception != NULL) {
		zend_exception_set_previous(EG(exception), *prev_exception);
	}
	*prev_exception = EG(exception);
	EG(exception) = NULL;
}
1946
1947
/* Run the pending destructors inside a dedicated fiber, so that the
 * destructors themselves may suspend.  Whenever a destructor suspends the
 * fiber, ownership of that fiber is dropped and a fresh fiber is created for
 * the remaining destructors.  Exceptions thrown by destructors are chained
 * together and restored into EG(exception) before returning. */
static zend_never_inline void gc_call_destructors_in_fiber(void)
{
	ZEND_ASSERT(!GC_G(dtor_fiber_running));

	zend_fiber *fiber = GC_G(dtor_fiber);

	/* range of root-buffer slots the fiber must process */
	GC_G(dtor_idx) = GC_FIRST_ROOT;
	GC_G(dtor_end) = GC_G(first_unused);

	if (UNEXPECTED(!fiber)) {
		fiber = gc_create_destructor_fiber();
	} else {
		/* reuse the fiber left suspended by a previous GC run */
		zend_fiber_resume(fiber, NULL, NULL);
	}

	zend_object *exception = NULL;
	remember_prev_exception(&exception);

	for (;;) {
		/* At this point, fiber has executed until suspension */
		GC_TRACE("resumed from destructor fiber");

		if (UNEXPECTED(GC_G(dtor_fiber_running))) {
			/* Fiber was suspended by a destructor. Start a new one for the
			 * remaining destructors. */
			GC_TRACE("destructor fiber suspended by destructor");
			GC_G(dtor_fiber) = NULL;
			/* skip the slot whose destructor caused the suspension */
			GC_G(dtor_idx)++;
			/* We do not own the fiber anymore. It may be collected if the
			 * application does not reference it. */
			zend_object_release(&fiber->std);
			remember_prev_exception(&exception);
			fiber = gc_create_destructor_fiber();
			remember_prev_exception(&exception);
			continue;
		} else {
			/* Fiber suspended itself after calling all destructors */
			GC_TRACE("destructor fiber suspended itself");
			break;
		}
	}

	/* re-raise the (chained) destructor exceptions, if any */
	EG(exception) = exception;
}
1991
1992
/* Perform a garbage collection run. The default implementation of gc_collect_cycles.
 *
 * Phases: (1) un-root live TMPVARs, (2) mark/scan/collect the root buffer,
 * (3) call destructors (possibly in a fiber) and re-run GC once if any were
 * called, (4) destroy and free the garbage, (5) re-root live TMPVARs.
 * Returns the total number of collected values. */
ZEND_API int zend_gc_collect_cycles(void)
{
	int total_count = 0;
	bool should_rerun_gc = false;
	bool did_rerun_gc = false;

	zend_hrtime_t start_time = zend_hrtime();
	if (GC_G(num_roots) && !GC_G(gc_active)) {
		zend_gc_remove_root_tmpvars();
	}

rerun_gc:
	if (GC_G(num_roots)) {
		int count;
		gc_root_buffer *current, *last;
		zend_refcounted *p;
		uint32_t gc_flags = 0;
		uint32_t idx, end;
		gc_stack stack;

		stack.prev = NULL;
		stack.next = NULL;

		/* re-entrant invocation: refuse to collect recursively */
		if (GC_G(gc_active)) {
			GC_G(collector_time) += zend_hrtime() - start_time;
			return 0;
		}

		GC_TRACE("Collecting cycles");
		GC_G(gc_runs)++;
		GC_G(gc_active) = 1;

		GC_TRACE("Marking roots");
		gc_mark_roots(&stack);
		GC_TRACE("Scanning roots");
		gc_scan_roots(&stack);

		GC_TRACE("Collecting roots");
		count = gc_collect_roots(&gc_flags, &stack);

		if (!GC_G(num_roots)) {
			/* nothing to free */
			GC_TRACE("Nothing to free");
			gc_stack_free(&stack);
			GC_G(gc_active) = 0;
			goto finish;
		}

		end = GC_G(first_unused);

		if (gc_flags & GC_HAS_DESTRUCTORS) {
			GC_TRACE("Calling destructors");

			/* During a destructor call, new externally visible references to nested data may
			 * be introduced. These references can be introduced in a way that does not
			 * modify any refcounts, so we have no real way to detect this situation
			 * short of rerunning full GC tracing. What we do instead is to only run
			 * destructors at this point and automatically re-run GC afterwards. */
			should_rerun_gc = true;

			/* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
			 * color them purple. This serves a double purpose: First, they should be
			 * considered new potential roots for the next GC run. Second, it will prevent
			 * their removal from the root buffer by nested data removal. */
			idx = GC_FIRST_ROOT;
			current = GC_IDX2PTR(GC_FIRST_ROOT);
			while (idx != end) {
				if (GC_IS_GARBAGE(current->ref)) {
					p = GC_GET_PTR(current->ref);
					if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
						zend_object *obj = (zend_object *) p;
						if (obj->handlers->dtor_obj != zend_objects_destroy_object
								|| obj->ce->destructor) {
							current->ref = GC_MAKE_DTOR_GARBAGE(obj);
							GC_REF_SET_COLOR(obj, GC_PURPLE);
						} else {
							/* default dtor and no user destructor: nothing to call */
							GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
						}
					}
				}
				current++;
				idx++;
			}

			/* Remove nested data for objects on which a destructor will be called.
			 * This will not remove the objects themselves, as they have been colored
			 * purple. */
			idx = GC_FIRST_ROOT;
			current = GC_IDX2PTR(GC_FIRST_ROOT);
			while (idx != end) {
				if (GC_IS_DTOR_GARBAGE(current->ref)) {
					p = GC_GET_PTR(current->ref);
					count -= gc_remove_nested_data_from_buffer(p, current, &stack);
				}
				current++;
				idx++;
			}

			/* Actually call destructors. */
			zend_hrtime_t dtor_start_time = zend_hrtime();
			if (EXPECTED(!EG(active_fiber))) {
				gc_call_destructors(GC_FIRST_ROOT, end, NULL);
			} else {
				/* run in a separate fiber so destructors may suspend */
				gc_call_destructors_in_fiber();
			}
			GC_G(dtor_time) += zend_hrtime() - dtor_start_time;

			if (GC_G(gc_protected)) {
				/* something went wrong */
				zend_get_gc_buffer_release();
				GC_G(collector_time) += zend_hrtime() - start_time;
				return 0;
			}
		}

		gc_stack_free(&stack);

		/* Destroy zvals. The root buffer may be reallocated. */
		GC_TRACE("Destroying zvals");
		zend_hrtime_t free_start_time = zend_hrtime();
		idx = GC_FIRST_ROOT;
		while (idx != end) {
			current = GC_IDX2PTR(idx);
			if (GC_IS_GARBAGE(current->ref)) {
				p = GC_GET_PTR(current->ref);
				GC_TRACE_REF(p, "destroying");
				if (GC_TYPE(p) == IS_OBJECT) {
					zend_object *obj = (zend_object*)p;

					EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
					GC_TYPE_INFO(obj) = GC_NULL |
						(GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
					/* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
					current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
					if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
						GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
						GC_ADDREF(obj);
						obj->handlers->free_obj(obj);
						GC_DELREF(obj);
					}

					ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
				} else if (GC_TYPE(p) == IS_ARRAY) {
					zend_array *arr = (zend_array*)p;

					GC_TYPE_INFO(arr) = GC_NULL |
						(GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);

					/* GC may destroy arrays with rc>1. This is valid and safe. */
					HT_ALLOW_COW_VIOLATION(arr);

					zend_hash_destroy(arr);
				}
			}
			idx++;
		}

		/* Free objects */
		current = GC_IDX2PTR(GC_FIRST_ROOT);
		last = GC_IDX2PTR(end);
		while (current != last) {
			if (GC_IS_GARBAGE(current->ref)) {
				p = GC_GET_PTR(current->ref);
				GC_LINK_UNUSED(current);
				GC_G(num_roots)--;
				efree(p);
			}
			current++;
		}

		GC_G(free_time) += zend_hrtime() - free_start_time;

		GC_TRACE("Collection finished");
		GC_G(collected) += count;
		total_count += count;
		GC_G(gc_active) = 0;
	}

	gc_compact();

	/* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
	 * up. We do this only once: If we encounter more destructors on the second run, we'll not
	 * run GC another time. */
	if (should_rerun_gc && !did_rerun_gc) {
		did_rerun_gc = true;
		goto rerun_gc;
	}

finish:
	zend_get_gc_buffer_release();

	/* Prevent GC from running during zend_gc_check_root_tmpvars, before
	 * gc_threshold is adjusted, as this may result in unbounded recursion */
	GC_G(gc_active) = 1;
	zend_gc_check_root_tmpvars();
	GC_G(gc_active) = 0;

	GC_G(collector_time) += zend_hrtime() - start_time;
	return total_count;
}
2193
2194
/* Fill *status with a snapshot of the collector's state, statistics and
 * timing counters (exposed to userland via gc_status()). */
ZEND_API void zend_gc_get_status(zend_gc_status *status)
{
	/* state flags */
	status->active = GC_G(gc_active);
	status->gc_protected = GC_G(gc_protected);
	status->full = GC_G(gc_full);
	/* cumulative run/collection counters */
	status->runs = GC_G(gc_runs);
	status->collected = GC_G(collected);
	/* root buffer configuration and usage */
	status->threshold = GC_G(gc_threshold);
	status->buf_size = GC_G(buf_size);
	status->num_roots = GC_G(num_roots);
	/* wall-clock time since GC activation, plus accumulated phase times */
	status->application_time = zend_hrtime() - GC_G(activated_at);
	status->collector_time = GC_G(collector_time);
	status->dtor_time = GC_G(dtor_time);
	status->free_time = GC_G(free_time);
}
2209
2210
49.0k
/* Return the (single, executor-global) scratch buffer used by get_gc()
 * handlers, reset to empty. */
ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
	/* There can only be one get_gc() call active at a time,
	 * so there only needs to be one buffer. */
	zend_get_gc_buffer *buffer = &EG(get_gc_buffer);
	buffer->cur = buffer->start;
	return buffer;
}
2217
2218
1.96k
/* Double the capacity of the get_gc() scratch buffer (first allocation is
 * 64 zvals), preserving its current contents and cursor position. */
ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
	size_t used = gc_buffer->end - gc_buffer->start;
	size_t new_capacity;
	if (used == 0) {
		new_capacity = 64;
	} else {
		new_capacity = used * 2;
	}
	/* erealloc keeps the existing elements; recompute the derived pointers */
	gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval));
	gc_buffer->end = gc_buffer->start + new_capacity;
	gc_buffer->cur = gc_buffer->start + used;
}
2225
2226
542k
static void zend_get_gc_buffer_release(void) {
2227
542k
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2228
542k
  efree(gc_buffer->start);
2229
542k
  gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
2230
542k
}
2231
2232
/* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
2233
 * cycles. However, there are some rare exceptions where this is possible, in which case we rely
2234
 * on the producing code to root the value. If a GC run occurs between the rooting and consumption
2235
 * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
2236
542k
static void zend_gc_check_root_tmpvars(void) {
2237
542k
  zend_execute_data *ex = EG(current_execute_data);
2238
664k
  for (; ex; ex = ex->prev_execute_data) {
2239
122k
    zend_function *func = ex->func;
2240
122k
    if (!func || !ZEND_USER_CODE(func->type)) {
2241
119k
      continue;
2242
119k
    }
2243
2244
2.16k
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2245
8.19k
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2246
6.35k
      const zend_live_range *range = &func->op_array.live_range[i];
2247
6.35k
      if (range->start > op_num) {
2248
330
        break;
2249
330
      }
2250
6.02k
      if (range->end <= op_num) {
2251
5.69k
        continue;
2252
5.69k
      }
2253
2254
331
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2255
331
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2256
257
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2257
257
        zval *var = ZEND_CALL_VAR(ex, var_num);
2258
257
        if (Z_COLLECTABLE_P(var)) {
2259
210
          gc_check_possible_root(Z_COUNTED_P(var));
2260
210
        }
2261
257
      }
2262
331
    }
2263
2.16k
  }
2264
542k
}
2265
2266
20.1k
static void zend_gc_remove_root_tmpvars(void) {
2267
20.1k
  zend_execute_data *ex = EG(current_execute_data);
2268
24.6k
  for (; ex; ex = ex->prev_execute_data) {
2269
4.52k
    zend_function *func = ex->func;
2270
4.52k
    if (!func || !ZEND_USER_CODE(func->type)) {
2271
2.65k
      continue;
2272
2.65k
    }
2273
2274
1.86k
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2275
7.63k
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2276
6.06k
      const zend_live_range *range = &func->op_array.live_range[i];
2277
6.06k
      if (range->start > op_num) {
2278
299
        break;
2279
299
      }
2280
5.76k
      if (range->end <= op_num) {
2281
5.52k
        continue;
2282
5.52k
      }
2283
2284
240
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2285
240
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2286
240
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2287
240
        zval *var = ZEND_CALL_VAR(ex, var_num);
2288
240
        if (Z_COLLECTABLE_P(var)) {
2289
210
          GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var));
2290
210
        }
2291
240
      }
2292
240
    }
2293
1.86k
  }
2294
20.1k
}
2295
2296
#if GC_BENCH
2297
void gc_bench_print(void)
2298
{
2299
  fprintf(stderr, "GC Statistics\n");
2300
  fprintf(stderr, "-------------\n");
2301
  fprintf(stderr, "Runs:               %d\n", GC_G(gc_runs));
2302
  fprintf(stderr, "Collected:          %d\n", GC_G(collected));
2303
  fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length));
2304
  fprintf(stderr, "Root buffer peak:   %d\n\n", GC_G(root_buf_peak));
2305
  fprintf(stderr, "      Possible            Remove from  Marked\n");
2306
  fprintf(stderr, "        Root    Buffered     buffer     grey\n");
2307
  fprintf(stderr, "      --------  --------  -----------  ------\n");
2308
  fprintf(stderr, "ZVAL  %8d  %8d  %9d  %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey));
2309
}
2310
#endif
2311
2312
#ifdef ZTS
/* Report the size of the per-thread GC globals structure (thread-safe
 * builds only). */
size_t zend_gc_globals_size(void)
{
	return sizeof(zend_gc_globals);
}
#endif
2319
/* Body of the destructor fiber: repeatedly call the destructors for
 * root-buffer slots [GC_G(dtor_idx), GC_G(dtor_end)), then suspend until
 * the next GC run resumes it.  Returns when a destructor suspended the
 * fiber (gc_call_destructors() reports FAILURE) or when the fiber is being
 * destroyed during shutdown. */
static ZEND_FUNCTION(gc_destructor_fiber)
{
	uint32_t idx, end;

	zend_fiber *fiber = GC_G(dtor_fiber);
	ZEND_ASSERT(fiber != NULL);
	ZEND_ASSERT(fiber == EG(active_fiber));

	for (;;) {
		GC_G(dtor_fiber_running) = true;

		idx = GC_G(dtor_idx);
		end = GC_G(dtor_end);
		if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) {
			/* We resumed after being suspended by a destructor */
			return;
		}

		/* We have called all destructors. Suspend fiber until the next GC run
		 */
		GC_G(dtor_fiber_running) = false;
		zend_fiber_suspend(fiber, NULL, NULL);

		if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) {
			/* Fiber is being destroyed by shutdown sequence */
			if (GC_G(dtor_fiber) == fiber) {
				GC_G(dtor_fiber) = NULL;
			}
			/* drop the collector's reference and let GC see the fiber */
			GC_DELREF(&fiber->std);
			gc_check_possible_root((zend_refcounted*)&fiber->std.gc);
			return;
		}
	}
}
2353
2354
/* Internal function entry used as the entry point of the destructor fiber.
 * Its interned function_name is assigned later, in gc_init(). */
static zend_internal_function gc_destructor_fiber = {
	.type = ZEND_INTERNAL_FUNCTION,
	.fn_flags = ZEND_ACC_PUBLIC,
	.handler = ZEND_FN(gc_destructor_fiber),
};
2359
2360
void gc_init(void)
2361
16
{
2362
16
  gc_destructor_fiber.function_name = zend_string_init_interned(
2363
16
      "gc_destructor_fiber",
2364
16
      strlen("gc_destructor_fiber"),
2365
      true);
2366
16
}