Coverage Report

Created: 2025-12-14 06:05

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/php-src/Zend/zend_gc.c
Line
Count
Source
1
/*
2
   +----------------------------------------------------------------------+
3
   | Zend Engine                                                          |
4
   +----------------------------------------------------------------------+
5
   | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
6
   +----------------------------------------------------------------------+
7
   | This source file is subject to version 2.00 of the Zend license,     |
8
   | that is bundled with this package in the file LICENSE, and is        |
9
   | available through the world-wide-web at the following url:           |
10
   | http://www.zend.com/license/2_00.txt.                                |
11
   | If you did not receive a copy of the Zend license and are unable to  |
12
   | obtain it through the world-wide-web, please send a note to          |
13
   | license@zend.com so we can mail you a copy immediately.              |
14
   +----------------------------------------------------------------------+
15
   | Authors: David Wang <planetbeing@gmail.com>                          |
16
   |          Dmitry Stogov <dmitry@php.net>                              |
17
   +----------------------------------------------------------------------+
18
*/
19
20
/**
21
 * zend_gc_collect_cycles
22
 * ======================
23
 *
24
 * Colors and its meaning
25
 * ----------------------
26
 *
27
 * BLACK  (GC_BLACK)   - In use or free.
28
 * GREY   (GC_GREY)    - Possible member of cycle.
29
 * WHITE  (GC_WHITE)   - Member of garbage cycle.
30
 * PURPLE (GC_PURPLE)  - Possible root of cycle.
31
 *
32
 * Colors described in the paper but not used
33
 * ------------------------------------------
34
 *
35
 * GREEN - Acyclic
36
 * RED   - Candidate cycle undergoing
37
 * ORANGE - Candidate cycle awaiting epoch boundary.
38
 *
39
 *
40
 * Flow
41
 * =====
42
 *
43
 * The garbage collect cycle starts from 'gc_mark_roots', which traverses the
44
 * possible roots, and calls mark_grey for roots are marked purple with
45
 * depth-first traverse.
46
 *
47
 * After all possible roots are traversed and marked,
48
 * gc_scan_roots will be called, and each root will be called with
49
 * gc_scan(root->ref)
50
 *
51
 * gc_scan checks the colors of possible members.
52
 *
53
 * If the node is marked as grey and the refcount > 0
54
 *    gc_scan_black will be called on that node to scan it's subgraph.
55
 * otherwise (refcount == 0), it marks the node white.
56
 *
57
 * A node MAY be added to possible roots when ZEND_UNSET_VAR happens or
58
 * zend_assign_to_variable is called only when possible garbage node is
59
 * produced.
60
 * gc_possible_root() will be called to add the nodes to possible roots.
61
 *
62
 *
63
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64
 * get the object properties to scan.
65
 *
66
 *
67
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68
 */
69
#include "zend.h"
70
#include "zend_API.h"
71
#include "zend_compile.h"
72
#include "zend_errors.h"
73
#include "zend_fibers.h"
74
#include "zend_hrtime.h"
75
#include "zend_portability.h"
76
#include "zend_types.h"
77
#include "zend_weakrefs.h"
78
#include "zend_string.h"
79
80
#ifndef GC_BENCH
81
# define GC_BENCH 0
82
#endif
83
84
#ifndef ZEND_GC_DEBUG
85
# define ZEND_GC_DEBUG 0
86
#endif
87
88
/* GC_INFO layout */
89
200k
#define GC_ADDRESS  0x0fffffu
90
215k
#define GC_COLOR    0x300000u
91
92
#define GC_BLACK    0x000000u /* must be zero */
93
#define GC_WHITE    0x100000u
94
#define GC_GREY     0x200000u
95
#define GC_PURPLE   0x300000u
96
97
/* Debug tracing */
98
#if ZEND_GC_DEBUG > 1
99
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
100
# define GC_TRACE_REF(ref, format, ...) \
101
  do { \
102
    gc_trace_ref((zend_refcounted *) ref); \
103
    fprintf(stderr, format "\n", ##__VA_ARGS__); \
104
  } while (0)
105
# define GC_TRACE_SET_COLOR(ref, color) \
106
  GC_TRACE_REF(ref, "->%s", gc_color_name(color))
107
#else
108
# define GC_TRACE_REF(ref, format, ...)
109
# define GC_TRACE_SET_COLOR(ref, new_color)
110
# define GC_TRACE(str)
111
#endif
112
113
/* GC_INFO access */
114
#define GC_REF_ADDRESS(ref) \
115
200k
  (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
116
117
#define GC_REF_COLOR(ref) \
118
  (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
119
120
#define GC_REF_CHECK_COLOR(ref, color) \
121
209k
  ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
122
123
402k
#define GC_REF_SET_INFO(ref, info) do { \
124
402k
    GC_TYPE_INFO(ref) = \
125
402k
      (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
126
402k
      ((info) << GC_INFO_SHIFT); \
127
402k
  } while (0)
128
129
3.43k
#define GC_REF_SET_COLOR(ref, c) do { \
130
3.43k
    GC_TRACE_SET_COLOR(ref, c); \
131
3.43k
    GC_TYPE_INFO(ref) = \
132
3.43k
      (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
133
3.43k
      ((c) << GC_INFO_SHIFT); \
134
3.43k
  } while (0)
135
136
2.14k
#define GC_REF_SET_BLACK(ref) do { \
137
2.14k
    GC_TRACE_SET_COLOR(ref, GC_BLACK); \
138
2.14k
    GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
139
2.14k
  } while (0)
140
141
#define GC_REF_SET_PURPLE(ref) do { \
142
    GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
143
    GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
144
  } while (0)
145
146
/* bit stealing tags for gc_root_buffer.ref */
147
4.46k
#define GC_BITS    0x3
148
149
3.47k
#define GC_ROOT    0x0 /* possible root of circular garbage     */
150
201k
#define GC_UNUSED  0x1 /* part of linked list of unused buffers */
151
322
#define GC_GARBAGE 0x2 /* garbage to delete                     */
152
10
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */
153
154
#define GC_GET_PTR(ptr) \
155
214
  ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
156
157
#define GC_IS_ROOT(ptr) \
158
3.47k
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
159
#define GC_IS_UNUSED(ptr) \
160
575
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
161
#define GC_IS_GARBAGE(ptr) \
162
196
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
163
#define GC_IS_DTOR_GARBAGE(ptr) \
164
8
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)
165
166
#define GC_MAKE_GARBAGE(ptr) \
167
126
  ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
168
#define GC_MAKE_DTOR_GARBAGE(ptr) \
169
2
  ((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
170
171
/* GC address conversion */
172
407k
#define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
173
201k
#define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))
174
175
/* Get the value to be placed in an unused buffer entry with the specified next unused list index */
176
201k
#define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
177
/* Get the index of the next item in the unused list from the given root buffer entry. */
178
713
#define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
179
180
/* GC buffers */
181
102k
#define GC_INVALID           0
182
305k
#define GC_FIRST_ROOT        1
183
184
2
#define GC_DEFAULT_BUF_SIZE  (16 * 1024)
185
0
#define GC_BUF_GROW_STEP     (128 * 1024)
186
187
0
#define GC_MAX_UNCOMPRESSED  (512 * 1024)
188
0
#define GC_MAX_BUF_SIZE      0x40000000
189
190
2
#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
191
0
#define GC_THRESHOLD_STEP    10000
192
0
#define GC_THRESHOLD_MAX     1000000000
193
0
#define GC_THRESHOLD_TRIGGER 100
194
195
/* GC flags */
196
64
#define GC_HAS_DESTRUCTORS  (1<<0)
197
198
/* Weak maps */
199
51
#define Z_FROM_WEAKMAP_KEY    (1<<0)
200
52
#define Z_FROM_WEAKMAP      (1<<1)
201
202
/* The WeakMap entry zv is reachable from roots by following the virtual
203
 * reference from the a WeakMap key to the entry */
204
#define GC_FROM_WEAKMAP_KEY(zv) \
205
25
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT))
206
207
13
#define GC_SET_FROM_WEAKMAP_KEY(zv) do {                    \
208
13
  zval *_z = (zv);                               \
209
13
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
210
13
} while (0)
211
212
13
#define GC_UNSET_FROM_WEAKMAP_KEY(zv) do {                    \
213
13
  zval *_z = (zv);                               \
214
13
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
215
13
} while (0)
216
217
/* The WeakMap entry zv is reachable from roots by following the reference from
218
 * the WeakMap */
219
#define GC_FROM_WEAKMAP(zv) \
220
23
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT))
221
222
14
#define GC_SET_FROM_WEAKMAP(zv) do {                        \
223
14
  zval *_z = (zv);                               \
224
14
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
225
14
} while (0)
226
227
15
#define GC_UNSET_FROM_WEAKMAP(zv) do {                      \
228
15
  zval *_z = (zv);                               \
229
15
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
230
15
} while (0)
231
232
/* unused buffers */
233
234
/* Are there any unused root buffer entries? */
235
#define GC_HAS_UNUSED() \
236
21
  (GC_G(unused) != GC_INVALID)
237
238
/* Get the next unused entry and remove it from the list */
239
#define GC_FETCH_UNUSED() \
240
713
  gc_fetch_unused()
241
242
/* Add a root buffer entry to the unused list */
243
#define GC_LINK_UNUSED(root) \
244
201k
  gc_link_unused(root)
245
246
#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
247
  (GC_G(first_unused) < GC_G(gc_threshold))
248
#define GC_HAS_NEXT_UNUSED() \
249
21
  (GC_G(first_unused) != GC_G(buf_size))
250
#define GC_FETCH_NEXT_UNUSED() \
251
200k
  gc_fetch_next_unused()
252
253
ZEND_API int (*gc_collect_cycles)(void);
254
255
/* The type of a root buffer entry.
256
 *
257
 * The lower two bits are used for flags and need to be masked out to
258
 * reconstruct a pointer.
259
 *
260
 * When a node in the root buffer is removed, the non-flag bits of the
261
 * unused entry are used to store the index of the next entry in the unused
262
 * list. */
263
typedef struct _gc_root_buffer {
264
  zend_refcounted  *ref;
265
} gc_root_buffer;
266
267
typedef struct _zend_gc_globals {
268
  /* The root buffer, which stores possible roots of reference cycles. It is
269
   * also used to store garbage to be collected at the end of a run.
270
   * A single array which is reallocated as necessary. */
271
  gc_root_buffer   *buf;
272
273
  bool         gc_enabled;
274
  bool         gc_active;        /* GC currently running, forbid nested GC */
275
  bool         gc_protected;     /* GC protected, forbid root additions */
276
  bool         gc_full;
277
278
  uint32_t          unused;     /* linked list of unused buffers    */
279
  uint32_t          first_unused;   /* first unused buffer              */
280
  uint32_t          gc_threshold;     /* GC collection threshold          */
281
  uint32_t          buf_size;     /* size of the GC buffer            */
282
  uint32_t          num_roots;    /* number of roots in GC buffer     */
283
284
  uint32_t gc_runs;         /* number of GC runs since reset */
285
  uint32_t collected;         /* number of collected nodes since reset */
286
287
  zend_hrtime_t activated_at;     /* the timestamp of the last reset */
288
  zend_hrtime_t collector_time;   /* time spent running GC (ns) */
289
  zend_hrtime_t dtor_time;      /* time spent calling destructors (ns) */
290
  zend_hrtime_t free_time;      /* time spent destroying nodes and freeing memory (ns) */
291
292
  uint32_t dtor_idx;      /* root buffer index */
293
  uint32_t dtor_end;
294
  zend_fiber *dtor_fiber;
295
  bool dtor_fiber_running;
296
297
#if GC_BENCH
298
  uint32_t root_buf_length;
299
  uint32_t root_buf_peak;
300
  uint32_t zval_possible_root;
301
  uint32_t zval_buffered;
302
  uint32_t zval_remove_from_buffer;
303
  uint32_t zval_marked_grey;
304
#endif
305
} zend_gc_globals;
306
307
#ifdef ZTS
308
static int gc_globals_id;
309
static size_t gc_globals_offset;
310
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
311
#else
312
4.09M
#define GC_G(v) (gc_globals.v)
313
static zend_gc_globals gc_globals;
314
#endif
315
316
#if GC_BENCH
317
# define GC_BENCH_INC(counter) GC_G(counter)++
318
# define GC_BENCH_DEC(counter) GC_G(counter)--
319
# define GC_BENCH_PEAK(peak, counter) do {    \
320
    if (GC_G(counter) > GC_G(peak)) {   \
321
      GC_G(peak) = GC_G(counter);     \
322
    }                   \
323
  } while (0)
324
#else
325
# define GC_BENCH_INC(counter)
326
# define GC_BENCH_DEC(counter)
327
# define GC_BENCH_PEAK(peak, counter)
328
#endif
329
330
331
0
#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
332
333
typedef struct _gc_stack gc_stack;
334
335
/* The stack used for graph traversal is stored as a linked list of segments */
336
struct _gc_stack {
337
  gc_stack        *prev;
338
  gc_stack        *next;
339
  zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
340
};
341
342
#define GC_STACK_DCL(init) \
343
3.38k
  gc_stack *_stack = init; \
344
3.38k
  size_t    _top = 0;
345
346
#define GC_STACK_PUSH(ref) \
347
232
  gc_stack_push(&_stack, &_top, ref);
348
349
#define GC_STACK_POP() \
350
3.61k
  gc_stack_pop(&_stack, &_top)
351
352
static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
353
664
{
354
664
  if (UNEXPECTED(!stack->next)) {
355
664
    gc_stack *segment = emalloc(sizeof(gc_stack));
356
664
    segment->prev = stack;
357
664
    segment->next = NULL;
358
664
    stack->next = segment;
359
664
  }
360
664
  return stack->next;
361
664
}
362
363
static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
364
232
{
365
232
  if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
366
0
    (*stack) = gc_stack_next(*stack);
367
0
    (*top) = 0;
368
0
  }
369
232
  (*stack)->data[(*top)++] = ref;
370
232
}
371
372
static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
373
3.61k
{
374
3.61k
  if (UNEXPECTED((*top) == 0)) {
375
3.38k
    if (!(*stack)->prev) {
376
3.38k
      return NULL;
377
3.38k
    } else {
378
0
      (*stack) = (*stack)->prev;
379
0
      (*top) = GC_STACK_SEGMENT_SIZE - 1;
380
0
      return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
381
0
    }
382
3.38k
  } else {
383
232
    return (*stack)->data[--(*top)];
384
232
  }
385
3.61k
}
386
387
static void gc_stack_free(gc_stack *stack)
388
724
{
389
724
  gc_stack *p = stack->next;
390
391
1.38k
  while (p) {
392
664
    stack = p->next;
393
664
    efree(p);
394
664
    p = stack;
395
664
  }
396
724
}
397
398
/* Map a full index to a compressed index.
399
 *
400
 * The root buffer can have up to 2^30 entries, but we only have 20 bits to
401
 * store the index. So we use the 1<<19 bit as a compression flag and use the
402
 * other 19 bits to store the index modulo 2^19. */
403
static zend_always_inline uint32_t gc_compress(uint32_t idx)
404
201k
{
405
201k
  if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
406
201k
    return idx;
407
201k
  }
408
0
  return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
409
201k
}
410
411
/* Find the root buffer entry given a pointer and a compressed index.
412
 * Iterate through the root buffer in steps of 2^19 until the pointer
413
 * matches. */
414
static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
415
0
{
416
0
  gc_root_buffer *root = GC_IDX2PTR(idx);
417
418
0
  if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
419
0
    return root;
420
0
  }
421
422
0
  while (1) {
423
0
    idx += GC_MAX_UNCOMPRESSED;
424
0
    ZEND_ASSERT(idx < GC_G(first_unused));
425
0
    root = GC_IDX2PTR(idx);
426
0
    if (GC_GET_PTR(root->ref) == ref) {
427
0
      return root;
428
0
    }
429
0
  }
430
0
}
431
432
/* Get the index of the next unused root buffer entry, and remove it from the
433
 * unused list. GC_HAS_UNUSED() must be true before calling this. */
434
static zend_always_inline uint32_t gc_fetch_unused(void)
435
713
{
436
713
  uint32_t idx;
437
713
  gc_root_buffer *root;
438
439
713
  ZEND_ASSERT(GC_HAS_UNUSED());
440
713
  idx = GC_G(unused);
441
713
  root = GC_IDX2PTR(idx);
442
713
  ZEND_ASSERT(GC_IS_UNUSED(root->ref));
443
713
  GC_G(unused) = GC_LIST2IDX(root->ref);
444
713
  return idx;
445
713
}
446
447
/* Add a root buffer entry to the unused list */
448
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
449
201k
{
450
201k
  root->ref = GC_IDX2LIST(GC_G(unused));
451
201k
  GC_G(unused) = GC_PTR2IDX(root);
452
201k
}
453
454
static zend_always_inline uint32_t gc_fetch_next_unused(void)
455
200k
{
456
200k
  uint32_t idx;
457
458
200k
  ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
459
200k
  idx = GC_G(first_unused);
460
200k
  GC_G(first_unused) = GC_G(first_unused) + 1;
461
200k
  return idx;
462
200k
}
463
464
#if ZEND_GC_DEBUG > 1
465
static const char *gc_color_name(uint32_t color) {
466
  switch (color) {
467
    case GC_BLACK: return "black";
468
    case GC_WHITE: return "white";
469
    case GC_GREY: return "grey";
470
    case GC_PURPLE: return "purple";
471
    default: return "unknown";
472
  }
473
}
474
static void gc_trace_ref(zend_refcounted *ref) {
475
  if (GC_TYPE(ref) == IS_OBJECT) {
476
    zend_object *obj = (zend_object *) ref;
477
    fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
478
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
479
      gc_color_name(GC_REF_COLOR(ref)),
480
      obj->ce->name->val, obj->handle);
481
  } else if (GC_TYPE(ref) == IS_ARRAY) {
482
    zend_array *arr = (zend_array *) ref;
483
    fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
484
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
485
      gc_color_name(GC_REF_COLOR(ref)),
486
      zend_hash_num_elements(arr));
487
  } else {
488
    fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
489
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
490
      gc_color_name(GC_REF_COLOR(ref)),
491
      GC_TYPE(ref) == IS_REFERENCE
492
        ? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
493
  }
494
}
495
#endif
496
497
/* Mark a root buffer entry unused */
498
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
499
201k
{
500
201k
  GC_LINK_UNUSED(root);
501
201k
  GC_G(num_roots)--;
502
201k
  GC_BENCH_DEC(root_buf_length);
503
201k
}
504
505
static void root_buffer_dtor(zend_gc_globals *gc_globals)
506
0
{
507
0
  if (gc_globals->buf) {
508
0
    free(gc_globals->buf);
509
0
    gc_globals->buf = NULL;
510
0
  }
511
0
}
512
513
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
514
2
{
515
2
  gc_globals->gc_enabled = false;
516
2
  gc_globals->gc_active = false;
517
2
  gc_globals->gc_protected = true;
518
2
  gc_globals->gc_full = false;
519
520
2
  gc_globals->buf = NULL;
521
2
  gc_globals->unused = GC_INVALID;
522
2
  gc_globals->first_unused = GC_INVALID;
523
2
  gc_globals->gc_threshold = GC_INVALID;
524
2
  gc_globals->buf_size = GC_INVALID;
525
2
  gc_globals->num_roots = 0;
526
527
2
  gc_globals->gc_runs = 0;
528
2
  gc_globals->collected = 0;
529
2
  gc_globals->collector_time = 0;
530
2
  gc_globals->dtor_time = 0;
531
2
  gc_globals->free_time = 0;
532
2
  gc_globals->activated_at = 0;
533
534
2
  gc_globals->dtor_idx = GC_FIRST_ROOT;
535
2
  gc_globals->dtor_end = 0;
536
2
  gc_globals->dtor_fiber = NULL;
537
2
  gc_globals->dtor_fiber_running = false;
538
539
#if GC_BENCH
540
  gc_globals->root_buf_length = 0;
541
  gc_globals->root_buf_peak = 0;
542
  gc_globals->zval_possible_root = 0;
543
  gc_globals->zval_buffered = 0;
544
  gc_globals->zval_remove_from_buffer = 0;
545
  gc_globals->zval_marked_grey = 0;
546
#endif
547
2
}
548
549
void gc_globals_ctor(void)
550
2
{
551
#ifdef ZTS
552
  ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
553
#else
554
2
  gc_globals_ctor_ex(&gc_globals);
555
2
#endif
556
2
}
557
558
void gc_globals_dtor(void)
559
0
{
560
0
#ifndef ZTS
561
0
  root_buffer_dtor(&gc_globals);
562
0
#endif
563
0
}
564
565
void gc_reset(void)
566
50.4k
{
567
50.4k
  if (GC_G(buf)) {
568
50.4k
    GC_G(gc_active) = 0;
569
50.4k
    GC_G(gc_protected) = 0;
570
50.4k
    GC_G(gc_full) = 0;
571
50.4k
    GC_G(unused) = GC_INVALID;
572
50.4k
    GC_G(first_unused) = GC_FIRST_ROOT;
573
50.4k
    GC_G(num_roots) = 0;
574
575
50.4k
    GC_G(gc_runs) = 0;
576
50.4k
    GC_G(collected) = 0;
577
578
50.4k
    GC_G(collector_time) = 0;
579
50.4k
    GC_G(dtor_time) = 0;
580
50.4k
    GC_G(free_time) = 0;
581
582
50.4k
    GC_G(dtor_idx) = GC_FIRST_ROOT;
583
50.4k
    GC_G(dtor_end) = 0;
584
50.4k
    GC_G(dtor_fiber) = NULL;
585
50.4k
    GC_G(dtor_fiber_running) = false;
586
587
#if GC_BENCH
588
    GC_G(root_buf_length) = 0;
589
    GC_G(root_buf_peak) = 0;
590
    GC_G(zval_possible_root) = 0;
591
    GC_G(zval_buffered) = 0;
592
    GC_G(zval_remove_from_buffer) = 0;
593
    GC_G(zval_marked_grey) = 0;
594
#endif
595
50.4k
  }
596
597
50.4k
  GC_G(activated_at) = zend_hrtime();
598
50.4k
}
599
600
/* Enable/disable the garbage collector.
601
 * Initialize globals if necessary. */
602
ZEND_API bool gc_enable(bool enable)
603
24
{
604
24
  bool old_enabled = GC_G(gc_enabled);
605
24
  GC_G(gc_enabled) = enable;
606
24
  if (enable && !old_enabled && GC_G(buf) == NULL) {
607
2
    GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
608
2
    GC_G(buf)[0].ref = NULL;
609
2
    GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
610
2
    GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
611
2
    gc_reset();
612
2
  }
613
24
  return old_enabled;
614
24
}
615
616
ZEND_API bool gc_enabled(void)
617
11
{
618
11
  return GC_G(gc_enabled);
619
11
}
620
621
/* Protect the GC root buffer (prevent additions) */
622
ZEND_API bool gc_protect(bool protect)
623
1.82k
{
624
1.82k
  bool old_protected = GC_G(gc_protected);
625
1.82k
  GC_G(gc_protected) = protect;
626
1.82k
  return old_protected;
627
1.82k
}
628
629
ZEND_API bool gc_protected(void)
630
0
{
631
0
  return GC_G(gc_protected);
632
0
}
633
634
static void gc_grow_root_buffer(void)
635
0
{
636
0
  size_t new_size;
637
638
0
  if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
639
0
    if (!GC_G(gc_full)) {
640
0
      zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
641
0
      GC_G(gc_active) = 1;
642
0
      GC_G(gc_protected) = 1;
643
0
      GC_G(gc_full) = 1;
644
0
      return;
645
0
    }
646
0
  }
647
0
  if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
648
0
    new_size = GC_G(buf_size) * 2;
649
0
  } else {
650
0
    new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
651
0
  }
652
0
  if (new_size > GC_MAX_BUF_SIZE) {
653
0
    new_size = GC_MAX_BUF_SIZE;
654
0
  }
655
0
  GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
656
0
  GC_G(buf_size) = new_size;
657
0
}
658
659
/* Adjust the GC activation threshold given the number of nodes collected by the last run */
660
static void gc_adjust_threshold(int count)
661
0
{
662
0
  uint32_t new_threshold;
663
664
  /* TODO Very simple heuristic for dynamic GC buffer resizing:
665
   * If there are "too few" collections, increase the collection threshold
666
   * by a fixed step */
667
0
  if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
668
    /* increase */
669
0
    if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
670
0
      new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
671
0
      if (new_threshold > GC_THRESHOLD_MAX) {
672
0
        new_threshold = GC_THRESHOLD_MAX;
673
0
      }
674
0
      if (new_threshold > GC_G(buf_size)) {
675
0
        gc_grow_root_buffer();
676
0
      }
677
0
      if (new_threshold <= GC_G(buf_size)) {
678
0
        GC_G(gc_threshold) = new_threshold;
679
0
      }
680
0
    }
681
0
  } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
682
0
    new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
683
0
    if (new_threshold < GC_THRESHOLD_DEFAULT) {
684
0
      new_threshold = GC_THRESHOLD_DEFAULT;
685
0
    }
686
0
    GC_G(gc_threshold) = new_threshold;
687
0
  }
688
0
}
689
690
/* Perform a GC run and then add a node as a possible root. */
691
static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
692
0
{
693
0
  uint32_t idx;
694
0
  gc_root_buffer *newRoot;
695
696
0
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
697
0
  ZEND_ASSERT(GC_INFO(ref) == 0);
698
699
0
  if (GC_G(gc_enabled) && !GC_G(gc_active)) {
700
0
    GC_ADDREF(ref);
701
0
    gc_adjust_threshold(gc_collect_cycles());
702
0
    if (UNEXPECTED(GC_DELREF(ref) == 0)) {
703
0
      rc_dtor_func(ref);
704
0
      return;
705
0
    } else if (UNEXPECTED(GC_INFO(ref))) {
706
0
      return;
707
0
    }
708
0
  }
709
710
0
  if (GC_HAS_UNUSED()) {
711
0
    idx = GC_FETCH_UNUSED();
712
0
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
713
0
    idx = GC_FETCH_NEXT_UNUSED();
714
0
  } else {
715
0
    gc_grow_root_buffer();
716
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
717
0
      return;
718
0
    }
719
0
    idx = GC_FETCH_NEXT_UNUSED();
720
0
  }
721
722
0
  newRoot = GC_IDX2PTR(idx);
723
0
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
724
0
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
725
726
0
  idx = gc_compress(idx);
727
0
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
728
0
  GC_G(num_roots)++;
729
730
0
  GC_BENCH_INC(zval_buffered);
731
0
  GC_BENCH_INC(root_buf_length);
732
0
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
733
0
}
734
735
/* Add a possible root node to the buffer.
736
 * Maybe perform a GC run. */
737
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
738
206k
{
739
206k
  uint32_t idx;
740
206k
  gc_root_buffer *newRoot;
741
742
206k
  if (UNEXPECTED(GC_G(gc_protected))) {
743
4.74k
    return;
744
4.74k
  }
745
746
201k
  GC_BENCH_INC(zval_possible_root);
747
748
201k
  if (EXPECTED(GC_HAS_UNUSED())) {
749
713
    idx = GC_FETCH_UNUSED();
750
200k
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
751
200k
    idx = GC_FETCH_NEXT_UNUSED();
752
200k
  } else {
753
0
    gc_possible_root_when_full(ref);
754
0
    return;
755
0
  }
756
757
201k
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
758
201k
  ZEND_ASSERT(GC_INFO(ref) == 0);
759
760
201k
  newRoot = GC_IDX2PTR(idx);
761
201k
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
762
201k
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
763
764
201k
  idx = gc_compress(idx);
765
201k
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
766
201k
  GC_G(num_roots)++;
767
768
201k
  GC_BENCH_INC(zval_buffered);
769
201k
  GC_BENCH_INC(root_buf_length);
770
201k
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
771
201k
}
772
773
/* Add an extra root during a GC run */
774
static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref)
775
4
{
776
4
  uint32_t idx;
777
4
  gc_root_buffer *newRoot;
778
779
4
  if (EXPECTED(GC_HAS_UNUSED())) {
780
0
    idx = GC_FETCH_UNUSED();
781
4
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
782
4
    idx = GC_FETCH_NEXT_UNUSED();
783
4
  } else {
784
0
    gc_grow_root_buffer();
785
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
786
      /* TODO: can this really happen? */
787
0
      return;
788
0
    }
789
0
    idx = GC_FETCH_NEXT_UNUSED();
790
0
  }
791
792
4
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
793
4
  ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0);
794
795
4
  newRoot = GC_IDX2PTR(idx);
796
4
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
797
798
4
  idx = gc_compress(idx);
799
4
  GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref));
800
4
  GC_G(num_roots)++;
801
802
4
  GC_BENCH_INC(zval_buffered);
803
4
  GC_BENCH_INC(root_buf_length);
804
4
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
805
4
}
806
807
/* Remove a node from the root buffer given its compressed index */
808
static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
809
0
{
810
0
  gc_root_buffer *root = gc_decompress(ref, idx);
811
0
  gc_remove_from_roots(root);
812
0
}
813
814
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
815
200k
{
816
200k
  gc_root_buffer *root;
817
200k
  uint32_t idx = GC_REF_ADDRESS(ref);
818
819
200k
  GC_BENCH_INC(zval_remove_from_buffer);
820
821
200k
  if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
822
200k
    GC_TRACE_SET_COLOR(ref, GC_BLACK);
823
200k
  }
824
200k
  GC_REF_SET_INFO(ref, 0);
825
826
  /* Perform decompression only in case of large buffers */
827
200k
  if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
828
0
    gc_remove_compressed(ref, idx);
829
0
    return;
830
0
  }
831
832
200k
  ZEND_ASSERT(idx);
833
200k
  root = GC_IDX2PTR(idx);
834
200k
  gc_remove_from_roots(root);
835
200k
}
836
837
/* Mark all nodes reachable from ref as black (live). Restore the reference
838
 * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan.
839
 * To implement a depth-first search, discovered nodes are added to a stack
840
 * which is processed iteratively. */
841
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
842
1.04k
{
843
1.04k
  HashTable *ht;
844
1.04k
  Bucket *p;
845
1.04k
  zval *zv;
846
1.04k
  uint32_t n;
847
1.04k
  GC_STACK_DCL(stack);
848
849
1.76k
tail_call:
850
1.76k
  if (GC_TYPE(ref) == IS_OBJECT) {
851
1.17k
    zend_object *obj = (zend_object*)ref;
852
853
1.17k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
854
1.17k
      zval *table;
855
1.17k
      int len;
856
857
1.17k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
858
6
        zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
859
6
        n = len;
860
6
        zv = table;
861
12
        for (; n != 0; n-=2) {
862
6
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
863
6
          zval *entry = (zval*) Z_PTR_P(zv);
864
6
          zval *weakmap = zv+1;
865
6
          ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
866
6
          if (Z_OPT_COLLECTABLE_P(entry)) {
867
6
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
868
6
            if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) {
869
              /* Weakmap was scanned in gc_mark_roots, we must
870
               * ensure that it's eventually scanned in
871
               * gc_scan_roots as well. */
872
2
              if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) {
873
2
                gc_extra_root(Z_COUNTED_P(weakmap));
874
2
              }
875
4
            } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) {
876
              /* Both the entry weakmap and key are BLACK, so we
877
               * can mark the entry BLACK as well.
878
               * !GC_FROM_WEAKMAP(entry) means that the weakmap
879
               * was already scanned black (or will not be
880
               * scanned), so it's our responsibility to mark the
881
               * entry */
882
3
              ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK));
883
3
              ref = Z_COUNTED_P(entry);
884
3
              GC_ADDREF(ref);
885
3
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
886
0
                GC_REF_SET_BLACK(ref);
887
0
                GC_STACK_PUSH(ref);
888
0
              }
889
3
            }
890
6
          }
891
6
          zv+=2;
892
6
        }
893
6
      }
894
895
1.17k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
896
5
        zend_weakmap_get_key_entry_gc(obj, &table, &len);
897
5
        n = len;
898
5
        zv = table;
899
13
        for (; n != 0; n-=2) {
900
8
          ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR);
901
8
          zval *key = zv;
902
8
          zval *entry = (zval*) Z_PTR_P(zv+1);
903
8
          if (Z_OPT_COLLECTABLE_P(entry)) {
904
8
            GC_UNSET_FROM_WEAKMAP(entry);
905
8
            if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) {
906
              /* Key was scanned in gc_mark_roots, we must
907
               * ensure that it's eventually scanned in
908
               * gc_scan_roots as well. */
909
4
              if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) {
910
2
                gc_extra_root(Z_COUNTED_P(key));
911
2
              }
912
4
            } else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) {
913
              /* Both the entry weakmap and key are BLACK, so we
914
               * can mark the entry BLACK as well.
915
               * !GC_FROM_WEAKMAP_KEY(entry) means that the key
916
               * was already scanned black (or will not be
917
               * scanned), so it's our responsibility to mark the
918
               * entry */
919
4
              ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK));
920
4
              ref = Z_COUNTED_P(entry);
921
4
              GC_ADDREF(ref);
922
4
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
923
4
                GC_REF_SET_BLACK(ref);
924
4
                GC_STACK_PUSH(ref);
925
4
              }
926
4
            }
927
8
          }
928
8
          zv += 2;
929
8
        }
930
5
        goto next;
931
5
      }
932
933
1.16k
      ht = obj->handlers->get_gc(obj, &table, &len);
934
1.16k
      n = len;
935
1.16k
      zv = table;
936
1.16k
      if (UNEXPECTED(ht)) {
937
217
        GC_ADDREF(ht);
938
217
        if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
939
215
          GC_REF_SET_BLACK(ht);
940
367
          for (; n != 0; n--) {
941
152
            if (Z_COLLECTABLE_P(zv)) {
942
34
              ref = Z_COUNTED_P(zv);
943
34
              GC_ADDREF(ref);
944
34
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
945
26
                GC_REF_SET_BLACK(ref);
946
26
                GC_STACK_PUSH(ref);
947
26
              }
948
34
            }
949
152
            zv++;
950
152
          }
951
215
          goto handle_ht;
952
215
        }
953
217
      }
954
955
1.21k
handle_zvals:
956
2.10M
      for (; n != 0; n--) {
957
2.09M
        if (Z_COLLECTABLE_P(zv)) {
958
569
          ref = Z_COUNTED_P(zv);
959
569
          GC_ADDREF(ref);
960
569
          if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
961
538
            GC_REF_SET_BLACK(ref);
962
538
            zv++;
963
987
            while (--n) {
964
449
              if (Z_COLLECTABLE_P(zv)) {
965
316
                zend_refcounted *ref = Z_COUNTED_P(zv);
966
316
                GC_ADDREF(ref);
967
316
                if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
968
40
                  GC_REF_SET_BLACK(ref);
969
40
                  GC_STACK_PUSH(ref);
970
40
                }
971
316
              }
972
449
              zv++;
973
449
            }
974
538
            goto tail_call;
975
538
          }
976
569
        }
977
2.09M
        zv++;
978
2.09M
      }
979
1.21k
    }
980
1.17k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
981
508
    ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
982
508
    ht = (zend_array*)ref;
983
723
handle_ht:
984
723
    n = ht->nNumUsed;
985
723
    zv = ht->arPacked;
986
723
    if (HT_IS_PACKED(ht)) {
987
261
      goto handle_zvals;
988
261
    }
989
990
462
    p = (Bucket*)zv;
991
1.19k
    for (; n != 0; n--) {
992
815
      zv = &p->val;
993
815
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
994
173
        zv = Z_INDIRECT_P(zv);
995
173
      }
996
815
      if (Z_COLLECTABLE_P(zv)) {
997
283
        ref = Z_COUNTED_P(zv);
998
283
        GC_ADDREF(ref);
999
283
        if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1000
78
          GC_REF_SET_BLACK(ref);
1001
78
          p++;
1002
101
          while (--n) {
1003
23
            zv = &p->val;
1004
23
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1005
3
              zv = Z_INDIRECT_P(zv);
1006
3
            }
1007
23
            if (Z_COLLECTABLE_P(zv)) {
1008
16
              zend_refcounted *ref = Z_COUNTED_P(zv);
1009
16
              GC_ADDREF(ref);
1010
16
              if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1011
10
                GC_REF_SET_BLACK(ref);
1012
10
                GC_STACK_PUSH(ref);
1013
10
              }
1014
16
            }
1015
23
            p++;
1016
23
          }
1017
78
          goto tail_call;
1018
78
        }
1019
283
      }
1020
737
      p++;
1021
737
    }
1022
462
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1023
85
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1024
27
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1025
27
      GC_ADDREF(ref);
1026
27
      if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1027
22
        GC_REF_SET_BLACK(ref);
1028
22
        goto tail_call;
1029
22
      }
1030
27
    }
1031
85
  }
1032
1033
1.12k
next:
1034
1.12k
  ref = GC_STACK_POP();
1035
1.12k
  if (ref) {
1036
80
    goto tail_call;
1037
80
  }
1038
1.12k
}
1039
1040
/* Traverse the graph of nodes referred to by ref. Decrement the reference
1041
 * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan. */
1042
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
1043
1.13k
{
1044
1.13k
  HashTable *ht;
1045
1.13k
  Bucket *p;
1046
1.13k
  zval *zv;
1047
1.13k
  uint32_t n;
1048
1.13k
  GC_STACK_DCL(stack);
1049
1050
1.90k
tail_call:
1051
1.90k
  GC_BENCH_INC(zval_marked_grey);
1052
1053
1.90k
  if (GC_TYPE(ref) == IS_OBJECT) {
1054
1.22k
    zend_object *obj = (zend_object*)ref;
1055
1056
1.22k
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1057
1.22k
      zval *table;
1058
1.22k
      int len;
1059
1060
1.22k
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1061
13
        zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
1062
13
        n = len;
1063
13
        zv = table;
1064
26
        for (; n != 0; n-=2) {
1065
13
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1066
13
          zval *entry = (zval*) Z_PTR_P(zv);
1067
13
          zval *weakmap = zv+1;
1068
13
          ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
1069
13
          if (Z_COLLECTABLE_P(entry)) {
1070
13
            GC_SET_FROM_WEAKMAP_KEY(entry);
1071
13
            ref = Z_COUNTED_P(entry);
1072
            /* Only DELREF if the contribution from the weakmap has
1073
             * not been cancelled yet */
1074
13
            if (!GC_FROM_WEAKMAP(entry)) {
1075
3
              GC_DELREF(ref);
1076
3
            }
1077
13
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1078
2
              GC_REF_SET_COLOR(ref, GC_GREY);
1079
2
              GC_STACK_PUSH(ref);
1080
2
            }
1081
13
          }
1082
13
          zv+=2;
1083
13
        }
1084
13
      }
1085
1086
1.22k
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1087
8
        zend_weakmap_get_entry_gc(obj, &table, &len);
1088
8
        n = len;
1089
8
        zv = table;
1090
22
        for (; n != 0; n--) {
1091
14
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1092
14
          zval *entry = (zval*) Z_PTR_P(zv);
1093
14
          if (Z_COLLECTABLE_P(entry)) {
1094
14
            GC_SET_FROM_WEAKMAP(entry);
1095
14
            ref = Z_COUNTED_P(entry);
1096
            /* Only DELREF if the contribution from the weakmap key
1097
             * has not been cancelled yet */
1098
14
            if (!GC_FROM_WEAKMAP_KEY(entry)) {
1099
11
              GC_DELREF(ref);
1100
11
            }
1101
14
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1102
11
              GC_REF_SET_COLOR(ref, GC_GREY);
1103
11
              GC_STACK_PUSH(ref);
1104
11
            }
1105
14
          }
1106
14
          zv++;
1107
14
        }
1108
8
        goto next;
1109
8
      }
1110
1111
1.21k
      ht = obj->handlers->get_gc(obj, &table, &len);
1112
1.21k
      n = len;
1113
1.21k
      zv = table;
1114
1.21k
      if (UNEXPECTED(ht)) {
1115
248
        GC_DELREF(ht);
1116
248
        if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1117
245
          GC_REF_SET_COLOR(ht, GC_GREY);
1118
402
          for (; n != 0; n--) {
1119
157
            if (Z_COLLECTABLE_P(zv)) {
1120
36
              ref = Z_COUNTED_P(zv);
1121
36
              GC_DELREF(ref);
1122
36
              if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1123
26
                GC_REF_SET_COLOR(ref, GC_GREY);
1124
26
                GC_STACK_PUSH(ref);
1125
26
              }
1126
36
            }
1127
157
            zv++;
1128
157
          }
1129
245
          goto handle_ht;
1130
245
        }
1131
248
      }
1132
1.27k
handle_zvals:
1133
2.10M
      for (; n != 0; n--) {
1134
2.09M
        if (Z_COLLECTABLE_P(zv)) {
1135
613
          ref = Z_COUNTED_P(zv);
1136
613
          GC_DELREF(ref);
1137
613
          if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1138
576
            GC_REF_SET_COLOR(ref, GC_GREY);
1139
576
            zv++;
1140
1.03k
            while (--n) {
1141
460
              if (Z_COLLECTABLE_P(zv)) {
1142
326
                zend_refcounted *ref = Z_COUNTED_P(zv);
1143
326
                GC_DELREF(ref);
1144
326
                if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1145
41
                  GC_REF_SET_COLOR(ref, GC_GREY);
1146
41
                  GC_STACK_PUSH(ref);
1147
41
                }
1148
326
              }
1149
460
              zv++;
1150
460
            }
1151
576
            goto tail_call;
1152
576
          }
1153
613
        }
1154
2.09M
        zv++;
1155
2.09M
      }
1156
1.27k
    }
1157
1.22k
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1158
551
    ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
1159
551
    ht = (zend_array*)ref;
1160
796
handle_ht:
1161
796
    n = ht->nNumUsed;
1162
796
    if (HT_IS_PACKED(ht)) {
1163
300
            zv = ht->arPacked;
1164
300
            goto handle_zvals;
1165
300
    }
1166
1167
496
    p = ht->arData;
1168
1.26k
    for (; n != 0; n--) {
1169
857
      zv = &p->val;
1170
857
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1171
176
        zv = Z_INDIRECT_P(zv);
1172
176
      }
1173
857
      if (Z_COLLECTABLE_P(zv)) {
1174
320
        ref = Z_COUNTED_P(zv);
1175
320
        GC_DELREF(ref);
1176
320
        if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1177
93
          GC_REF_SET_COLOR(ref, GC_GREY);
1178
93
          p++;
1179
116
          while (--n) {
1180
23
            zv = &p->val;
1181
23
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1182
3
              zv = Z_INDIRECT_P(zv);
1183
3
            }
1184
23
            if (Z_COLLECTABLE_P(zv)) {
1185
15
              zend_refcounted *ref = Z_COUNTED_P(zv);
1186
15
              GC_DELREF(ref);
1187
15
              if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1188
7
                GC_REF_SET_COLOR(ref, GC_GREY);
1189
7
                GC_STACK_PUSH(ref);
1190
7
              }
1191
15
            }
1192
23
            p++;
1193
23
          }
1194
93
          goto tail_call;
1195
93
        }
1196
320
      }
1197
764
      p++;
1198
764
    }
1199
496
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1200
128
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1201
70
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1202
70
      GC_DELREF(ref);
1203
70
      if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1204
15
        GC_REF_SET_COLOR(ref, GC_GREY);
1205
15
        goto tail_call;
1206
15
      }
1207
70
    }
1208
128
  }
1209
1210
1.21k
next:
1211
1.21k
  ref = GC_STACK_POP();
1212
1.21k
  if (ref) {
1213
87
    goto tail_call;
1214
87
  }
1215
1.21k
}
1216
1217
/* Two-Finger compaction algorithm */
1218
static void gc_compact(void)
1219
151k
{
1220
151k
  if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
1221
51.7k
    if (GC_G(num_roots)) {
1222
125
      gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
1223
125
      gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
1224
125
      gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
1225
125
      uint32_t idx;
1226
125
      zend_refcounted *p;
1227
1228
261
      while (free < scan) {
1229
240
        while (!GC_IS_UNUSED(free->ref)) {
1230
76
          free++;
1231
76
        }
1232
335
        while (GC_IS_UNUSED(scan->ref)) {
1233
171
          scan--;
1234
171
        }
1235
164
        if (scan > free) {
1236
72
          p = scan->ref;
1237
72
          free->ref = p;
1238
72
          p = GC_GET_PTR(p);
1239
72
          idx = gc_compress(GC_PTR2IDX(free));
1240
72
          GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
1241
72
          free++;
1242
72
          scan--;
1243
72
          if (scan <= end) {
1244
28
            break;
1245
28
          }
1246
72
        }
1247
164
      }
1248
125
    }
1249
51.7k
    GC_G(unused) = GC_INVALID;
1250
51.7k
    GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
1251
51.7k
  }
1252
151k
}
1253
1254
/* For all roots marked purple, traverse the graph, decrementing the reference
1255
 * count of their child nodes. Mark visited nodes grey so that they are not
1256
 * visited again. See MarkRoots() in Bacon & Rajan. */
1257
static void gc_mark_roots(gc_stack *stack)
1258
724
{
1259
724
  gc_root_buffer *current, *last;
1260
1261
724
  gc_compact();
1262
1263
724
  current = GC_IDX2PTR(GC_FIRST_ROOT);
1264
724
  last = GC_IDX2PTR(GC_G(first_unused));
1265
1.88k
  while (current != last) {
1266
1.15k
    if (GC_IS_ROOT(current->ref)) {
1267
1.15k
      if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
1268
1.13k
        GC_REF_SET_COLOR(current->ref, GC_GREY);
1269
1.13k
        gc_mark_grey(current->ref, stack);
1270
1.13k
      }
1271
1.15k
    }
1272
1.15k
    current++;
1273
1.15k
  }
1274
724
}
1275
1276
/* Traverse the reference graph of ref. Evaluate grey nodes and mark them
1277
 * black (to keep) or white (to free). Note that nodes initially marked white
1278
 * may later become black if they are visited from a live node.
1279
 * See Scan() in Bacon & Rajan. */
1280
static void gc_scan(zend_refcounted *ref, gc_stack *stack)
1281
1.13k
{
1282
1.13k
  HashTable *ht;
1283
1.13k
  Bucket *p;
1284
1.13k
  zval *zv;
1285
1.13k
  uint32_t n;
1286
1.13k
  GC_STACK_DCL(stack);
1287
1288
1.28k
tail_call:
1289
1.28k
  if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1290
4
    goto next;
1291
4
  }
1292
1293
1.28k
  if (GC_REFCOUNT(ref) > 0) {
1294
1.04k
    if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1295
1.04k
      GC_REF_SET_BLACK(ref);
1296
1.04k
      if (UNEXPECTED(!_stack->next)) {
1297
664
        gc_stack_next(_stack);
1298
664
      }
1299
      /* Split stack and reuse the tail */
1300
1.04k
      _stack->next->prev = NULL;
1301
1.04k
      gc_scan_black(ref, _stack->next);
1302
1.04k
      _stack->next->prev = _stack;
1303
1.04k
    }
1304
1.04k
    goto next;
1305
1.04k
  }
1306
1307
233
  if (GC_TYPE(ref) == IS_OBJECT) {
1308
69
    zend_object *obj = (zend_object*)ref;
1309
69
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1310
69
      zval *table;
1311
69
      int len;
1312
1313
69
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1314
7
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1315
7
        n = len;
1316
7
        zv = table;
1317
14
        for (; n != 0; n--) {
1318
7
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1319
7
          zval *entry = (zval*) Z_PTR_P(zv);
1320
7
          if (Z_OPT_COLLECTABLE_P(entry)) {
1321
7
            ref = Z_COUNTED_P(entry);
1322
7
            if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1323
1
              GC_REF_SET_COLOR(ref, GC_WHITE);
1324
1
              GC_STACK_PUSH(ref);
1325
1
            }
1326
7
          }
1327
7
          zv++;
1328
7
        }
1329
7
      }
1330
1331
69
      ht = obj->handlers->get_gc(obj, &table, &len);
1332
69
      n = len;
1333
69
      zv = table;
1334
69
      if (UNEXPECTED(ht)) {
1335
40
        if (GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1336
37
          GC_REF_SET_COLOR(ht, GC_WHITE);
1337
37
          GC_STACK_PUSH((zend_refcounted *) ht);
1338
47
          for (; n != 0; n--) {
1339
10
            if (Z_COLLECTABLE_P(zv)) {
1340
5
              ref = Z_COUNTED_P(zv);
1341
5
              if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1342
4
                GC_REF_SET_COLOR(ref, GC_WHITE);
1343
4
                GC_STACK_PUSH(ref);
1344
4
              }
1345
5
            }
1346
10
            zv++;
1347
10
          }
1348
37
          goto handle_ht;
1349
37
        }
1350
40
      }
1351
1352
100
handle_zvals:
1353
157
      for (; n != 0; n--) {
1354
118
        if (Z_COLLECTABLE_P(zv)) {
1355
71
          ref = Z_COUNTED_P(zv);
1356
71
          if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1357
61
            GC_REF_SET_COLOR(ref, GC_WHITE);
1358
61
            zv++;
1359
84
            while (--n) {
1360
23
              if (Z_COLLECTABLE_P(zv)) {
1361
17
                zend_refcounted *ref = Z_COUNTED_P(zv);
1362
17
                if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1363
11
                  GC_REF_SET_COLOR(ref, GC_WHITE);
1364
11
                  GC_STACK_PUSH(ref);
1365
11
                }
1366
17
              }
1367
23
              zv++;
1368
23
            }
1369
61
            goto tail_call;
1370
61
          }
1371
71
        }
1372
57
        zv++;
1373
57
      }
1374
100
    }
1375
164
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1376
116
    ht = (HashTable *)ref;
1377
116
    ZEND_ASSERT(ht != &EG(symbol_table));
1378
1379
153
handle_ht:
1380
153
    n = ht->nNumUsed;
1381
153
    if (HT_IS_PACKED(ht)) {
1382
68
            zv = ht->arPacked;
1383
68
            goto handle_zvals;
1384
68
    }
1385
1386
85
    p = ht->arData;
1387
158
    for (; n != 0; n--) {
1388
100
      zv = &p->val;
1389
100
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1390
7
        zv = Z_INDIRECT_P(zv);
1391
7
      }
1392
100
      if (Z_COLLECTABLE_P(zv)) {
1393
71
        ref = Z_COUNTED_P(zv);
1394
71
        if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1395
27
          GC_REF_SET_COLOR(ref, GC_WHITE);
1396
27
          p++;
1397
32
          while (--n) {
1398
5
            zv = &p->val;
1399
5
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1400
0
              zv = Z_INDIRECT_P(zv);
1401
0
            }
1402
5
            if (Z_COLLECTABLE_P(zv)) {
1403
4
              zend_refcounted *ref = Z_COUNTED_P(zv);
1404
4
              if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1405
1
                GC_REF_SET_COLOR(ref, GC_WHITE);
1406
1
                GC_STACK_PUSH(ref);
1407
1
              }
1408
4
            }
1409
5
            p++;
1410
5
          }
1411
27
          goto tail_call;
1412
27
        }
1413
71
      }
1414
73
      p++;
1415
73
    }
1416
85
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1417
48
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1418
43
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1419
43
      if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1420
5
        GC_REF_SET_COLOR(ref, GC_WHITE);
1421
5
        goto tail_call;
1422
5
      }
1423
43
    }
1424
48
  }
1425
1426
1.19k
next:
1427
1.19k
  ref = GC_STACK_POP();
1428
1.19k
  if (ref) {
1429
54
    goto tail_call;
1430
54
  }
1431
1.19k
}
1432
1433
/* Scan all roots, coloring grey nodes black or white */
1434
static void gc_scan_roots(gc_stack *stack)
1435
724
{
1436
724
  uint32_t idx, end;
1437
724
  gc_root_buffer *current;
1438
1439
  /* Root buffer might be reallocated during gc_scan,
1440
   * make sure to reload pointers. */
1441
724
  idx = GC_FIRST_ROOT;
1442
724
  end = GC_G(first_unused);
1443
1.88k
  while (idx != end) {
1444
1.15k
    current = GC_IDX2PTR(idx);
1445
1.15k
    if (GC_IS_ROOT(current->ref)) {
1446
1.15k
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1447
1.13k
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1448
1.13k
        gc_scan(current->ref, stack);
1449
1.13k
      }
1450
1.15k
    }
1451
1.15k
    idx++;
1452
1.15k
  }
1453
1454
  /* Scan extra roots added during gc_scan */
1455
728
  while (idx != GC_G(first_unused)) {
1456
4
    current = GC_IDX2PTR(idx);
1457
4
    if (GC_IS_ROOT(current->ref)) {
1458
4
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1459
4
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1460
4
        gc_scan(current->ref, stack);
1461
4
      }
1462
4
    }
1463
4
    idx++;
1464
4
  }
1465
724
}
1466
1467
/* Add a node to the buffer with the garbage flag, so that it will be
1468
 * destroyed and freed when the scan is complete. */
1469
static void gc_add_garbage(zend_refcounted *ref)
1470
21
{
1471
21
  uint32_t idx;
1472
21
  gc_root_buffer *buf;
1473
1474
21
  if (GC_HAS_UNUSED()) {
1475
0
    idx = GC_FETCH_UNUSED();
1476
21
  } else if (GC_HAS_NEXT_UNUSED()) {
1477
21
    idx = GC_FETCH_NEXT_UNUSED();
1478
21
  } else {
1479
0
    gc_grow_root_buffer();
1480
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
1481
0
      return;
1482
0
    }
1483
0
    idx = GC_FETCH_NEXT_UNUSED();
1484
0
  }
1485
1486
21
  buf = GC_IDX2PTR(idx);
1487
21
  buf->ref = GC_MAKE_GARBAGE(ref);
1488
1489
21
  idx = gc_compress(idx);
1490
21
  GC_REF_SET_INFO(ref, idx | GC_BLACK);
1491
21
  GC_G(num_roots)++;
1492
21
}
1493
1494
/* Traverse the reference graph from ref, marking any white nodes as garbage. */
1495
static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
1496
65
{
1497
65
  int count = 0;
1498
65
  HashTable *ht;
1499
65
  Bucket *p;
1500
65
  zval *zv;
1501
65
  uint32_t n;
1502
65
  GC_STACK_DCL(stack);
1503
1504
139
tail_call:
1505
  /* don't count references for compatibility ??? */
1506
139
  if (GC_TYPE(ref) != IS_REFERENCE) {
1507
96
    count++;
1508
96
  }
1509
1510
139
  if (GC_TYPE(ref) == IS_OBJECT) {
1511
51
    zend_object *obj = (zend_object*)ref;
1512
1513
51
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1514
51
      int len;
1515
51
      zval *table;
1516
1517
      /* optimization: color is GC_BLACK (0) */
1518
51
      if (!GC_INFO(ref)) {
1519
10
        gc_add_garbage(ref);
1520
10
      }
1521
51
      if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
1522
13
       && (obj->handlers->dtor_obj != zend_objects_destroy_object
1523
13
        || obj->ce->destructor != NULL)) {
1524
2
        *flags |= GC_HAS_DESTRUCTORS;
1525
2
      }
1526
1527
51
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1528
7
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1529
7
        n = len;
1530
7
        zv = table;
1531
14
        for (; n != 0; n--) {
1532
7
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1533
7
          zval *entry = (zval*) Z_PTR_P(zv);
1534
7
          if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) {
1535
1
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
1536
1
            GC_UNSET_FROM_WEAKMAP(entry);
1537
1
            ref = Z_COUNTED_P(entry);
1538
1
            GC_ADDREF(ref);
1539
1
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1540
0
              GC_REF_SET_BLACK(ref);
1541
0
              GC_STACK_PUSH(ref);
1542
0
            }
1543
1
          }
1544
7
          zv++;
1545
7
        }
1546
7
      }
1547
1548
51
      if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1549
3
        zend_weakmap_get_entry_gc(obj, &table, &len);
1550
3
        n = len;
1551
3
        zv = table;
1552
9
        for (; n != 0; n--) {
1553
6
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1554
6
          zval *entry = (zval*) Z_PTR_P(zv);
1555
6
          if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP(entry)) {
1556
6
            GC_UNSET_FROM_WEAKMAP_KEY(entry);
1557
6
            GC_UNSET_FROM_WEAKMAP(entry);
1558
6
            ref = Z_COUNTED_P(entry);
1559
6
            GC_ADDREF(ref);
1560
6
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1561
6
              GC_REF_SET_BLACK(ref);
1562
6
              GC_STACK_PUSH(ref);
1563
6
            }
1564
6
          }
1565
6
          zv++;
1566
6
        }
1567
3
        goto next;
1568
3
      }
1569
1570
48
      ht = obj->handlers->get_gc(obj, &table, &len);
1571
48
      n = len;
1572
48
      zv = table;
1573
48
      if (UNEXPECTED(ht)) {
1574
31
        GC_ADDREF(ht);
1575
31
        if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) {
1576
28
          GC_REF_SET_BLACK(ht);
1577
33
          for (; n != 0; n--) {
1578
5
            if (Z_COLLECTABLE_P(zv)) {
1579
2
              ref = Z_COUNTED_P(zv);
1580
2
              GC_ADDREF(ref);
1581
2
              if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1582
1
                GC_REF_SET_BLACK(ref);
1583
1
                GC_STACK_PUSH(ref);
1584
1
              }
1585
2
            }
1586
5
            zv++;
1587
5
          }
1588
28
          goto handle_ht;
1589
28
        }
1590
31
      }
1591
1592
59
handle_zvals:
1593
73
      for (; n != 0; n--) {
1594
51
        if (Z_COLLECTABLE_P(zv)) {
1595
45
          ref = Z_COUNTED_P(zv);
1596
45
          GC_ADDREF(ref);
1597
45
          if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1598
37
            GC_REF_SET_BLACK(ref);
1599
37
            zv++;
1600
47
            while (--n) {
1601
10
              if (Z_COLLECTABLE_P(zv)) {
1602
9
                zend_refcounted *ref = Z_COUNTED_P(zv);
1603
9
                GC_ADDREF(ref);
1604
9
                if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1605
3
                  GC_REF_SET_BLACK(ref);
1606
3
                  GC_STACK_PUSH(ref);
1607
3
                }
1608
9
              }
1609
10
              zv++;
1610
10
            }
1611
37
            goto tail_call;
1612
37
          }
1613
45
        }
1614
14
        zv++;
1615
14
      }
1616
59
    }
1617
88
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1618
    /* optimization: color is GC_BLACK (0) */
1619
45
    if (!GC_INFO(ref)) {
1620
11
      gc_add_garbage(ref);
1621
11
    }
1622
45
    ht = (zend_array*)ref;
1623
1624
73
handle_ht:
1625
73
    n = ht->nNumUsed;
1626
73
    if (HT_IS_PACKED(ht)) {
1627
39
      zv = ht->arPacked;
1628
39
      goto handle_zvals;
1629
39
    }
1630
1631
34
    p = ht->arData;
1632
51
    for (; n != 0; n--) {
1633
38
      zv = &p->val;
1634
38
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1635
3
        zv = Z_INDIRECT_P(zv);
1636
3
      }
1637
38
      if (Z_COLLECTABLE_P(zv)) {
1638
33
        ref = Z_COUNTED_P(zv);
1639
33
        GC_ADDREF(ref);
1640
33
        if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1641
21
          GC_REF_SET_BLACK(ref);
1642
21
          p++;
1643
25
          while (--n) {
1644
4
            zv = &p->val;
1645
4
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
1646
0
              zv = Z_INDIRECT_P(zv);
1647
0
            }
1648
4
            if (Z_COLLECTABLE_P(zv)) {
1649
3
              zend_refcounted *ref = Z_COUNTED_P(zv);
1650
3
              GC_ADDREF(ref);
1651
3
              if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1652
1
                GC_REF_SET_BLACK(ref);
1653
1
                GC_STACK_PUSH(ref);
1654
1
              }
1655
3
            }
1656
4
            p++;
1657
4
          }
1658
21
          goto tail_call;
1659
21
        }
1660
33
      }
1661
17
      p++;
1662
17
    }
1663
43
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1664
43
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1665
43
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1666
43
      GC_ADDREF(ref);
1667
43
      if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1668
5
        GC_REF_SET_BLACK(ref);
1669
5
        goto tail_call;
1670
5
      }
1671
43
    }
1672
43
  }
1673
1674
76
next:
1675
76
  ref = GC_STACK_POP();
1676
76
  if (ref) {
1677
11
    goto tail_call;
1678
11
  }
1679
1680
65
  return count;
1681
76
}
1682
1683
/* Traverse the reference graph from all roots, marking white nodes as garbage. */
1684
static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
1685
724
{
1686
724
  uint32_t idx, end;
1687
724
  zend_refcounted *ref;
1688
724
  int count = 0;
1689
724
  gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1690
724
  gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1691
1692
  /* remove non-garbage from the list */
1693
1.88k
  while (current != last) {
1694
1.16k
    if (GC_IS_ROOT(current->ref)) {
1695
1.16k
      if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
1696
1.08k
        GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
1697
1.08k
        gc_remove_from_roots(current);
1698
1.08k
      }
1699
1.16k
    }
1700
1.16k
    current++;
1701
1.16k
  }
1702
1703
724
  gc_compact();
1704
1705
  /* Root buffer might be reallocated during gc_collect_white,
1706
   * make sure to reload pointers. */
1707
724
  idx = GC_FIRST_ROOT;
1708
724
  end = GC_G(first_unused);
1709
799
  while (idx != end) {
1710
75
    current = GC_IDX2PTR(idx);
1711
75
    ref = current->ref;
1712
75
    ZEND_ASSERT(GC_IS_ROOT(ref));
1713
75
    current->ref = GC_MAKE_GARBAGE(ref);
1714
75
    if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1715
65
      GC_REF_SET_BLACK(ref);
1716
65
      count += gc_collect_white(ref, flags, stack);
1717
65
    }
1718
75
    idx++;
1719
75
  }
1720
1721
724
  return count;
1722
724
}
1723
1724
static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
1725
2
{
1726
2
  HashTable *ht;
1727
2
  Bucket *p;
1728
2
  zval *zv;
1729
2
  uint32_t n;
1730
2
  int count = 0;
1731
2
  GC_STACK_DCL(stack);
1732
1733
7
tail_call:
1734
7
  if (root) {
1735
2
    root = NULL;
1736
2
    count++;
1737
5
  } else if (GC_REF_ADDRESS(ref) != 0
1738
2
   && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1739
1
    GC_TRACE_REF(ref, "removing from buffer");
1740
1
    GC_REMOVE_FROM_BUFFER(ref);
1741
1
    count++;
1742
4
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
1743
2
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
1744
2
      ref = Z_COUNTED(((zend_reference*)ref)->val);
1745
2
      goto tail_call;
1746
2
    }
1747
0
    goto next;
1748
2
  } else {
1749
2
    goto next;
1750
2
  }
1751
1752
3
  if (GC_TYPE(ref) == IS_OBJECT) {
1753
2
    zend_object *obj = (zend_object*)ref;
1754
1755
2
    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1756
2
      int len;
1757
2
      zval *table;
1758
1759
2
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1760
0
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
1761
0
        n = len;
1762
0
        zv = table;
1763
0
        for (; n != 0; n--) {
1764
0
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1765
0
          zval *entry = (zval*) Z_PTR_P(zv);
1766
0
          if (Z_OPT_COLLECTABLE_P(entry)) {
1767
0
            ref = Z_COUNTED_P(entry);
1768
0
            GC_STACK_PUSH(ref);
1769
0
          }
1770
0
          zv++;
1771
0
        }
1772
0
      }
1773
1774
2
      ht = obj->handlers->get_gc(obj, &table, &len);
1775
2
      n = len;
1776
2
      zv = table;
1777
2
      if (UNEXPECTED(ht)) {
1778
1
        for (; n != 0; n--) {
1779
0
          if (Z_COLLECTABLE_P(zv)) {
1780
0
            ref = Z_COUNTED_P(zv);
1781
0
            GC_STACK_PUSH(ref);
1782
0
          }
1783
0
          zv++;
1784
0
        }
1785
1
        if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
1786
0
          GC_TRACE_REF(ht, "removing from buffer");
1787
0
          GC_REMOVE_FROM_BUFFER(ht);
1788
0
        }
1789
1
        goto handle_ht;
1790
1
      }
1791
1792
2
handle_zvals:
1793
2
      for (; n != 0; n--) {
1794
2
        if (Z_COLLECTABLE_P(zv)) {
1795
2
          ref = Z_COUNTED_P(zv);
1796
2
          zv++;
1797
2
          while (--n) {
1798
0
            if (Z_COLLECTABLE_P(zv)) {
1799
0
              zend_refcounted *ref = Z_COUNTED_P(zv);
1800
0
              GC_STACK_PUSH(ref);
1801
0
            }
1802
0
            zv++;
1803
0
          }
1804
2
          goto tail_call;
1805
2
        }
1806
0
        zv++;
1807
0
      }
1808
2
    }
1809
2
  } else if (GC_TYPE(ref) == IS_ARRAY) {
1810
1
    ht = (zend_array*)ref;
1811
1812
2
handle_ht:
1813
2
    n = ht->nNumUsed;
1814
2
    if (HT_IS_PACKED(ht)) {
1815
1
      zv = ht->arPacked;
1816
1
      goto handle_zvals;
1817
1
    }
1818
1819
1
    p = ht->arData;
1820
1
    for (; n != 0; n--) {
1821
1
      zv = &p->val;
1822
1
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
1823
1
        zv = Z_INDIRECT_P(zv);
1824
1
      }
1825
1
      if (Z_COLLECTABLE_P(zv)) {
1826
1
        ref = Z_COUNTED_P(zv);
1827
1
        p++;
1828
1
        while (--n) {
1829
0
          zv = &p->val;
1830
0
          if (Z_TYPE_P(zv) == IS_INDIRECT) {
1831
0
            zv = Z_INDIRECT_P(zv);
1832
0
          }
1833
0
          if (Z_COLLECTABLE_P(zv)) {
1834
0
            zend_refcounted *ref = Z_COUNTED_P(zv);
1835
0
            GC_STACK_PUSH(ref);
1836
0
          }
1837
0
          p++;
1838
0
        }
1839
1
        goto tail_call;
1840
1
      }
1841
0
      p++;
1842
0
    }
1843
1
  }
1844
1845
2
next:
1846
2
  ref = GC_STACK_POP();
1847
2
  if (ref) {
1848
0
    goto tail_call;
1849
0
  }
1850
1851
2
  return count;
1852
2
}
1853
1854
static void zend_get_gc_buffer_release(void);
1855
static void zend_gc_check_root_tmpvars(void);
1856
static void zend_gc_remove_root_tmpvars(void);
1857
1858
static zend_internal_function gc_destructor_fiber;
1859
1860
static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void)
1861
0
{
1862
0
  zend_error_noreturn(E_ERROR, "Unable to create destructor fiber");
1863
0
}
1864
1865
static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void)
1866
0
{
1867
0
  zend_error_noreturn(E_ERROR, "Unable to start destructor fiber");
1868
0
}
1869
1870
/* Call destructors for garbage in the buffer. */
1871
static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber)
1872
2
{
1873
2
  gc_root_buffer *current;
1874
2
  zend_refcounted *p;
1875
1876
  /* The root buffer might be reallocated during destructors calls,
1877
   * make sure to reload pointers as necessary. */
1878
6
  while (idx != end) {
1879
4
    current = GC_IDX2PTR(idx);
1880
4
    if (GC_IS_DTOR_GARBAGE(current->ref)) {
1881
2
      p = GC_GET_PTR(current->ref);
1882
      /* Mark this is as a normal root for the next GC run */
1883
2
      current->ref = p;
1884
      /* Double check that the destructor hasn't been called yet. It
1885
       * could have already been invoked indirectly by some other
1886
       * destructor. */
1887
2
      if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1888
2
        if (fiber != NULL) {
1889
0
          GC_G(dtor_idx) = idx;
1890
0
        }
1891
2
        zend_object *obj = (zend_object*)p;
1892
2
        GC_TRACE_REF(obj, "calling destructor");
1893
2
        GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
1894
2
        GC_ADDREF(obj);
1895
2
        obj->handlers->dtor_obj(obj);
1896
2
        GC_TRACE_REF(obj, "returned from destructor");
1897
2
        GC_DELREF(obj);
1898
2
        if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) {
1899
          /* We resumed after suspension */
1900
0
          gc_check_possible_root((zend_refcounted*)&obj->gc);
1901
0
          return FAILURE;
1902
0
        }
1903
2
      }
1904
2
    }
1905
4
    idx++;
1906
4
  }
1907
1908
2
  return SUCCESS;
1909
2
}
1910
1911
static zend_fiber *gc_create_destructor_fiber(void)
1912
0
{
1913
0
  zval zobj;
1914
0
  zend_fiber *fiber;
1915
1916
0
  GC_TRACE("starting destructor fiber");
1917
1918
0
  if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) {
1919
0
    gc_create_destructor_fiber_error();
1920
0
  }
1921
1922
0
  fiber = (zend_fiber *)Z_OBJ(zobj);
1923
0
  fiber->fci.size = sizeof(fiber->fci);
1924
0
  fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber;
1925
1926
0
  GC_G(dtor_fiber) = fiber;
1927
1928
0
  if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) {
1929
0
    gc_start_destructor_fiber_error();
1930
0
  }
1931
1932
0
  return fiber;
1933
0
}
1934
1935
static zend_never_inline void gc_call_destructors_in_fiber(uint32_t end)
1936
0
{
1937
0
  ZEND_ASSERT(!GC_G(dtor_fiber_running));
1938
1939
0
  zend_fiber *fiber = GC_G(dtor_fiber);
1940
1941
0
  GC_G(dtor_idx) = GC_FIRST_ROOT;
1942
0
  GC_G(dtor_end) = GC_G(first_unused);
1943
1944
0
  if (UNEXPECTED(!fiber)) {
1945
0
    fiber = gc_create_destructor_fiber();
1946
0
  } else {
1947
0
    zend_fiber_resume(fiber, NULL, NULL);
1948
0
  }
1949
1950
0
  for (;;) {
1951
    /* At this point, fiber has executed until suspension */
1952
0
    GC_TRACE("resumed from destructor fiber");
1953
1954
0
    if (UNEXPECTED(GC_G(dtor_fiber_running))) {
1955
      /* Fiber was suspended by a destructor. Start a new one for the
1956
       * remaining destructors. */
1957
0
      GC_TRACE("destructor fiber suspended by destructor");
1958
0
      GC_G(dtor_fiber) = NULL;
1959
0
      GC_G(dtor_idx)++;
1960
      /* We do not own the fiber anymore. It may be collected if the
1961
       * application does not reference it. */
1962
0
      zend_object_release(&fiber->std);
1963
0
      fiber = gc_create_destructor_fiber();
1964
0
      continue;
1965
0
    } else {
1966
      /* Fiber suspended itself after calling all destructors */
1967
0
      GC_TRACE("destructor fiber suspended itself");
1968
0
      break;
1969
0
    }
1970
0
  }
1971
0
}
1972
1973
/* Perform a garbage collection run. The default implementation of gc_collect_cycles. */
1974
ZEND_API int zend_gc_collect_cycles(void)
1975
150k
{
1976
150k
  int total_count = 0;
1977
150k
  bool should_rerun_gc = false;
1978
150k
  bool did_rerun_gc = false;
1979
1980
150k
  zend_hrtime_t start_time = zend_hrtime();
1981
150k
  if (GC_G(num_roots) && !GC_G(gc_active)) {
1982
722
    zend_gc_remove_root_tmpvars();
1983
722
  }
1984
1985
150k
rerun_gc:
1986
150k
  if (GC_G(num_roots)) {
1987
724
    int count;
1988
724
    gc_root_buffer *current, *last;
1989
724
    zend_refcounted *p;
1990
724
    uint32_t gc_flags = 0;
1991
724
    uint32_t idx, end;
1992
724
    gc_stack stack;
1993
1994
724
    stack.prev = NULL;
1995
724
    stack.next = NULL;
1996
1997
724
    if (GC_G(gc_active)) {
1998
0
      GC_G(collector_time) += zend_hrtime() - start_time;
1999
0
      return 0;
2000
0
    }
2001
2002
724
    GC_TRACE("Collecting cycles");
2003
724
    GC_G(gc_runs)++;
2004
724
    GC_G(gc_active) = 1;
2005
2006
724
    GC_TRACE("Marking roots");
2007
724
    gc_mark_roots(&stack);
2008
724
    GC_TRACE("Scanning roots");
2009
724
    gc_scan_roots(&stack);
2010
2011
724
    GC_TRACE("Collecting roots");
2012
724
    count = gc_collect_roots(&gc_flags, &stack);
2013
2014
724
    if (!GC_G(num_roots)) {
2015
      /* nothing to free */
2016
662
      GC_TRACE("Nothing to free");
2017
662
      gc_stack_free(&stack);
2018
662
      GC_G(gc_active) = 0;
2019
662
      goto finish;
2020
662
    }
2021
2022
62
    end = GC_G(first_unused);
2023
2024
62
    if (gc_flags & GC_HAS_DESTRUCTORS) {
2025
2
      GC_TRACE("Calling destructors");
2026
2027
      /* During a destructor call, new externally visible references to nested data may
2028
       * be introduced. These references can be introduced in a way that does not
2029
       * modify any refcounts, so we have no real way to detect this situation
2030
       * short of rerunning full GC tracing. What we do instead is to only run
2031
       * destructors at this point and automatically re-run GC afterwards. */
2032
2
      should_rerun_gc = true;
2033
2034
      /* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
2035
       * color them purple. This serves a double purpose: First, they should be
2036
       * considered new potential roots for the next GC run. Second, it will prevent
2037
       * their removal from the root buffer by nested data removal. */
2038
2
      idx = GC_FIRST_ROOT;
2039
2
      current = GC_IDX2PTR(GC_FIRST_ROOT);
2040
6
      while (idx != end) {
2041
4
        if (GC_IS_GARBAGE(current->ref)) {
2042
4
          p = GC_GET_PTR(current->ref);
2043
4
          if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
2044
2
            zend_object *obj = (zend_object *) p;
2045
2
            if (obj->handlers->dtor_obj != zend_objects_destroy_object
2046
2
              || obj->ce->destructor) {
2047
2
              current->ref = GC_MAKE_DTOR_GARBAGE(obj);
2048
2
              GC_REF_SET_COLOR(obj, GC_PURPLE);
2049
2
            } else {
2050
0
              GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
2051
0
            }
2052
2
          }
2053
4
        }
2054
4
        current++;
2055
4
        idx++;
2056
4
      }
2057
2058
      /* Remove nested data for objects on which a destructor will be called.
2059
       * This will not remove the objects themselves, as they have been colored
2060
       * purple. */
2061
2
      idx = GC_FIRST_ROOT;
2062
2
      current = GC_IDX2PTR(GC_FIRST_ROOT);
2063
6
      while (idx != end) {
2064
4
        if (GC_IS_DTOR_GARBAGE(current->ref)) {
2065
2
          p = GC_GET_PTR(current->ref);
2066
2
          count -= gc_remove_nested_data_from_buffer(p, current, &stack);
2067
2
        }
2068
4
        current++;
2069
4
        idx++;
2070
4
      }
2071
2072
      /* Actually call destructors. */
2073
2
      zend_hrtime_t dtor_start_time = zend_hrtime();
2074
2
      if (EXPECTED(!EG(active_fiber))) {
2075
2
        gc_call_destructors(GC_FIRST_ROOT, end, NULL);
2076
2
      } else {
2077
0
        gc_call_destructors_in_fiber(end);
2078
0
      }
2079
2
      GC_G(dtor_time) += zend_hrtime() - dtor_start_time;
2080
2081
2
      if (GC_G(gc_protected)) {
2082
        /* something went wrong */
2083
0
        zend_get_gc_buffer_release();
2084
0
        GC_G(collector_time) += zend_hrtime() - start_time;
2085
0
        return 0;
2086
0
      }
2087
2
    }
2088
2089
62
    gc_stack_free(&stack);
2090
2091
    /* Destroy zvals. The root buffer may be reallocated. */
2092
62
    GC_TRACE("Destroying zvals");
2093
62
    zend_hrtime_t free_start_time = zend_hrtime();
2094
62
    idx = GC_FIRST_ROOT;
2095
158
    while (idx != end) {
2096
96
      current = GC_IDX2PTR(idx);
2097
96
      if (GC_IS_GARBAGE(current->ref)) {
2098
67
        p = GC_GET_PTR(current->ref);
2099
67
        GC_TRACE_REF(p, "destroying");
2100
67
        if (GC_TYPE(p) == IS_OBJECT) {
2101
30
          zend_object *obj = (zend_object*)p;
2102
2103
30
          EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
2104
30
          GC_TYPE_INFO(obj) = GC_NULL |
2105
30
            (GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
2106
          /* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
2107
30
          current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
2108
30
          if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
2109
30
            GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
2110
30
            GC_ADDREF(obj);
2111
30
            obj->handlers->free_obj(obj);
2112
30
            GC_DELREF(obj);
2113
30
          }
2114
2115
30
          ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
2116
37
        } else if (GC_TYPE(p) == IS_ARRAY) {
2117
37
          zend_array *arr = (zend_array*)p;
2118
2119
37
          GC_TYPE_INFO(arr) = GC_NULL |
2120
37
            (GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);
2121
2122
          /* GC may destroy arrays with rc>1. This is valid and safe. */
2123
37
          HT_ALLOW_COW_VIOLATION(arr);
2124
2125
37
          zend_hash_destroy(arr);
2126
37
        }
2127
67
      }
2128
96
      idx++;
2129
96
    }
2130
2131
    /* Free objects */
2132
62
    current = GC_IDX2PTR(GC_FIRST_ROOT);
2133
62
    last = GC_IDX2PTR(end);
2134
158
    while (current != last) {
2135
96
      if (GC_IS_GARBAGE(current->ref)) {
2136
67
        p = GC_GET_PTR(current->ref);
2137
67
        GC_LINK_UNUSED(current);
2138
67
        GC_G(num_roots)--;
2139
67
        efree(p);
2140
67
      }
2141
96
      current++;
2142
96
    }
2143
2144
62
    GC_G(free_time) += zend_hrtime() - free_start_time;
2145
2146
62
    GC_TRACE("Collection finished");
2147
62
    GC_G(collected) += count;
2148
62
    total_count += count;
2149
62
    GC_G(gc_active) = 0;
2150
62
  }
2151
2152
149k
  gc_compact();
2153
2154
  /* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
2155
   * up. We do this only once: If we encounter more destructors on the second run, we'll not
2156
   * run GC another time. */
2157
149k
  if (should_rerun_gc && !did_rerun_gc) {
2158
2
    did_rerun_gc = true;
2159
2
    goto rerun_gc;
2160
2
  }
2161
2162
150k
finish:
2163
150k
  zend_get_gc_buffer_release();
2164
2165
  /* Prevent GC from running during zend_gc_check_root_tmpvars, before
2166
   * gc_threshold is adjusted, as this may result in unbounded recursion */
2167
150k
  GC_G(gc_active) = 1;
2168
150k
  zend_gc_check_root_tmpvars();
2169
150k
  GC_G(gc_active) = 0;
2170
2171
150k
  GC_G(collector_time) += zend_hrtime() - start_time;
2172
150k
  return total_count;
2173
149k
}
2174
2175
ZEND_API void zend_gc_get_status(zend_gc_status *status)
2176
3
{
2177
3
  status->active = GC_G(gc_active);
2178
3
  status->gc_protected = GC_G(gc_protected);
2179
3
  status->full = GC_G(gc_full);
2180
3
  status->runs = GC_G(gc_runs);
2181
3
  status->collected = GC_G(collected);
2182
3
  status->threshold = GC_G(gc_threshold);
2183
3
  status->buf_size = GC_G(buf_size);
2184
3
  status->num_roots = GC_G(num_roots);
2185
3
  status->application_time = zend_hrtime() - GC_G(activated_at);
2186
3
  status->collector_time = GC_G(collector_time);
2187
3
  status->dtor_time = GC_G(dtor_time);
2188
3
  status->free_time = GC_G(free_time);
2189
3
}
2190
2191
715
ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
2192
  /* There can only be one get_gc() call active at a time,
2193
   * so there only needs to be one buffer. */
2194
715
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2195
715
  gc_buffer->cur = gc_buffer->start;
2196
715
  return gc_buffer;
2197
715
}
2198
2199
52
ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
2200
52
  size_t old_capacity = gc_buffer->end - gc_buffer->start;
2201
52
  size_t new_capacity = old_capacity == 0 ? 64 : old_capacity * 2;
2202
52
  gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval));
2203
52
  gc_buffer->end = gc_buffer->start + new_capacity;
2204
52
  gc_buffer->cur = gc_buffer->start + old_capacity;
2205
52
}
2206
2207
150k
static void zend_get_gc_buffer_release(void) {
2208
150k
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2209
150k
  efree(gc_buffer->start);
2210
150k
  gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
2211
150k
}
2212
2213
/* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
2214
 * cycles. However, there are some rare exceptions where this is possible, in which case we rely
2215
 * on the producing code to root the value. If a GC run occurs between the rooting and consumption
2216
 * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
2217
150k
static void zend_gc_check_root_tmpvars(void) {
2218
150k
  zend_execute_data *ex = EG(current_execute_data);
2219
150k
  for (; ex; ex = ex->prev_execute_data) {
2220
67
    zend_function *func = ex->func;
2221
67
    if (!func || !ZEND_USER_CODE(func->type)) {
2222
33
      continue;
2223
33
    }
2224
2225
34
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2226
72
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2227
41
      const zend_live_range *range = &func->op_array.live_range[i];
2228
41
      if (range->start > op_num) {
2229
3
        break;
2230
3
      }
2231
38
      if (range->end <= op_num) {
2232
33
        continue;
2233
33
      }
2234
2235
5
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2236
5
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2237
5
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2238
5
        zval *var = ZEND_CALL_VAR(ex, var_num);
2239
5
        if (Z_COLLECTABLE_P(var)) {
2240
5
          gc_check_possible_root(Z_COUNTED_P(var));
2241
5
        }
2242
5
      }
2243
5
    }
2244
34
  }
2245
150k
}
2246
2247
722
static void zend_gc_remove_root_tmpvars(void) {
2248
722
  zend_execute_data *ex = EG(current_execute_data);
2249
773
  for (; ex; ex = ex->prev_execute_data) {
2250
51
    zend_function *func = ex->func;
2251
51
    if (!func || !ZEND_USER_CODE(func->type)) {
2252
25
      continue;
2253
25
    }
2254
2255
26
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2256
60
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2257
34
      const zend_live_range *range = &func->op_array.live_range[i];
2258
34
      if (range->start > op_num) {
2259
0
        break;
2260
0
      }
2261
34
      if (range->end <= op_num) {
2262
29
        continue;
2263
29
      }
2264
2265
5
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2266
5
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2267
5
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2268
5
        zval *var = ZEND_CALL_VAR(ex, var_num);
2269
5
        if (Z_COLLECTABLE_P(var)) {
2270
5
          GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var));
2271
5
        }
2272
5
      }
2273
5
    }
2274
26
  }
2275
722
}
2276
2277
#if GC_BENCH
2278
void gc_bench_print(void)
2279
{
2280
  fprintf(stderr, "GC Statistics\n");
2281
  fprintf(stderr, "-------------\n");
2282
  fprintf(stderr, "Runs:               %d\n", GC_G(gc_runs));
2283
  fprintf(stderr, "Collected:          %d\n", GC_G(collected));
2284
  fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length));
2285
  fprintf(stderr, "Root buffer peak:   %d\n\n", GC_G(root_buf_peak));
2286
  fprintf(stderr, "      Possible            Remove from  Marked\n");
2287
  fprintf(stderr, "        Root    Buffered     buffer     grey\n");
2288
  fprintf(stderr, "      --------  --------  -----------  ------\n");
2289
  fprintf(stderr, "ZVAL  %8d  %8d  %9d  %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey));
2290
}
2291
#endif
2292
2293
#ifdef ZTS
2294
size_t zend_gc_globals_size(void)
2295
{
2296
  return sizeof(zend_gc_globals);
2297
}
2298
#endif
2299
2300
static ZEND_FUNCTION(gc_destructor_fiber)
2301
0
{
2302
0
  uint32_t idx, end;
2303
2304
0
  zend_fiber *fiber = GC_G(dtor_fiber);
2305
0
  ZEND_ASSERT(fiber != NULL);
2306
0
  ZEND_ASSERT(fiber == EG(active_fiber));
2307
2308
0
  for (;;) {
2309
0
    GC_G(dtor_fiber_running) = true;
2310
2311
0
    idx = GC_G(dtor_idx);
2312
0
    end = GC_G(dtor_end);
2313
0
    if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) {
2314
      /* We resumed after being suspended by a destructor */
2315
0
      return;
2316
0
    }
2317
2318
    /* We have called all destructors. Suspend fiber until the next GC run
2319
     */
2320
0
    GC_G(dtor_fiber_running) = false;
2321
0
    zend_fiber_suspend(fiber, NULL, NULL);
2322
2323
0
    if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) {
2324
      /* Fiber is being destroyed by shutdown sequence */
2325
0
      if (GC_G(dtor_fiber) == fiber) {
2326
0
        GC_G(dtor_fiber) = NULL;
2327
0
      }
2328
0
      GC_DELREF(&fiber->std);
2329
0
      gc_check_possible_root((zend_refcounted*)&fiber->std.gc);
2330
0
      return;
2331
0
    }
2332
0
  }
2333
0
}
2334
2335
static zend_internal_function gc_destructor_fiber = {
2336
  .type = ZEND_INTERNAL_FUNCTION,
2337
  .fn_flags = ZEND_ACC_PUBLIC,
2338
  .handler = ZEND_FN(gc_destructor_fiber),
2339
};
2340
2341
void gc_init(void)
2342
2
{
2343
2
  gc_destructor_fiber.function_name = zend_string_init_interned(
2344
2
      "gc_destructor_fiber",
2345
2
      strlen("gc_destructor_fiber"),
2346
      true);
2347
2
}