Coverage Report

Created: 2026-01-18 06:48

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/php-src/Zend/zend_gc.c
Line
Count
Source
1
/*
2
   +----------------------------------------------------------------------+
3
   | Zend Engine                                                          |
4
   +----------------------------------------------------------------------+
5
   | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
6
   +----------------------------------------------------------------------+
7
   | This source file is subject to version 2.00 of the Zend license,     |
8
   | that is bundled with this package in the file LICENSE, and is        |
9
   | available through the world-wide-web at the following url:           |
10
   | http://www.zend.com/license/2_00.txt.                                |
11
   | If you did not receive a copy of the Zend license and are unable to  |
12
   | obtain it through the world-wide-web, please send a note to          |
13
   | license@zend.com so we can mail you a copy immediately.              |
14
   +----------------------------------------------------------------------+
15
   | Authors: David Wang <planetbeing@gmail.com>                          |
16
   |          Dmitry Stogov <dmitry@php.net>                              |
17
   +----------------------------------------------------------------------+
18
*/
19
20
/**
21
 * zend_gc_collect_cycles
22
 * ======================
23
 *
24
 * Colors and its meaning
25
 * ----------------------
26
 *
27
 * BLACK  (GC_BLACK)   - In use or free.
28
 * GREY   (GC_GREY)    - Possible member of cycle.
29
 * WHITE  (GC_WHITE)   - Member of garbage cycle.
30
 * PURPLE (GC_PURPLE)  - Possible root of cycle.
31
 *
32
 * Colors described in the paper but not used
33
 * ------------------------------------------
34
 *
35
 * GREEN - Acyclic
36
 * RED   - Candidate cycle undergoing
37
 * ORANGE - Candidate cycle awaiting epoch boundary.
38
 *
39
 *
40
 * Flow
41
 * =====
42
 *
43
 * The garbage collect cycle starts from 'gc_mark_roots', which traverses the
44
 * possible roots, and calls mark_grey for roots are marked purple with
45
 * depth-first traverse.
46
 *
47
 * After all possible roots are traversed and marked,
48
 * gc_scan_roots will be called, and each root will be called with
49
 * gc_scan(root->ref)
50
 *
51
 * gc_scan checks the colors of possible members.
52
 *
53
 * If the node is marked as grey and the refcount > 0
54
 *    gc_scan_black will be called on that node to scan it's subgraph.
55
 * otherwise (refcount == 0), it marks the node white.
56
 *
57
 * A node MAY be added to possible roots when ZEND_UNSET_VAR happens or
58
 * zend_assign_to_variable is called only when possible garbage node is
59
 * produced.
60
 * gc_possible_root() will be called to add the nodes to possible roots.
61
 *
62
 *
63
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64
 * get the object properties to scan.
65
 *
66
 *
67
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68
 */
69
#include "zend.h"
70
#include "zend_API.h"
71
#include "zend_compile.h"
72
#include "zend_errors.h"
73
#include "zend_fibers.h"
74
#include "zend_hrtime.h"
75
#include "zend_portability.h"
76
#include "zend_types.h"
77
#include "zend_weakrefs.h"
78
#include "zend_string.h"
79
#include "zend_exceptions.h"
80
81
#ifndef GC_BENCH
82
# define GC_BENCH 0
83
#endif
84
85
#ifndef ZEND_GC_DEBUG
86
# define ZEND_GC_DEBUG 0
87
#endif
88
89
/* GC_INFO layout */
90
2.78k
#define GC_ADDRESS  0x0fffffu
91
8.69k
#define GC_COLOR    0x300000u
92
93
#define GC_BLACK    0x000000u /* must be zero */
94
#define GC_WHITE    0x100000u
95
#define GC_GREY     0x200000u
96
#define GC_PURPLE   0x300000u
97
98
/* Debug tracing */
99
#if ZEND_GC_DEBUG > 1
100
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
101
# define GC_TRACE_REF(ref, format, ...) \
102
  do { \
103
    gc_trace_ref((zend_refcounted *) ref); \
104
    fprintf(stderr, format "\n", ##__VA_ARGS__); \
105
  } while (0)
106
# define GC_TRACE_SET_COLOR(ref, color) \
107
  GC_TRACE_REF(ref, "->%s", gc_color_name(color))
108
#else
109
# define GC_TRACE_REF(ref, format, ...)
110
# define GC_TRACE_SET_COLOR(ref, new_color)
111
# define GC_TRACE(str)
112
#endif
113
114
/* GC_INFO access */
115
#define GC_REF_ADDRESS(ref) \
116
2.78k
  (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
117
118
#define GC_REF_COLOR(ref) \
119
  (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
120
121
#define GC_REF_CHECK_COLOR(ref, color) \
122
5.79k
  ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
123
124
5.68k
#define GC_REF_SET_INFO(ref, info) do { \
125
5.68k
    GC_TYPE_INFO(ref) = \
126
5.68k
      (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
127
5.68k
      ((info) << GC_INFO_SHIFT); \
128
5.68k
  } while (0)
129
130
1.47k
#define GC_REF_SET_COLOR(ref, c) do { \
131
1.47k
    GC_TRACE_SET_COLOR(ref, c); \
132
1.47k
    GC_TYPE_INFO(ref) = \
133
1.47k
      (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
134
1.47k
      ((c) << GC_INFO_SHIFT); \
135
1.47k
  } while (0)
136
137
1.42k
#define GC_REF_SET_BLACK(ref) do { \
138
1.42k
    GC_TRACE_SET_COLOR(ref, GC_BLACK); \
139
1.42k
    GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
140
1.42k
  } while (0)
141
142
#define GC_REF_SET_PURPLE(ref) do { \
143
    GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
144
    GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
145
  } while (0)
146
147
/* bit stealing tags for gc_root_buffer.ref */
148
268
#define GC_BITS    0x3
149
150
165
#define GC_ROOT    0x0 /* possible root of circular garbage     */
151
2.92k
#define GC_UNUSED  0x1 /* part of linked list of unused buffers */
152
0
#define GC_GARBAGE 0x2 /* garbage to delete                     */
153
0
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */
154
155
#define GC_GET_PTR(ptr) \
156
14
  ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
157
158
#define GC_IS_ROOT(ptr) \
159
165
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
160
#define GC_IS_UNUSED(ptr) \
161
89
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
162
#define GC_IS_GARBAGE(ptr) \
163
0
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
164
#define GC_IS_DTOR_GARBAGE(ptr) \
165
0
  ((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)
166
167
#define GC_MAKE_GARBAGE(ptr) \
168
0
  ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
169
#define GC_MAKE_DTOR_GARBAGE(ptr) \
170
0
  ((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
171
172
/* GC address conversion */
173
8.54k
#define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
174
2.85k
#define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))
175
176
/* Get the value to be placed in an unused buffer entry with the specified next unused list index */
177
2.83k
#define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
178
/* Get the index of the next item in the unused list from the given root buffer entry. */
179
2.68k
#define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
180
181
/* GC buffers */
182
157
#define GC_INVALID           0
183
431
#define GC_FIRST_ROOT        1
184
185
2
#define GC_DEFAULT_BUF_SIZE  (16 * 1024)
186
0
#define GC_BUF_GROW_STEP     (128 * 1024)
187
188
0
#define GC_MAX_UNCOMPRESSED  (512 * 1024)
189
0
#define GC_MAX_BUF_SIZE      0x40000000
190
191
2
#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
192
0
#define GC_THRESHOLD_STEP    10000
193
0
#define GC_THRESHOLD_MAX     1000000000
194
0
#define GC_THRESHOLD_TRIGGER 100
195
196
/* GC flags */
197
0
#define GC_HAS_DESTRUCTORS  (1<<0)
198
199
/* Weak maps */
200
0
#define Z_FROM_WEAKMAP_KEY    (1<<0)
201
0
#define Z_FROM_WEAKMAP      (1<<1)
202
203
/* The WeakMap entry zv is reachable from roots by following the virtual
204
 * reference from the a WeakMap key to the entry */
205
#define GC_FROM_WEAKMAP_KEY(zv) \
206
0
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT))
207
208
0
#define GC_SET_FROM_WEAKMAP_KEY(zv) do {                    \
209
0
  zval *_z = (zv);                               \
210
0
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
211
0
} while (0)
212
213
0
#define GC_UNSET_FROM_WEAKMAP_KEY(zv) do {                    \
214
0
  zval *_z = (zv);                               \
215
0
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
216
0
} while (0)
217
218
/* The WeakMap entry zv is reachable from roots by following the reference from
219
 * the WeakMap */
220
#define GC_FROM_WEAKMAP(zv) \
221
0
  (Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT))
222
223
0
#define GC_SET_FROM_WEAKMAP(zv) do {                        \
224
0
  zval *_z = (zv);                               \
225
0
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
226
0
} while (0)
227
228
0
#define GC_UNSET_FROM_WEAKMAP(zv) do {                      \
229
0
  zval *_z = (zv);                               \
230
0
  Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
231
0
} while (0)
232
233
/* unused buffers */
234
235
/* Are there any unused root buffer entries? */
236
#define GC_HAS_UNUSED() \
237
0
  (GC_G(unused) != GC_INVALID)
238
239
/* Get the next unused entry and remove it from the list */
240
#define GC_FETCH_UNUSED() \
241
2.68k
  gc_fetch_unused()
242
243
/* Add a root buffer entry to the unused list */
244
#define GC_LINK_UNUSED(root) \
245
2.83k
  gc_link_unused(root)
246
247
#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
248
  (GC_G(first_unused) < GC_G(gc_threshold))
249
#define GC_HAS_NEXT_UNUSED() \
250
0
  (GC_G(first_unused) != GC_G(buf_size))
251
#define GC_FETCH_NEXT_UNUSED() \
252
150
  gc_fetch_next_unused()
253
254
ZEND_API int (*gc_collect_cycles)(void);
255
256
/* The type of a root buffer entry.
257
 *
258
 * The lower two bits are used for flags and need to be masked out to
259
 * reconstruct a pointer.
260
 *
261
 * When a node in the root buffer is removed, the non-flag bits of the
262
 * unused entry are used to store the index of the next entry in the unused
263
 * list. */
264
typedef struct _gc_root_buffer {
265
  zend_refcounted  *ref;
266
} gc_root_buffer;
267
268
typedef struct _zend_gc_globals {
269
  /* The root buffer, which stores possible roots of reference cycles. It is
270
   * also used to store garbage to be collected at the end of a run.
271
   * A single array which is reallocated as necessary. */
272
  gc_root_buffer   *buf;
273
274
  bool         gc_enabled;
275
  bool         gc_active;        /* GC currently running, forbid nested GC */
276
  bool         gc_protected;     /* GC protected, forbid root additions */
277
  bool         gc_full;
278
279
  uint32_t          unused;     /* linked list of unused buffers    */
280
  uint32_t          first_unused;   /* first unused buffer              */
281
  uint32_t          gc_threshold;     /* GC collection threshold          */
282
  uint32_t          buf_size;     /* size of the GC buffer            */
283
  uint32_t          num_roots;    /* number of roots in GC buffer     */
284
285
  uint32_t gc_runs;         /* number of GC runs since reset */
286
  uint32_t collected;         /* number of collected nodes since reset */
287
288
  zend_hrtime_t activated_at;     /* the timestamp of the last reset */
289
  zend_hrtime_t collector_time;   /* time spent running GC (ns) */
290
  zend_hrtime_t dtor_time;      /* time spent calling destructors (ns) */
291
  zend_hrtime_t free_time;      /* time spent destroying nodes and freeing memory (ns) */
292
293
  uint32_t dtor_idx;      /* root buffer index */
294
  uint32_t dtor_end;
295
  zend_fiber *dtor_fiber;
296
  bool dtor_fiber_running;
297
298
#if GC_BENCH
299
  uint32_t root_buf_length;
300
  uint32_t root_buf_peak;
301
  uint32_t zval_possible_root;
302
  uint32_t zval_buffered;
303
  uint32_t zval_remove_from_buffer;
304
  uint32_t zval_marked_grey;
305
#endif
306
} zend_gc_globals;
307
308
#ifdef ZTS
309
static int gc_globals_id;
310
static size_t gc_globals_offset;
311
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
312
#else
313
28.9k
#define GC_G(v) (gc_globals.v)
314
static zend_gc_globals gc_globals;
315
#endif
316
317
#if GC_BENCH
318
# define GC_BENCH_INC(counter) GC_G(counter)++
319
# define GC_BENCH_DEC(counter) GC_G(counter)--
320
# define GC_BENCH_PEAK(peak, counter) do {    \
321
    if (GC_G(counter) > GC_G(peak)) {   \
322
      GC_G(peak) = GC_G(counter);     \
323
    }                   \
324
  } while (0)
325
#else
326
# define GC_BENCH_INC(counter)
327
# define GC_BENCH_DEC(counter)
328
# define GC_BENCH_PEAK(peak, counter)
329
#endif
330
331
332
0
#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
333
334
typedef struct _gc_stack gc_stack;
335
336
/* The stack used for graph traversal is stored as a linked list of segments */
337
struct _gc_stack {
338
  gc_stack        *prev;
339
  gc_stack        *next;
340
  zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
341
};
342
343
#define GC_STACK_DCL(init) \
344
161
  gc_stack *_stack = init; \
345
161
  size_t    _top = 0;
346
347
#define GC_STACK_PUSH(ref) \
348
1.26k
  gc_stack_push(&_stack, &_top, ref);
349
350
#define GC_STACK_POP() \
351
1.42k
  gc_stack_pop(&_stack, &_top)
352
353
static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
354
27
{
355
27
  if (UNEXPECTED(!stack->next)) {
356
27
    gc_stack *segment = emalloc(sizeof(gc_stack));
357
27
    segment->prev = stack;
358
27
    segment->next = NULL;
359
27
    stack->next = segment;
360
27
  }
361
27
  return stack->next;
362
27
}
363
364
static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
365
1.26k
{
366
1.26k
  if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
367
0
    (*stack) = gc_stack_next(*stack);
368
0
    (*top) = 0;
369
0
  }
370
1.26k
  (*stack)->data[(*top)++] = ref;
371
1.26k
}
372
373
static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
374
1.42k
{
375
1.42k
  if (UNEXPECTED((*top) == 0)) {
376
161
    if (!(*stack)->prev) {
377
161
      return NULL;
378
161
    } else {
379
0
      (*stack) = (*stack)->prev;
380
0
      (*top) = GC_STACK_SEGMENT_SIZE - 1;
381
0
      return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
382
0
    }
383
1.26k
  } else {
384
1.26k
    return (*stack)->data[--(*top)];
385
1.26k
  }
386
1.42k
}
387
388
static void gc_stack_free(gc_stack *stack)
389
27
{
390
27
  gc_stack *p = stack->next;
391
392
54
  while (p) {
393
27
    stack = p->next;
394
27
    efree(p);
395
27
    p = stack;
396
27
  }
397
27
}
398
399
/* Map a full index to a compressed index.
400
 *
401
 * The root buffer can have up to 2^30 entries, but we only have 20 bits to
402
 * store the index. So we use the 1<<19 bit as a compression flag and use the
403
 * other 19 bits to store the index modulo 2^19. */
404
static zend_always_inline uint32_t gc_compress(uint32_t idx)
405
2.85k
{
406
2.85k
  if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
407
2.85k
    return idx;
408
2.85k
  }
409
0
  return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
410
2.85k
}
411
412
/* Find the root buffer entry given a pointer and a compressed index.
413
 * Iterate through the root buffer in steps of 2^19 until the pointer
414
 * matches. */
415
static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
416
0
{
417
0
  gc_root_buffer *root = GC_IDX2PTR(idx);
418
419
0
  if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
420
0
    return root;
421
0
  }
422
423
0
  while (1) {
424
0
    idx += GC_MAX_UNCOMPRESSED;
425
0
    ZEND_ASSERT(idx < GC_G(first_unused));
426
0
    root = GC_IDX2PTR(idx);
427
0
    if (GC_GET_PTR(root->ref) == ref) {
428
0
      return root;
429
0
    }
430
0
  }
431
0
}
432
433
/* Get the index of the next unused root buffer entry, and remove it from the
434
 * unused list. GC_HAS_UNUSED() must be true before calling this. */
435
static zend_always_inline uint32_t gc_fetch_unused(void)
436
2.68k
{
437
2.68k
  uint32_t idx;
438
2.68k
  gc_root_buffer *root;
439
440
2.68k
  ZEND_ASSERT(GC_HAS_UNUSED());
441
2.68k
  idx = GC_G(unused);
442
2.68k
  root = GC_IDX2PTR(idx);
443
2.68k
  ZEND_ASSERT(GC_IS_UNUSED(root->ref));
444
2.68k
  GC_G(unused) = GC_LIST2IDX(root->ref);
445
2.68k
  return idx;
446
2.68k
}
447
448
/* Add a root buffer entry to the unused list */
449
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
450
2.83k
{
451
2.83k
  root->ref = GC_IDX2LIST(GC_G(unused));
452
2.83k
  GC_G(unused) = GC_PTR2IDX(root);
453
2.83k
}
454
455
static zend_always_inline uint32_t gc_fetch_next_unused(void)
456
150
{
457
150
  uint32_t idx;
458
459
150
  ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
460
150
  idx = GC_G(first_unused);
461
150
  GC_G(first_unused) = GC_G(first_unused) + 1;
462
150
  return idx;
463
150
}
464
465
#if ZEND_GC_DEBUG > 1
466
static const char *gc_color_name(uint32_t color) {
467
  switch (color) {
468
    case GC_BLACK: return "black";
469
    case GC_WHITE: return "white";
470
    case GC_GREY: return "grey";
471
    case GC_PURPLE: return "purple";
472
    default: return "unknown";
473
  }
474
}
475
static void gc_trace_ref(zend_refcounted *ref) {
476
  if (GC_TYPE(ref) == IS_OBJECT) {
477
    zend_object *obj = (zend_object *) ref;
478
    fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
479
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
480
      gc_color_name(GC_REF_COLOR(ref)),
481
      obj->ce->name->val, obj->handle);
482
  } else if (GC_TYPE(ref) == IS_ARRAY) {
483
    zend_array *arr = (zend_array *) ref;
484
    fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
485
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
486
      gc_color_name(GC_REF_COLOR(ref)),
487
      zend_hash_num_elements(arr));
488
  } else {
489
    fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
490
      ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
491
      gc_color_name(GC_REF_COLOR(ref)),
492
      GC_TYPE(ref) == IS_REFERENCE
493
        ? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
494
  }
495
}
496
#endif
497
498
/* Mark a root buffer entry unused */
499
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
500
2.83k
{
501
2.83k
  GC_LINK_UNUSED(root);
502
2.83k
  GC_G(num_roots)--;
503
2.83k
  GC_BENCH_DEC(root_buf_length);
504
2.83k
}
505
506
static void root_buffer_dtor(zend_gc_globals *gc_globals)
507
0
{
508
0
  if (gc_globals->buf) {
509
0
    free(gc_globals->buf);
510
0
    gc_globals->buf = NULL;
511
0
  }
512
0
}
513
514
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
515
2
{
516
2
  gc_globals->gc_enabled = false;
517
2
  gc_globals->gc_active = false;
518
2
  gc_globals->gc_protected = true;
519
2
  gc_globals->gc_full = false;
520
521
2
  gc_globals->buf = NULL;
522
2
  gc_globals->unused = GC_INVALID;
523
2
  gc_globals->first_unused = GC_INVALID;
524
2
  gc_globals->gc_threshold = GC_INVALID;
525
2
  gc_globals->buf_size = GC_INVALID;
526
2
  gc_globals->num_roots = 0;
527
528
2
  gc_globals->gc_runs = 0;
529
2
  gc_globals->collected = 0;
530
2
  gc_globals->collector_time = 0;
531
2
  gc_globals->dtor_time = 0;
532
2
  gc_globals->free_time = 0;
533
2
  gc_globals->activated_at = 0;
534
535
2
  gc_globals->dtor_idx = GC_FIRST_ROOT;
536
2
  gc_globals->dtor_end = 0;
537
2
  gc_globals->dtor_fiber = NULL;
538
2
  gc_globals->dtor_fiber_running = false;
539
540
#if GC_BENCH
541
  gc_globals->root_buf_length = 0;
542
  gc_globals->root_buf_peak = 0;
543
  gc_globals->zval_possible_root = 0;
544
  gc_globals->zval_buffered = 0;
545
  gc_globals->zval_remove_from_buffer = 0;
546
  gc_globals->zval_marked_grey = 0;
547
#endif
548
2
}
549
550
void gc_globals_ctor(void)
551
2
{
552
#ifdef ZTS
553
  ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
554
#else
555
2
  gc_globals_ctor_ex(&gc_globals);
556
2
#endif
557
2
}
558
559
void gc_globals_dtor(void)
560
0
{
561
0
#ifndef ZTS
562
0
  root_buffer_dtor(&gc_globals);
563
0
#endif
564
0
}
565
566
void gc_reset(void)
567
63
{
568
63
  if (GC_G(buf)) {
569
63
    GC_G(gc_active) = 0;
570
63
    GC_G(gc_protected) = 0;
571
63
    GC_G(gc_full) = 0;
572
63
    GC_G(unused) = GC_INVALID;
573
63
    GC_G(first_unused) = GC_FIRST_ROOT;
574
63
    GC_G(num_roots) = 0;
575
576
63
    GC_G(gc_runs) = 0;
577
63
    GC_G(collected) = 0;
578
579
63
    GC_G(collector_time) = 0;
580
63
    GC_G(dtor_time) = 0;
581
63
    GC_G(free_time) = 0;
582
583
63
    GC_G(dtor_idx) = GC_FIRST_ROOT;
584
63
    GC_G(dtor_end) = 0;
585
63
    GC_G(dtor_fiber) = NULL;
586
63
    GC_G(dtor_fiber_running) = false;
587
588
#if GC_BENCH
589
    GC_G(root_buf_length) = 0;
590
    GC_G(root_buf_peak) = 0;
591
    GC_G(zval_possible_root) = 0;
592
    GC_G(zval_buffered) = 0;
593
    GC_G(zval_remove_from_buffer) = 0;
594
    GC_G(zval_marked_grey) = 0;
595
#endif
596
63
  }
597
598
63
  GC_G(activated_at) = zend_hrtime();
599
63
}
600
601
/* Enable/disable the garbage collector.
602
 * Initialize globals if necessary. */
603
ZEND_API bool gc_enable(bool enable)
604
124
{
605
124
  bool old_enabled = GC_G(gc_enabled);
606
124
  GC_G(gc_enabled) = enable;
607
124
  if (enable && !old_enabled && GC_G(buf) == NULL) {
608
2
    GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
609
2
    GC_G(buf)[0].ref = NULL;
610
2
    GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
611
2
    GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
612
2
    gc_reset();
613
2
  }
614
124
  return old_enabled;
615
124
}
616
617
ZEND_API bool gc_enabled(void)
618
0
{
619
0
  return GC_G(gc_enabled);
620
0
}
621
622
/* Protect the GC root buffer (prevent additions) */
623
ZEND_API bool gc_protect(bool protect)
624
62
{
625
62
  bool old_protected = GC_G(gc_protected);
626
62
  GC_G(gc_protected) = protect;
627
62
  return old_protected;
628
62
}
629
630
ZEND_API bool gc_protected(void)
631
0
{
632
0
  return GC_G(gc_protected);
633
0
}
634
635
static void gc_grow_root_buffer(void)
636
0
{
637
0
  size_t new_size;
638
639
0
  if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
640
0
    if (!GC_G(gc_full)) {
641
0
      zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
642
0
      GC_G(gc_active) = 1;
643
0
      GC_G(gc_protected) = 1;
644
0
      GC_G(gc_full) = 1;
645
0
      return;
646
0
    }
647
0
  }
648
0
  if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
649
0
    new_size = GC_G(buf_size) * 2;
650
0
  } else {
651
0
    new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
652
0
  }
653
0
  if (new_size > GC_MAX_BUF_SIZE) {
654
0
    new_size = GC_MAX_BUF_SIZE;
655
0
  }
656
0
  GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
657
0
  GC_G(buf_size) = new_size;
658
0
}
659
660
/* Adjust the GC activation threshold given the number of nodes collected by the last run */
661
static void gc_adjust_threshold(int count)
662
0
{
663
0
  uint32_t new_threshold;
664
665
  /* TODO Very simple heuristic for dynamic GC buffer resizing:
666
   * If there are "too few" collections, increase the collection threshold
667
   * by a fixed step */
668
0
  if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
669
    /* increase */
670
0
    if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
671
0
      new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
672
0
      if (new_threshold > GC_THRESHOLD_MAX) {
673
0
        new_threshold = GC_THRESHOLD_MAX;
674
0
      }
675
0
      if (new_threshold > GC_G(buf_size)) {
676
0
        gc_grow_root_buffer();
677
0
      }
678
0
      if (new_threshold <= GC_G(buf_size)) {
679
0
        GC_G(gc_threshold) = new_threshold;
680
0
      }
681
0
    }
682
0
  } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
683
0
    new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
684
0
    if (new_threshold < GC_THRESHOLD_DEFAULT) {
685
0
      new_threshold = GC_THRESHOLD_DEFAULT;
686
0
    }
687
0
    GC_G(gc_threshold) = new_threshold;
688
0
  }
689
0
}
690
691
/* Perform a GC run and then add a node as a possible root. */
692
static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
693
0
{
694
0
  uint32_t idx;
695
0
  gc_root_buffer *newRoot;
696
697
0
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
698
0
  ZEND_ASSERT(GC_INFO(ref) == 0);
699
700
0
  if (GC_G(gc_enabled) && !GC_G(gc_active)) {
701
0
    GC_ADDREF(ref);
702
0
    gc_adjust_threshold(gc_collect_cycles());
703
0
    if (UNEXPECTED(GC_DELREF(ref) == 0)) {
704
0
      rc_dtor_func(ref);
705
0
      return;
706
0
    } else if (UNEXPECTED(GC_INFO(ref))) {
707
0
      return;
708
0
    }
709
0
  }
710
711
0
  if (GC_HAS_UNUSED()) {
712
0
    idx = GC_FETCH_UNUSED();
713
0
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
714
0
    idx = GC_FETCH_NEXT_UNUSED();
715
0
  } else {
716
0
    gc_grow_root_buffer();
717
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
718
0
      return;
719
0
    }
720
0
    idx = GC_FETCH_NEXT_UNUSED();
721
0
  }
722
723
0
  newRoot = GC_IDX2PTR(idx);
724
0
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
725
0
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
726
727
0
  idx = gc_compress(idx);
728
0
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
729
0
  GC_G(num_roots)++;
730
731
0
  GC_BENCH_INC(zval_buffered);
732
0
  GC_BENCH_INC(root_buf_length);
733
0
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
734
0
}
735
736
/* Add a possible root node to the buffer.
737
 * Maybe perform a GC run. */
738
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
739
3.81k
{
740
3.81k
  uint32_t idx;
741
3.81k
  gc_root_buffer *newRoot;
742
743
3.81k
  if (UNEXPECTED(GC_G(gc_protected))) {
744
974
    return;
745
974
  }
746
747
2.83k
  GC_BENCH_INC(zval_possible_root);
748
749
2.83k
  if (EXPECTED(GC_HAS_UNUSED())) {
750
2.68k
    idx = GC_FETCH_UNUSED();
751
2.68k
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
752
150
    idx = GC_FETCH_NEXT_UNUSED();
753
150
  } else {
754
0
    gc_possible_root_when_full(ref);
755
0
    return;
756
0
  }
757
758
2.83k
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
759
2.83k
  ZEND_ASSERT(GC_INFO(ref) == 0);
760
761
2.83k
  newRoot = GC_IDX2PTR(idx);
762
2.83k
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
763
2.83k
  GC_TRACE_SET_COLOR(ref, GC_PURPLE);
764
765
2.83k
  idx = gc_compress(idx);
766
2.83k
  GC_REF_SET_INFO(ref, idx | GC_PURPLE);
767
2.83k
  GC_G(num_roots)++;
768
769
2.83k
  GC_BENCH_INC(zval_buffered);
770
2.83k
  GC_BENCH_INC(root_buf_length);
771
2.83k
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
772
2.83k
}
773
774
/* Add an extra root during a GC run */
775
static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref)
776
0
{
777
0
  uint32_t idx;
778
0
  gc_root_buffer *newRoot;
779
780
0
  if (EXPECTED(GC_HAS_UNUSED())) {
781
0
    idx = GC_FETCH_UNUSED();
782
0
  } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
783
0
    idx = GC_FETCH_NEXT_UNUSED();
784
0
  } else {
785
0
    gc_grow_root_buffer();
786
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
787
      /* TODO: can this really happen? */
788
0
      return;
789
0
    }
790
0
    idx = GC_FETCH_NEXT_UNUSED();
791
0
  }
792
793
0
  ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
794
0
  ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0);
795
796
0
  newRoot = GC_IDX2PTR(idx);
797
0
  newRoot->ref = ref; /* GC_ROOT tag is 0 */
798
799
0
  idx = gc_compress(idx);
800
0
  GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref));
801
0
  GC_G(num_roots)++;
802
803
0
  GC_BENCH_INC(zval_buffered);
804
0
  GC_BENCH_INC(root_buf_length);
805
0
  GC_BENCH_PEAK(root_buf_peak, root_buf_length);
806
0
}
807
808
/* Remove a node from the root buffer given its compressed index */
809
static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
810
0
{
811
0
  gc_root_buffer *root = gc_decompress(ref, idx);
812
0
  gc_remove_from_roots(root);
813
0
}
814
815
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
816
2.78k
{
817
2.78k
  gc_root_buffer *root;
818
2.78k
  uint32_t idx = GC_REF_ADDRESS(ref);
819
820
2.78k
  GC_BENCH_INC(zval_remove_from_buffer);
821
822
2.78k
  if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
823
2.78k
    GC_TRACE_SET_COLOR(ref, GC_BLACK);
824
2.78k
  }
825
2.78k
  GC_REF_SET_INFO(ref, 0);
826
827
  /* Perform decompression only in case of large buffers */
828
2.78k
  if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
829
0
    gc_remove_compressed(ref, idx);
830
0
    return;
831
0
  }
832
833
2.78k
  ZEND_ASSERT(idx);
834
2.78k
  root = GC_IDX2PTR(idx);
835
2.78k
  gc_remove_from_roots(root);
836
2.78k
}
837
838
/* Mark all nodes reachable from ref as black (live). Restore the reference
 * counts decremented by gc_mark_grey(). See ScanBlack() in Bacon & Rajan.
 * To implement a depth-first search, discovered nodes are added to a stack
 * which is processed iteratively. */
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		/* Objects whose free handler already ran have no valid children. */
		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				/* Table of (entry, weakmap) pairs for weakmaps that hold
				 * this object as a key; iterate two zvals at a time. */
				zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					zval *weakmap = zv+1;
					ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
					if (Z_OPT_COLLECTABLE_P(entry)) {
						GC_UNSET_FROM_WEAKMAP_KEY(entry);
						if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) {
							/* Weakmap was scanned in gc_mark_roots, we must
							 * ensure that it's eventually scanned in
							 * gc_scan_roots as well. */
							if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) {
								gc_extra_root(Z_COUNTED_P(weakmap));
							}
						} else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) {
							/* Both the entry weakmap and key are BLACK, so we
							 * can mark the entry BLACK as well.
							 * !GC_FROM_WEAKMAP(entry) means that the weakmap
							 * was already scanned black (or will not be
							 * scanned), so it's our responsibility to mark the
							 * entry */
							ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK));
							ref = Z_COUNTED_P(entry);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
					}
					zv+=2;
				}
			}

			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				/* The object itself is a weakmap: table of (key, entry)
				 * pairs; mirror of the weakly-referenced case above. */
				zend_weakmap_get_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR);
					zval *key = zv;
					zval *entry = (zval*) Z_PTR_P(zv+1);
					if (Z_OPT_COLLECTABLE_P(entry)) {
						GC_UNSET_FROM_WEAKMAP(entry);
						if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) {
							/* Key was scanned in gc_mark_roots, we must
							 * ensure that it's eventually scanned in
							 * gc_scan_roots as well. */
							if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) {
								gc_extra_root(Z_COUNTED_P(key));
							}
						} else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) {
							/* Both the entry weakmap and key are BLACK, so we
							 * can mark the entry BLACK as well.
							 * !GC_FROM_WEAKMAP_KEY(entry) means that the key
							 * was already scanned black (or will not be
							 * scanned), so it's our responsibility to mark the
							 * entry */
							ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK));
							ref = Z_COUNTED_P(entry);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
					}
					zv += 2;
				}
				goto next;
			}

			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				/* get_gc returned an extra hash table (e.g. properties);
				 * re-increment it (undoing gc_mark_grey's DELREF) and, if
				 * newly blackened, process its members via handle_ht. */
				GC_ADDREF(ht);
				if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
					GC_REF_SET_BLACK(ht);
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}

handle_zvals:
			/* Walk a flat zval array; the first newly-blackened child is
			 * followed directly (tail_call) after pushing the remaining
			 * children, avoiding one stack push per chain link. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_ADDREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
						GC_REF_SET_BLACK(ref);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_ADDREF(ref);
								if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
									GC_REF_SET_BLACK(ref);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
		ht = (zend_array*)ref;
handle_ht:
		n = ht->nNumUsed;
		zv = ht->arPacked;
		if (HT_IS_PACKED(ht)) {
			goto handle_zvals;
		}

		/* Non-packed table: iterate buckets, following IS_INDIRECT slots
		 * (e.g. declared-property tables) to the real zval. */
		p = (Bucket*)zv;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_ADDREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
					GC_REF_SET_BLACK(ref);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A reference has exactly one child: follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_ADDREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
				GC_REF_SET_BLACK(ref);
				goto tail_call;
			}
		}
	}

next:
	/* Pop the next discovered node; NULL means the DFS is exhausted. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1040
1041
/* Traverse the graph of nodes referred to by ref. Decrement the reference
 * counts and mark visited nodes grey. See MarkGray() in Bacon & Rajan.
 * After this pass, nodes whose refcount dropped to zero are only kept alive
 * by the candidate cycle; gc_scan() later decides black vs white. */
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	GC_BENCH_INC(zval_marked_grey);

	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		/* Objects whose free handler already ran have no valid children. */
		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				/* Table of (entry, weakmap) pairs for weakmaps holding this
				 * object as a key; iterate two zvals at a time. */
				zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					zval *weakmap = zv+1;
					ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
					if (Z_COLLECTABLE_P(entry)) {
						GC_SET_FROM_WEAKMAP_KEY(entry);
						ref = Z_COUNTED_P(entry);
						/* Only DELREF if the contribution from the weakmap has
						 * not been cancelled yet */
						if (!GC_FROM_WEAKMAP(entry)) {
							GC_DELREF(ref);
						}
						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_GREY);
							GC_STACK_PUSH(ref);
						}
					}
					zv+=2;
				}
			}

			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				/* The object itself is a weakmap: walk its entry table. */
				zend_weakmap_get_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_COLLECTABLE_P(entry)) {
						GC_SET_FROM_WEAKMAP(entry);
						ref = Z_COUNTED_P(entry);
						/* Only DELREF if the contribution from the weakmap key
						 * has not been cancelled yet */
						if (!GC_FROM_WEAKMAP_KEY(entry)) {
							GC_DELREF(ref);
						}
						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_GREY);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
				goto next;
			}

			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				/* get_gc returned an extra hash table (e.g. properties);
				 * treat it as one more child: DELREF and grey it. */
				GC_DELREF(ht);
				if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
					GC_REF_SET_COLOR(ht, GC_GREY);
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_DELREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_GREY);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}
handle_zvals:
			/* Walk a flat zval array; the first newly-greyed child is
			 * followed directly (tail_call) after pushing the remaining
			 * children, avoiding one stack push per chain link. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_DELREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
						GC_REF_SET_COLOR(ref, GC_GREY);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_DELREF(ref);
								if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
									GC_REF_SET_COLOR(ref, GC_GREY);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
		ht = (zend_array*)ref;
handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			zv = ht->arPacked;
			goto handle_zvals;
		}

		/* Non-packed table: iterate buckets, following IS_INDIRECT slots
		 * (e.g. declared-property tables) to the real zval. */
		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_DELREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
					GC_REF_SET_COLOR(ref, GC_GREY);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_DELREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_GREY);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A reference has exactly one child: follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_DELREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
				GC_REF_SET_COLOR(ref, GC_GREY);
				goto tail_call;
			}
		}
	}

next:
	/* Pop the next discovered node; NULL means the DFS is exhausted. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1217
1218
/* Two-Finger compaction algorithm: one "finger" (free) walks forward to the
 * next empty slot, the other (scan) walks backward to the next occupied slot;
 * occupied slots are moved into holes until the live slots form one
 * contiguous run starting at GC_FIRST_ROOT. Each moved node's stored buffer
 * address is updated to its new (compressed) index. */
static void gc_compact(void)
{
	/* Nothing to do when the roots are already contiguous. */
	if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
		if (GC_G(num_roots)) {
			gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
			gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
			gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
			uint32_t idx;
			zend_refcounted *p;

			while (free < scan) {
				/* Advance to the first hole... */
				while (!GC_IS_UNUSED(free->ref)) {
					free++;
				}
				/* ...and back up to the last occupied slot. */
				while (GC_IS_UNUSED(scan->ref)) {
					scan--;
				}
				if (scan > free) {
					p = scan->ref;
					free->ref = p;
					/* Strip tag bits before touching the node's GC info. */
					p = GC_GET_PTR(p);
					idx = gc_compress(GC_PTR2IDX(free));
					GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
					free++;
					scan--;
					/* All live slots now fit below `end`: compaction done. */
					if (scan <= end) {
						break;
					}
				}
			}
		}
		/* The free list is invalidated; slots are reallocated sequentially. */
		GC_G(unused) = GC_INVALID;
		GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
	}
}
1254
1255
/* For all roots marked purple, traverse the graph, decrementing the reference
1256
 * count of their child nodes. Mark visited nodes grey so that they are not
1257
 * visited again. See MarkRoots() in Bacon & Rajan. */
1258
static void gc_mark_roots(gc_stack *stack)
1259
27
{
1260
27
  gc_root_buffer *current, *last;
1261
1262
27
  gc_compact();
1263
1264
27
  current = GC_IDX2PTR(GC_FIRST_ROOT);
1265
27
  last = GC_IDX2PTR(GC_G(first_unused));
1266
82
  while (current != last) {
1267
55
    if (GC_IS_ROOT(current->ref)) {
1268
55
      if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
1269
54
        GC_REF_SET_COLOR(current->ref, GC_GREY);
1270
54
        gc_mark_grey(current->ref, stack);
1271
54
      }
1272
55
    }
1273
55
    current++;
1274
55
  }
1275
27
}
1276
1277
/* Traverse the reference graph of ref. Evaluate grey nodes and mark them
 * black (to keep) or white (to free). Note that nodes initially marked white
 * may later become black if they are visited from a live node.
 * See Scan() in Bacon & Rajan. */
static void gc_scan(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	/* Only white nodes need evaluation; anything else was already decided. */
	if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
		goto next;
	}

	/* A positive refcount after gc_mark_grey means an external (non-cycle)
	 * reference exists: the node is live, so blacken its whole subgraph. */
	if (GC_REFCOUNT(ref) > 0) {
		if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
			GC_REF_SET_BLACK(ref);
			if (UNEXPECTED(!_stack->next)) {
				gc_stack_next(_stack);
			}
			/* Split stack and reuse the tail */
			_stack->next->prev = NULL;
			gc_scan_black(ref, _stack->next);
			_stack->next->prev = _stack;
		}
		goto next;
	}

	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;
		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				/* Propagate the scan into weakmap entries keyed by this
				 * object. */
				zend_weakmap_get_object_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_OPT_COLLECTABLE_P(entry)) {
						ref = Z_COUNTED_P(entry);
						if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_WHITE);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
			}

			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				/* Extra hash table from get_gc: whiten it and queue both it
				 * and its members. */
				if (GC_REF_CHECK_COLOR(ht, GC_GREY)) {
					GC_REF_SET_COLOR(ht, GC_WHITE);
					GC_STACK_PUSH((zend_refcounted *) ht);
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_WHITE);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}

handle_zvals:
			/* Walk a flat zval array; the first grey child found is followed
			 * directly (tail_call) after pushing the rest. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
						GC_REF_SET_COLOR(ref, GC_WHITE);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
									GC_REF_SET_COLOR(ref, GC_WHITE);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ht = (HashTable *)ref;
		ZEND_ASSERT(ht != &EG(symbol_table));

handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			zv = ht->arPacked;
			goto handle_zvals;
		}

		/* Non-packed table: iterate buckets, following IS_INDIRECT slots to
		 * the real zval. */
		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
					GC_REF_SET_COLOR(ref, GC_WHITE);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_WHITE);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A reference has exactly one child: follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
				GC_REF_SET_COLOR(ref, GC_WHITE);
				goto tail_call;
			}
		}
	}

next:
	/* Pop the next queued node; NULL means the traversal is exhausted. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1433
1434
/* Scan all roots, coloring grey nodes black or white */
1435
static void gc_scan_roots(gc_stack *stack)
1436
27
{
1437
27
  uint32_t idx, end;
1438
27
  gc_root_buffer *current;
1439
1440
  /* Root buffer might be reallocated during gc_scan,
1441
   * make sure to reload pointers. */
1442
27
  idx = GC_FIRST_ROOT;
1443
27
  end = GC_G(first_unused);
1444
82
  while (idx != end) {
1445
55
    current = GC_IDX2PTR(idx);
1446
55
    if (GC_IS_ROOT(current->ref)) {
1447
55
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1448
54
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1449
54
        gc_scan(current->ref, stack);
1450
54
      }
1451
55
    }
1452
55
    idx++;
1453
55
  }
1454
1455
  /* Scan extra roots added during gc_scan */
1456
27
  while (idx != GC_G(first_unused)) {
1457
0
    current = GC_IDX2PTR(idx);
1458
0
    if (GC_IS_ROOT(current->ref)) {
1459
0
      if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1460
0
        GC_REF_SET_COLOR(current->ref, GC_WHITE);
1461
0
        gc_scan(current->ref, stack);
1462
0
      }
1463
0
    }
1464
0
    idx++;
1465
0
  }
1466
27
}
1467
1468
/* Add a node to the buffer with the garbage flag, so that it will be
1469
 * destroyed and freed when the scan is complete. */
1470
static void gc_add_garbage(zend_refcounted *ref)
1471
0
{
1472
0
  uint32_t idx;
1473
0
  gc_root_buffer *buf;
1474
1475
0
  if (GC_HAS_UNUSED()) {
1476
0
    idx = GC_FETCH_UNUSED();
1477
0
  } else if (GC_HAS_NEXT_UNUSED()) {
1478
0
    idx = GC_FETCH_NEXT_UNUSED();
1479
0
  } else {
1480
0
    gc_grow_root_buffer();
1481
0
    if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
1482
0
      return;
1483
0
    }
1484
0
    idx = GC_FETCH_NEXT_UNUSED();
1485
0
  }
1486
1487
0
  buf = GC_IDX2PTR(idx);
1488
0
  buf->ref = GC_MAKE_GARBAGE(ref);
1489
1490
0
  idx = gc_compress(idx);
1491
0
  GC_REF_SET_INFO(ref, idx | GC_BLACK);
1492
0
  GC_G(num_roots)++;
1493
0
}
1494
1495
/* Traverse the reference graph from ref, marking any white nodes as garbage.
 * White children are re-blackened and ADDREF'd (undoing gc_mark_grey) so the
 * later destruction phase sees consistent refcounts; nodes not already in
 * the buffer are appended via gc_add_garbage(). Returns the number of
 * non-reference garbage nodes found. */
static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
{
	int count = 0;
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	/* don't count references for compatibility ??? */
	if (GC_TYPE(ref) != IS_REFERENCE) {
		count++;
	}

	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			int len;
			zval *table;

			/* optimization: color is GC_BLACK (0) */
			if (!GC_INFO(ref)) {
				gc_add_garbage(ref);
			}
			/* Flag the collection run if any garbage object still has a
			 * destructor to call. */
			if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
			 && (obj->handlers->dtor_obj != zend_objects_destroy_object
			  || obj->ce->destructor != NULL)) {
				*flags |= GC_HAS_DESTRUCTORS;
			}

			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				/* Undo weakmap-key refcount bookkeeping from gc_mark_grey
				 * for entries keyed by this object. */
				zend_weakmap_get_object_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) {
						GC_UNSET_FROM_WEAKMAP_KEY(entry);
						GC_UNSET_FROM_WEAKMAP(entry);
						ref = Z_COUNTED_P(entry);
						GC_ADDREF(ref);
						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
							GC_REF_SET_BLACK(ref);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
			}

			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				/* The object itself is a weakmap: process its entry table. */
				zend_weakmap_get_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n--) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					if (Z_COLLECTABLE_P(entry) && GC_FROM_WEAKMAP(entry)) {
						GC_UNSET_FROM_WEAKMAP_KEY(entry);
						GC_UNSET_FROM_WEAKMAP(entry);
						ref = Z_COUNTED_P(entry);
						GC_ADDREF(ref);
						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
							GC_REF_SET_BLACK(ref);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
				goto next;
			}

			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				/* Extra hash table from get_gc: restore its refcount and,
				 * if white, collect its members too. */
				GC_ADDREF(ht);
				if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) {
					GC_REF_SET_BLACK(ht);
					for (; n != 0; n--) {
						if (Z_COLLECTABLE_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}

handle_zvals:
			/* Walk a flat zval array; the first white child found is
			 * followed directly (tail_call) after pushing the rest. */
			for (; n != 0; n--) {
				if (Z_COLLECTABLE_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_ADDREF(ref);
					if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
						GC_REF_SET_BLACK(ref);
						zv++;
						while (--n) {
							if (Z_COLLECTABLE_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_ADDREF(ref);
								if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
									GC_REF_SET_BLACK(ref);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		/* optimization: color is GC_BLACK (0) */
		if (!GC_INFO(ref)) {
			gc_add_garbage(ref);
		}
		ht = (zend_array*)ref;

handle_ht:
		n = ht->nNumUsed;
		if (HT_IS_PACKED(ht)) {
			zv = ht->arPacked;
			goto handle_zvals;
		}

		/* Non-packed table: iterate buckets, following IS_INDIRECT slots to
		 * the real zval. */
		p = ht->arData;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_COLLECTABLE_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_ADDREF(ref);
				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
					GC_REF_SET_BLACK(ref);
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_COLLECTABLE_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A reference has exactly one child: follow it directly. */
		if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_ADDREF(ref);
			if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
				GC_REF_SET_BLACK(ref);
				goto tail_call;
			}
		}
	}

next:
	/* Pop the next queued node; NULL means the traversal is exhausted. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}

	return count;
}
1683
1684
/* Traverse the reference graph from all roots, marking white nodes as garbage. */
1685
static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
1686
27
{
1687
27
  uint32_t idx, end;
1688
27
  zend_refcounted *ref;
1689
27
  int count = 0;
1690
27
  gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1691
27
  gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1692
1693
  /* remove non-garbage from the list */
1694
82
  while (current != last) {
1695
55
    if (GC_IS_ROOT(current->ref)) {
1696
55
      if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
1697
55
        GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
1698
55
        gc_remove_from_roots(current);
1699
55
      }
1700
55
    }
1701
55
    current++;
1702
55
  }
1703
1704
27
  gc_compact();
1705
1706
  /* Root buffer might be reallocated during gc_collect_white,
1707
   * make sure to reload pointers. */
1708
27
  idx = GC_FIRST_ROOT;
1709
27
  end = GC_G(first_unused);
1710
27
  while (idx != end) {
1711
0
    current = GC_IDX2PTR(idx);
1712
0
    ref = current->ref;
1713
0
    ZEND_ASSERT(GC_IS_ROOT(ref));
1714
0
    current->ref = GC_MAKE_GARBAGE(ref);
1715
0
    if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1716
0
      GC_REF_SET_BLACK(ref);
1717
0
      count += gc_collect_white(ref, flags, stack);
1718
0
    }
1719
0
    idx++;
1720
0
  }
1721
1722
27
  return count;
1723
27
}
1724
1725
/* Remove data nested inside 'ref' from the root buffer, so that calling a
 * destructor on the enclosing garbage cannot race with its collection.
 *
 * This is an explicit-stack graph walk (labels simulate tail calls, the
 * gc_stack holds pending siblings). 'root' is non-NULL only for the initial
 * call, marking the entry itself as already-counted garbage. Purple-colored
 * roots are deliberately NOT removed (see caller in zend_gc_collect_cycles).
 * Returns the number of entries removed/accounted, which the caller
 * subtracts from the collected count. */
static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
{
  HashTable *ht;
  Bucket *p;
  zval *zv;
  uint32_t n;
  int count = 0;
  GC_STACK_DCL(stack);

tail_call:
  if (root) {
    /* Entry node: count it once, then continue into its children. */
    root = NULL;
    count++;
  } else if (GC_REF_ADDRESS(ref) != 0
   && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
    /* Buffered and live: drop it from the root buffer. */
    GC_TRACE_REF(ref, "removing from buffer");
    GC_REMOVE_FROM_BUFFER(ref);
    count++;
  } else if (GC_TYPE(ref) == IS_REFERENCE) {
    /* Follow a PHP reference transparently. */
    if (Z_COLLECTABLE(((zend_reference*)ref)->val)) {
      ref = Z_COUNTED(((zend_reference*)ref)->val);
      goto tail_call;
    }
    goto next;
  } else {
    goto next;
  }

  if (GC_TYPE(ref) == IS_OBJECT) {
    zend_object *obj = (zend_object*)ref;

    if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
      int len;
      zval *table;

      /* Objects held in weak maps expose extra GC entries. */
      if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
        zend_weakmap_get_object_entry_gc(obj, &table, &len);
        n = len;
        zv = table;
        for (; n != 0; n--) {
          ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
          zval *entry = (zval*) Z_PTR_P(zv);
          if (Z_OPT_COLLECTABLE_P(entry)) {
            ref = Z_COUNTED_P(entry);
            GC_STACK_PUSH(ref);
          }
          zv++;
        }
      }

      /* get_gc() yields the object's child zvals and, optionally, a
       * whole hash table to scan as well. */
      ht = obj->handlers->get_gc(obj, &table, &len);
      n = len;
      zv = table;
      if (UNEXPECTED(ht)) {
        /* Push all child zvals, then fall through to the hash table. */
        for (; n != 0; n--) {
          if (Z_COLLECTABLE_P(zv)) {
            ref = Z_COUNTED_P(zv);
            GC_STACK_PUSH(ref);
          }
          zv++;
        }
        if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
          GC_TRACE_REF(ht, "removing from buffer");
          GC_REMOVE_FROM_BUFFER(ht);
        }
        goto handle_ht;
      }

handle_zvals:
      /* Walk a flat zval array: descend into the first collectable child
       * directly (tail call), push the remaining siblings. */
      for (; n != 0; n--) {
        if (Z_COLLECTABLE_P(zv)) {
          ref = Z_COUNTED_P(zv);
          zv++;
          while (--n) {
            if (Z_COLLECTABLE_P(zv)) {
              zend_refcounted *ref = Z_COUNTED_P(zv);
              GC_STACK_PUSH(ref);
            }
            zv++;
          }
          goto tail_call;
        }
        zv++;
      }
    }
  } else if (GC_TYPE(ref) == IS_ARRAY) {
    ht = (zend_array*)ref;

handle_ht:
    n = ht->nNumUsed;
    if (HT_IS_PACKED(ht)) {
      /* Packed arrays store plain zvals; reuse the zval walker. */
      zv = ht->arPacked;
      goto handle_zvals;
    }

    /* Bucket-based table: same first-child-descends / rest-pushed scheme,
     * resolving INDIRECT slots along the way. */
    p = ht->arData;
    for (; n != 0; n--) {
      zv = &p->val;
      if (Z_TYPE_P(zv) == IS_INDIRECT) {
        zv = Z_INDIRECT_P(zv);
      }
      if (Z_COLLECTABLE_P(zv)) {
        ref = Z_COUNTED_P(zv);
        p++;
        while (--n) {
          zv = &p->val;
          if (Z_TYPE_P(zv) == IS_INDIRECT) {
            zv = Z_INDIRECT_P(zv);
          }
          if (Z_COLLECTABLE_P(zv)) {
            zend_refcounted *ref = Z_COUNTED_P(zv);
            GC_STACK_PUSH(ref);
          }
          p++;
        }
        goto tail_call;
      }
      p++;
    }
  }

next:
  /* Resume with the next pending sibling, if any. */
  ref = GC_STACK_POP();
  if (ref) {
    goto tail_call;
  }

  return count;
}
1854
1855
static void zend_get_gc_buffer_release(void);
1856
static void zend_gc_check_root_tmpvars(void);
1857
static void zend_gc_remove_root_tmpvars(void);
1858
1859
static zend_internal_function gc_destructor_fiber;
1860
1861
/* Fatal error helper: creating the GC destructor fiber failed. */
static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void)
{
  zend_error_noreturn(E_ERROR, "Unable to create destructor fiber");
}
1865
1866
/* Fatal error helper: starting the GC destructor fiber failed. */
static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void)
{
  zend_error_noreturn(E_ERROR, "Unable to start destructor fiber");
}
1870
1871
/* Call destructors for garbage in the buffer.
 *
 * Walks root-buffer slots [idx, end) and invokes dtor_obj on every entry
 * tagged DTOR_GARBAGE whose destructor has not run yet. When running inside
 * the destructor fiber ('fiber' non-NULL), progress is checkpointed in
 * GC_G(dtor_idx) so a suspended run can be resumed, and FAILURE is returned
 * if the fiber was switched away from by a destructor. Returns SUCCESS when
 * all destructors in the range were processed. */
static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber)
{
  gc_root_buffer *current;
  zend_refcounted *p;

  /* The root buffer might be reallocated during destructors calls,
   * make sure to reload pointers as necessary. */
  while (idx != end) {
    current = GC_IDX2PTR(idx);
    if (GC_IS_DTOR_GARBAGE(current->ref)) {
      p = GC_GET_PTR(current->ref);
      /* Mark this is as a normal root for the next GC run */
      current->ref = p;
      /* Double check that the destructor hasn't been called yet. It
       * could have already been invoked indirectly by some other
       * destructor. */
      if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
        if (fiber != NULL) {
          /* Checkpoint so a replacement fiber can resume here. */
          GC_G(dtor_idx) = idx;
        }
        zend_object *obj = (zend_object*)p;
        GC_TRACE_REF(obj, "calling destructor");
        GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
        /* Keep the object alive across the user-level destructor. */
        GC_ADDREF(obj);
        obj->handlers->dtor_obj(obj);
        GC_TRACE_REF(obj, "returned from destructor");
        GC_DELREF(obj);
        if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) {
          /* We resumed after suspension */
          gc_check_possible_root((zend_refcounted*)&obj->gc);
          return FAILURE;
        }
      }
    }
    idx++;
  }

  return SUCCESS;
}
1911
1912
/* Create and start the fiber that runs garbage destructors.
 *
 * The fiber executes gc_destructor_fiber() (see below) and suspends itself
 * once all pending destructors ran. Errors during creation or start are
 * fatal. The new fiber is published in GC_G(dtor_fiber) before starting so
 * the fiber body can find it. */
static zend_fiber *gc_create_destructor_fiber(void)
{
  zval zobj;
  zend_fiber *fiber;

  GC_TRACE("starting destructor fiber");

  if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) {
    gc_create_destructor_fiber_error();
  }

  fiber = (zend_fiber *)Z_OBJ(zobj);
  /* Wire the fiber directly to the internal handler, bypassing userland. */
  fiber->fci.size = sizeof(fiber->fci);
  fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber;

  GC_G(dtor_fiber) = fiber;

  if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) {
    gc_start_destructor_fiber_error();
  }

  return fiber;
}
1935
1936
/* Stash any pending engine exception into *prev_exception, chaining an
 * already-remembered exception as its "previous", and clear EG(exception)
 * so destructor calls can proceed. No-op when nothing is pending. */
static void remember_prev_exception(zend_object **prev_exception)
{
  zend_object *pending = EG(exception);

  if (!pending) {
    return;
  }
  if (*prev_exception) {
    zend_exception_set_previous(pending, *prev_exception);
  }
  *prev_exception = pending;
  EG(exception) = NULL;
}
1946
1947
/* Run pending garbage destructors inside a dedicated fiber.
 *
 * Used when GC triggers while a userland fiber is active: destructors must
 * not run on the application fiber's stack. If a destructor suspends the
 * fiber, ownership is released and a fresh fiber is created to finish the
 * remaining range. Exceptions thrown by destructors are chained and
 * restored into EG(exception) at the end. */
static zend_never_inline void gc_call_destructors_in_fiber(uint32_t end)
{
  ZEND_ASSERT(!GC_G(dtor_fiber_running));

  zend_fiber *fiber = GC_G(dtor_fiber);

  /* Publish the work range for gc_destructor_fiber(). */
  GC_G(dtor_idx) = GC_FIRST_ROOT;
  GC_G(dtor_end) = GC_G(first_unused);

  if (UNEXPECTED(!fiber)) {
    fiber = gc_create_destructor_fiber();
  } else {
    zend_fiber_resume(fiber, NULL, NULL);
  }

  zend_object *exception = NULL;
  remember_prev_exception(&exception);

  for (;;) {
    /* At this point, fiber has executed until suspension */
    GC_TRACE("resumed from destructor fiber");

    if (UNEXPECTED(GC_G(dtor_fiber_running))) {
      /* Fiber was suspended by a destructor. Start a new one for the
       * remaining destructors. */
      GC_TRACE("destructor fiber suspended by destructor");
      GC_G(dtor_fiber) = NULL;
      /* Skip the entry whose destructor suspended us. */
      GC_G(dtor_idx)++;
      /* We do not own the fiber anymore. It may be collected if the
       * application does not reference it. */
      zend_object_release(&fiber->std);
      remember_prev_exception(&exception);
      fiber = gc_create_destructor_fiber();
      remember_prev_exception(&exception);
      continue;
    } else {
      /* Fiber suspended itself after calling all destructors */
      GC_TRACE("destructor fiber suspended itself");
      break;
    }
  }

  /* Re-raise the (chained) exception(s) collected along the way. */
  EG(exception) = exception;
}
1991
1992
/* Perform a garbage collection run. The default implementation of gc_collect_cycles.
 *
 * Phases: (1) mark roots purple→grey, (2) scan grey nodes black/white,
 * (3) collect white nodes as garbage, (4) call destructors (possibly in a
 * fiber), (5) destroy zvals, (6) free memory. If destructors ran, the whole
 * cycle is re-run once, since destructors may resurrect data. Returns the
 * total number of collected nodes. */
ZEND_API int zend_gc_collect_cycles(void)
{
  int total_count = 0;
  bool should_rerun_gc = false;
  bool did_rerun_gc = false;

  zend_hrtime_t start_time = zend_hrtime();
  if (GC_G(num_roots) && !GC_G(gc_active)) {
    /* Unroot live TMPVARs so they are not treated as cycle roots. */
    zend_gc_remove_root_tmpvars();
  }

rerun_gc:
  if (GC_G(num_roots)) {
    int count;
    gc_root_buffer *current, *last;
    zend_refcounted *p;
    uint32_t gc_flags = 0;
    uint32_t idx, end;
    gc_stack stack;

    stack.prev = NULL;
    stack.next = NULL;

    if (GC_G(gc_active)) {
      /* Re-entrant invocation: refuse to run GC inside GC. */
      GC_G(collector_time) += zend_hrtime() - start_time;
      return 0;
    }

    GC_TRACE("Collecting cycles");
    GC_G(gc_runs)++;
    GC_G(gc_active) = 1;

    GC_TRACE("Marking roots");
    gc_mark_roots(&stack);
    GC_TRACE("Scanning roots");
    gc_scan_roots(&stack);

    GC_TRACE("Collecting roots");
    count = gc_collect_roots(&gc_flags, &stack);

    if (!GC_G(num_roots)) {
      /* nothing to free */
      GC_TRACE("Nothing to free");
      gc_stack_free(&stack);
      GC_G(gc_active) = 0;
      goto finish;
    }

    end = GC_G(first_unused);

    if (gc_flags & GC_HAS_DESTRUCTORS) {
      GC_TRACE("Calling destructors");

      /* During a destructor call, new externally visible references to nested data may
       * be introduced. These references can be introduced in a way that does not
       * modify any refcounts, so we have no real way to detect this situation
       * short of rerunning full GC tracing. What we do instead is to only run
       * destructors at this point and automatically re-run GC afterwards. */
      should_rerun_gc = true;

      /* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
       * color them purple. This serves a double purpose: First, they should be
       * considered new potential roots for the next GC run. Second, it will prevent
       * their removal from the root buffer by nested data removal. */
      idx = GC_FIRST_ROOT;
      current = GC_IDX2PTR(GC_FIRST_ROOT);
      while (idx != end) {
        if (GC_IS_GARBAGE(current->ref)) {
          p = GC_GET_PTR(current->ref);
          if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
            zend_object *obj = (zend_object *) p;
            if (obj->handlers->dtor_obj != zend_objects_destroy_object
              || obj->ce->destructor) {
              current->ref = GC_MAKE_DTOR_GARBAGE(obj);
              GC_REF_SET_COLOR(obj, GC_PURPLE);
            } else {
              /* No user destructor: mark as already handled. */
              GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
            }
          }
        }
        current++;
        idx++;
      }

      /* Remove nested data for objects on which a destructor will be called.
       * This will not remove the objects themselves, as they have been colored
       * purple. */
      idx = GC_FIRST_ROOT;
      current = GC_IDX2PTR(GC_FIRST_ROOT);
      while (idx != end) {
        if (GC_IS_DTOR_GARBAGE(current->ref)) {
          p = GC_GET_PTR(current->ref);
          count -= gc_remove_nested_data_from_buffer(p, current, &stack);
        }
        current++;
        idx++;
      }

      /* Actually call destructors. */
      zend_hrtime_t dtor_start_time = zend_hrtime();
      if (EXPECTED(!EG(active_fiber))) {
        gc_call_destructors(GC_FIRST_ROOT, end, NULL);
      } else {
        /* Never run destructors on an application fiber's stack. */
        gc_call_destructors_in_fiber(end);
      }
      GC_G(dtor_time) += zend_hrtime() - dtor_start_time;

      if (GC_G(gc_protected)) {
        /* something went wrong */
        zend_get_gc_buffer_release();
        GC_G(collector_time) += zend_hrtime() - start_time;
        return 0;
      }
    }

    gc_stack_free(&stack);

    /* Destroy zvals. The root buffer may be reallocated. */
    GC_TRACE("Destroying zvals");
    zend_hrtime_t free_start_time = zend_hrtime();
    idx = GC_FIRST_ROOT;
    while (idx != end) {
      current = GC_IDX2PTR(idx);
      if (GC_IS_GARBAGE(current->ref)) {
        p = GC_GET_PTR(current->ref);
        GC_TRACE_REF(p, "destroying");
        if (GC_TYPE(p) == IS_OBJECT) {
          zend_object *obj = (zend_object*)p;

          /* Invalidate the object store slot and neutralize the type. */
          EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
          GC_TYPE_INFO(obj) = GC_NULL |
            (GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
          /* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
          current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
          if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
            GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
            GC_ADDREF(obj);
            obj->handlers->free_obj(obj);
            GC_DELREF(obj);
          }

          ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
        } else if (GC_TYPE(p) == IS_ARRAY) {
          zend_array *arr = (zend_array*)p;

          GC_TYPE_INFO(arr) = GC_NULL |
            (GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);

          /* GC may destroy arrays with rc>1. This is valid and safe. */
          HT_ALLOW_COW_VIOLATION(arr);

          zend_hash_destroy(arr);
        }
      }
      idx++;
    }

    /* Free objects */
    current = GC_IDX2PTR(GC_FIRST_ROOT);
    last = GC_IDX2PTR(end);
    while (current != last) {
      if (GC_IS_GARBAGE(current->ref)) {
        p = GC_GET_PTR(current->ref);
        GC_LINK_UNUSED(current);
        GC_G(num_roots)--;
        efree(p);
      }
      current++;
    }

    GC_G(free_time) += zend_hrtime() - free_start_time;

    GC_TRACE("Collection finished");
    GC_G(collected) += count;
    total_count += count;
    GC_G(gc_active) = 0;
  }

  gc_compact();

  /* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
   * up. We do this only once: If we encounter more destructors on the second run, we'll not
   * run GC another time. */
  if (should_rerun_gc && !did_rerun_gc) {
    did_rerun_gc = true;
    goto rerun_gc;
  }

finish:
  zend_get_gc_buffer_release();

  /* Prevent GC from running during zend_gc_check_root_tmpvars, before
   * gc_threshold is adjusted, as this may result in unbounded recursion */
  GC_G(gc_active) = 1;
  zend_gc_check_root_tmpvars();
  GC_G(gc_active) = 0;

  GC_G(collector_time) += zend_hrtime() - start_time;
  return total_count;
}
2193
2194
/* Fill 'status' with a snapshot of the collector's state, counters and
 * accumulated timers (exposed to userland via gc_status()). */
ZEND_API void zend_gc_get_status(zend_gc_status *status)
{
  status->active = GC_G(gc_active);
  status->gc_protected = GC_G(gc_protected);
  status->full = GC_G(gc_full);
  status->runs = GC_G(gc_runs);
  status->collected = GC_G(collected);
  status->threshold = GC_G(gc_threshold);
  status->buf_size = GC_G(buf_size);
  status->num_roots = GC_G(num_roots);
  /* Wall-clock time since the collector was activated. */
  status->application_time = zend_hrtime() - GC_G(activated_at);
  status->collector_time = GC_G(collector_time);
  status->dtor_time = GC_G(dtor_time);
  status->free_time = GC_G(free_time);
}
2209
2210
2
/* Return the (single, reused) scratch buffer for get_gc() handlers,
 * reset to empty. */
ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
  /* There can only be one get_gc() call active at a time,
   * so there only needs to be one buffer. */
  zend_get_gc_buffer *buffer = &EG(get_gc_buffer);
  buffer->cur = buffer->start;
  return buffer;
}
2217
2218
1
/* Double the capacity of the get_gc() scratch buffer (64 zvals initially),
 * preserving its contents and the current write position. */
ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
  size_t used = gc_buffer->end - gc_buffer->start;
  size_t capacity;

  if (used == 0) {
    capacity = 64;
  } else {
    capacity = used * 2;
  }
  gc_buffer->start = erealloc(gc_buffer->start, capacity * sizeof(zval));
  gc_buffer->cur = gc_buffer->start + used;
  gc_buffer->end = gc_buffer->start + capacity;
}
2225
2226
134
static void zend_get_gc_buffer_release(void) {
2227
134
  zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2228
134
  efree(gc_buffer->start);
2229
134
  gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
2230
134
}
2231
2232
/* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
 * cycles. However, there are some rare exceptions where this is possible, in which case we rely
 * on the producing code to root the value. If a GC run occurs between the rooting and consumption
 * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
static void zend_gc_check_root_tmpvars(void) {
  zend_execute_data *ex = EG(current_execute_data);
  /* Walk every userland frame on the call stack. */
  for (; ex; ex = ex->prev_execute_data) {
    zend_function *func = ex->func;
    if (!func || !ZEND_USER_CODE(func->type)) {
      continue;
    }

    /* Current opcode offset determines which live ranges apply. */
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
      const zend_live_range *range = &func->op_array.live_range[i];
      if (range->start > op_num) {
        /* Ranges are ordered by start; none further can match. */
        break;
      }
      if (range->end <= op_num) {
        continue;
      }

      uint32_t kind = range->var & ZEND_LIVE_MASK;
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
        zval *var = ZEND_CALL_VAR(ex, var_num);
        if (Z_COLLECTABLE_P(var)) {
          gc_check_possible_root(Z_COUNTED_P(var));
        }
      }
    }
  }
}
2265
2266
27
static void zend_gc_remove_root_tmpvars(void) {
2267
27
  zend_execute_data *ex = EG(current_execute_data);
2268
27
  for (; ex; ex = ex->prev_execute_data) {
2269
0
    zend_function *func = ex->func;
2270
0
    if (!func || !ZEND_USER_CODE(func->type)) {
2271
0
      continue;
2272
0
    }
2273
2274
0
    uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2275
0
    for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2276
0
      const zend_live_range *range = &func->op_array.live_range[i];
2277
0
      if (range->start > op_num) {
2278
0
        break;
2279
0
      }
2280
0
      if (range->end <= op_num) {
2281
0
        continue;
2282
0
      }
2283
2284
0
      uint32_t kind = range->var & ZEND_LIVE_MASK;
2285
0
      if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2286
0
        uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2287
0
        zval *var = ZEND_CALL_VAR(ex, var_num);
2288
0
        if (Z_COLLECTABLE_P(var)) {
2289
0
          GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var));
2290
0
        }
2291
0
      }
2292
0
    }
2293
0
  }
2294
27
}
2295
2296
#if GC_BENCH
2297
void gc_bench_print(void)
2298
{
2299
  fprintf(stderr, "GC Statistics\n");
2300
  fprintf(stderr, "-------------\n");
2301
  fprintf(stderr, "Runs:               %d\n", GC_G(gc_runs));
2302
  fprintf(stderr, "Collected:          %d\n", GC_G(collected));
2303
  fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length));
2304
  fprintf(stderr, "Root buffer peak:   %d\n\n", GC_G(root_buf_peak));
2305
  fprintf(stderr, "      Possible            Remove from  Marked\n");
2306
  fprintf(stderr, "        Root    Buffered     buffer     grey\n");
2307
  fprintf(stderr, "      --------  --------  -----------  ------\n");
2308
  fprintf(stderr, "ZVAL  %8d  %8d  %9d  %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey));
2309
}
2310
#endif
2311
2312
#ifdef ZTS
2313
size_t zend_gc_globals_size(void)
2314
{
2315
  return sizeof(zend_gc_globals);
2316
}
2317
#endif
2318
2319
/* Body of the GC destructor fiber.
 *
 * Loops forever: runs the pending destructor range published in
 * GC_G(dtor_idx)/GC_G(dtor_end), then suspends until the next GC run
 * resumes it. Returns (ending the fiber) either when a destructor
 * suspended us and a replacement fiber took over, or when the fiber is
 * destroyed during shutdown. */
static ZEND_FUNCTION(gc_destructor_fiber)
{
  uint32_t idx, end;

  zend_fiber *fiber = GC_G(dtor_fiber);
  ZEND_ASSERT(fiber != NULL);
  ZEND_ASSERT(fiber == EG(active_fiber));

  for (;;) {
    GC_G(dtor_fiber_running) = true;

    /* Pick up the work range published by gc_call_destructors_in_fiber(). */
    idx = GC_G(dtor_idx);
    end = GC_G(dtor_end);
    if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) {
      /* We resumed after being suspended by a destructor */
      return;
    }

    /* We have called all destructors. Suspend fiber until the next GC run
     */
    GC_G(dtor_fiber_running) = false;
    zend_fiber_suspend(fiber, NULL, NULL);

    if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) {
      /* Fiber is being destroyed by shutdown sequence */
      if (GC_G(dtor_fiber) == fiber) {
        GC_G(dtor_fiber) = NULL;
      }
      GC_DELREF(&fiber->std);
      gc_check_possible_root((zend_refcounted*)&fiber->std.gc);
      return;
    }
  }
}
2353
2354
/* Internal-function descriptor backing the destructor fiber; its name is
 * interned lazily in gc_init(). */
static zend_internal_function gc_destructor_fiber = {
  .type = ZEND_INTERNAL_FUNCTION,
  .fn_flags = ZEND_ACC_PUBLIC,
  .handler = ZEND_FN(gc_destructor_fiber),
};
2359
2360
void gc_init(void)
2361
2
{
2362
2
  gc_destructor_fiber.function_name = zend_string_init_interned(
2363
2
      "gc_destructor_fiber",
2364
2
      strlen("gc_destructor_fiber"),
2365
      true);
2366
2
}