xref: /PHP-8.4/Zend/zend_gc.c (revision d093c10c)
/*
   +----------------------------------------------------------------------+
   | Zend Engine                                                          |
   +----------------------------------------------------------------------+
   | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
   +----------------------------------------------------------------------+
   | This source file is subject to version 2.00 of the Zend license,     |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.zend.com/license/2_00.txt.                                |
   | If you did not receive a copy of the Zend license and are unable to  |
   | obtain it through the world-wide-web, please send a note to          |
   | license@zend.com so we can mail you a copy immediately.              |
   +----------------------------------------------------------------------+
   | Authors: David Wang <planetbeing@gmail.com>                          |
   |          Dmitry Stogov <dmitry@php.net>                              |
   +----------------------------------------------------------------------+
*/
19 
/**
 * zend_gc_collect_cycles
 * ======================
 *
 * Colors and their meanings
 * -------------------------
 *
 * BLACK  (GC_BLACK)   - In use or free.
 * GREY   (GC_GREY)    - Possible member of cycle.
 * WHITE  (GC_WHITE)   - Member of garbage cycle.
 * PURPLE (GC_PURPLE)  - Possible root of cycle.
 *
 * Colors described in the paper but not used
 * ------------------------------------------
 *
 * GREEN  - Acyclic.
 * RED    - Candidate cycle undergoing computation.
 * ORANGE - Candidate cycle awaiting epoch boundary.
 *
 *
 * Flow
 * ====
 *
 * The garbage collection cycle starts from 'gc_mark_roots', which traverses
 * the possible roots and calls gc_mark_grey, with a depth-first traversal,
 * for each root that is marked purple.
 *
 * After all possible roots are traversed and marked, gc_scan_roots is
 * called, and each root is processed with gc_scan(root->ref).
 *
 * gc_scan checks the colors of possible members:
 *
 * If a node is marked grey and its refcount > 0,
 *    gc_scan_black is called on that node to scan its subgraph;
 * otherwise (refcount == 0), the node is marked white.
 *
 * A node MAY be added to the possible roots when ZEND_UNSET_VAR happens, or
 * when zend_assign_to_variable is called and a possible garbage node is
 * produced.
 * gc_possible_root() is called to add such nodes to the possible roots.
 *
 *
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
 * get the object properties to scan.
 *
 *
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
 */
69 #include "zend.h"
70 #include "zend_API.h"
71 #include "zend_compile.h"
72 #include "zend_errors.h"
73 #include "zend_fibers.h"
74 #include "zend_hrtime.h"
75 #include "zend_portability.h"
76 #include "zend_types.h"
77 #include "zend_weakrefs.h"
78 #include "zend_string.h"
79 
/* Compile-time switches: GC_BENCH enables internal statistics counters,
 * ZEND_GC_DEBUG > 1 enables stderr tracing. Both default to off. */
#ifndef GC_BENCH
# define GC_BENCH 0
#endif

#ifndef ZEND_GC_DEBUG
# define ZEND_GC_DEBUG 0
#endif

/* GC_INFO layout: the GC_INFO portion of a refcounted's type_info packs a
 * (possibly compressed) root-buffer index in the low 20 bits and the mark
 * color in the next 2 bits. */
#define GC_ADDRESS  0x0fffffu
#define GC_COLOR    0x300000u

#define GC_BLACK    0x000000u /* must be zero */
#define GC_WHITE    0x100000u
#define GC_GREY     0x200000u
#define GC_PURPLE   0x300000u

/* Debug tracing (compiled out unless ZEND_GC_DEBUG > 1) */
#if ZEND_GC_DEBUG > 1
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
# define GC_TRACE_REF(ref, format, ...) \
	do { \
		gc_trace_ref((zend_refcounted *) ref); \
		fprintf(stderr, format "\n", ##__VA_ARGS__); \
	} while (0)
# define GC_TRACE_SET_COLOR(ref, color) \
	GC_TRACE_REF(ref, "->%s", gc_color_name(color))
#else
# define GC_TRACE_REF(ref, format, ...)
# define GC_TRACE_SET_COLOR(ref, new_color)
# define GC_TRACE(str)
#endif

/* GC_INFO accessors: extract/update the address and color fields inside
 * GC_TYPE_INFO(ref) without disturbing the type and flags bits. */
#define GC_REF_ADDRESS(ref) \
	(((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)

#define GC_REF_COLOR(ref) \
	(((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)

#define GC_REF_CHECK_COLOR(ref, color) \
	((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))

/* Replace the whole GC_INFO field (address + color) at once. */
#define GC_REF_SET_INFO(ref, info) do { \
		GC_TYPE_INFO(ref) = \
			(GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
			((info) << GC_INFO_SHIFT); \
	} while (0)

#define GC_REF_SET_COLOR(ref, c) do { \
		GC_TRACE_SET_COLOR(ref, c); \
		GC_TYPE_INFO(ref) = \
			(GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
			((c) << GC_INFO_SHIFT); \
	} while (0)

/* BLACK is all-zero, so setting it is a plain mask-out of the color bits. */
#define GC_REF_SET_BLACK(ref) do { \
		GC_TRACE_SET_COLOR(ref, GC_BLACK); \
		GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
	} while (0)

/* PURPLE is all-ones in the color field, so setting it is a plain OR. */
#define GC_REF_SET_PURPLE(ref) do { \
		GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
		GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
	} while (0)
145 
/* Bit-stealing tags for gc_root_buffer.ref: the two low-order bits of the
 * (aligned) pointer encode what the slot currently holds. */
#define GC_BITS    0x3

#define GC_ROOT    0x0 /* possible root of circular garbage     */
#define GC_UNUSED  0x1 /* part of linked list of unused buffers */
#define GC_GARBAGE 0x2 /* garbage to delete                     */
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */

/* Strip the tag bits to recover the real pointer. */
#define GC_GET_PTR(ptr) \
	((void*)(((uintptr_t)(ptr)) & ~GC_BITS))

#define GC_IS_ROOT(ptr) \
	((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
#define GC_IS_UNUSED(ptr) \
	((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
#define GC_IS_GARBAGE(ptr) \
	((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
#define GC_IS_DTOR_GARBAGE(ptr) \
	((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)

#define GC_MAKE_GARBAGE(ptr) \
	((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
#define GC_MAKE_DTOR_GARBAGE(ptr) \
	((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))

/* Conversions between buffer indexes and gc_root_buffer pointers. */
#define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
#define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))

/* Encode/decode a free-list link stored inside an unused slot's 'ref'
 * field: index scaled by sizeof(void*) and tagged GC_UNUSED. */
#define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
#define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))

/* GC buffer sizing. Index 0 is reserved (GC_INVALID); real roots start at
 * GC_FIRST_ROOT. */
#define GC_INVALID           0
#define GC_FIRST_ROOT        1

#define GC_DEFAULT_BUF_SIZE  (16 * 1024)
#define GC_BUF_GROW_STEP     (128 * 1024)

/* Above this index, root addresses stored in GC_INFO are compressed
 * (see gc_compress / gc_decompress). */
#define GC_MAX_UNCOMPRESSED  (512 * 1024)
#define GC_MAX_BUF_SIZE      0x40000000

#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
#define GC_THRESHOLD_STEP    10000
#define GC_THRESHOLD_MAX     1000000000
#define GC_THRESHOLD_TRIGGER 100

/* GC flags */
#define GC_HAS_DESTRUCTORS  (1<<0)

/* Weak maps: extra zval type-info bits tracking which side of a WeakMap
 * relation contributed a reference to the entry. */
#define Z_FROM_WEAKMAP_KEY		(1<<0)
#define Z_FROM_WEAKMAP			(1<<1)

/* The WeakMap entry zv is reachable from roots by following the virtual
 * reference from a WeakMap key to the entry */
#define GC_FROM_WEAKMAP_KEY(zv) \
	(Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT))

#define GC_SET_FROM_WEAKMAP_KEY(zv) do {									   \
	zval *_z = (zv);														   \
	Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
} while (0)

#define GC_UNSET_FROM_WEAKMAP_KEY(zv) do {									   \
	zval *_z = (zv);														   \
	Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP_KEY << Z_TYPE_INFO_EXTRA_SHIFT); \
} while (0)

/* The WeakMap entry zv is reachable from roots by following the reference from
 * the WeakMap */
#define GC_FROM_WEAKMAP(zv) \
	(Z_TYPE_INFO_P((zv)) & (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT))

#define GC_SET_FROM_WEAKMAP(zv) do {									       \
	zval *_z = (zv);														   \
	Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) | (Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
} while (0)

#define GC_UNSET_FROM_WEAKMAP(zv) do {										   \
	zval *_z = (zv);														   \
	Z_TYPE_INFO_P(_z) = Z_TYPE_INFO_P(_z) & ~(Z_FROM_WEAKMAP << Z_TYPE_INFO_EXTRA_SHIFT); \
} while (0)

/* Unused-slot management: a free list threaded through recycled slots,
 * plus a bump pointer (first_unused) over never-used slots. */
#define GC_HAS_UNUSED() \
	(GC_G(unused) != GC_INVALID)
#define GC_FETCH_UNUSED() \
	gc_fetch_unused()
#define GC_LINK_UNUSED(root) \
	gc_link_unused(root)

#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
	(GC_G(first_unused) < GC_G(gc_threshold))
#define GC_HAS_NEXT_UNUSED() \
	(GC_G(first_unused) != GC_G(buf_size))
#define GC_FETCH_NEXT_UNUSED() \
	gc_fetch_next_unused()
244 
/* Overridable entry point for a full cycle collection. */
ZEND_API int (*gc_collect_cycles)(void);

/* One slot of the root buffer; the low bits of 'ref' carry a GC_* tag. */
typedef struct _gc_root_buffer {
	zend_refcounted  *ref;
} gc_root_buffer;

typedef struct _zend_gc_globals {
	gc_root_buffer   *buf;				/* preallocated arrays of buffers   */

	bool         gc_enabled;
	bool         gc_active;        /* GC currently running, forbid nested GC */
	bool         gc_protected;     /* GC protected, forbid root additions */
	bool         gc_full;          /* buffer overflowed; GC disabled */

	uint32_t          unused;			/* linked list of unused buffers    */
	uint32_t          first_unused;		/* first unused buffer              */
	uint32_t          gc_threshold;     /* GC collection threshold          */
	uint32_t          buf_size;			/* size of the GC buffer            */
	uint32_t          num_roots;		/* number of roots in GC buffer     */

	uint32_t gc_runs;      /* number of collections performed */
	uint32_t collected;    /* number of collected zvals */

	/* timing statistics (zend_hrtime ticks) */
	zend_hrtime_t activated_at;
	zend_hrtime_t collector_time;
	zend_hrtime_t dtor_time;
	zend_hrtime_t free_time;

	/* destructor-phase bookkeeping */
	uint32_t dtor_idx;			/* root buffer index */
	uint32_t dtor_end;
	zend_fiber *dtor_fiber;
	bool dtor_fiber_running;

#if GC_BENCH
	uint32_t root_buf_length;
	uint32_t root_buf_peak;
	uint32_t zval_possible_root;
	uint32_t zval_buffered;
	uint32_t zval_remove_from_buffer;
	uint32_t zval_marked_grey;
#endif
} zend_gc_globals;

/* GC_G(v): access GC globals — per-thread slot in ZTS builds, a static
 * struct otherwise. */
#ifdef ZTS
static int gc_globals_id;
static size_t gc_globals_offset;
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
#else
#define GC_G(v) (gc_globals.v)
static zend_gc_globals gc_globals;
#endif

/* Statistics counters; no-ops unless GC_BENCH is enabled. */
#if GC_BENCH
# define GC_BENCH_INC(counter) GC_G(counter)++
# define GC_BENCH_DEC(counter) GC_G(counter)--
# define GC_BENCH_PEAK(peak, counter) do {		\
		if (GC_G(counter) > GC_G(peak)) {		\
			GC_G(peak) = GC_G(counter);			\
		}										\
	} while (0)
#else
# define GC_BENCH_INC(counter)
# define GC_BENCH_DEC(counter)
# define GC_BENCH_PEAK(peak, counter)
#endif


/* Segment capacity chosen so one segment (header + data) fits a 4K
 * allocation after the allocator's per-block overhead. */
#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)

typedef struct _gc_stack gc_stack;

/* Segmented mark stack used by the traversal routines; segments form a
 * doubly-linked list and are allocated lazily. */
struct _gc_stack {
	gc_stack        *prev;
	gc_stack        *next;
	zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
};

/* Declare the local stack cursor (current segment + top-of-segment index)
 * used by GC_STACK_PUSH/GC_STACK_POP. */
#define GC_STACK_DCL(init) \
	gc_stack *_stack = init; \
	size_t    _top = 0;

#define GC_STACK_PUSH(ref) \
	gc_stack_push(&_stack, &_top, ref);

#define GC_STACK_POP() \
	gc_stack_pop(&_stack, &_top)
331 
/* Return the segment following 'stack', allocating and linking a fresh one
 * the first time the stack grows past the current tail. Segments are never
 * freed here; gc_stack_free() releases them. */
static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
{
	if (UNEXPECTED(!stack->next)) {
		gc_stack *segment = emalloc(sizeof(gc_stack));
		segment->prev = stack;
		segment->next = NULL;
		stack->next = segment;
	}
	return stack->next;
}
342 
/* Push 'ref' onto the mark stack, advancing to (and lazily allocating)
 * the next segment when the current one is full. */
static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
{
	if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
		(*stack) = gc_stack_next(*stack);
		(*top) = 0;
	}
	(*stack)->data[(*top)++] = ref;
}
351 
gc_stack_pop(gc_stack ** stack,size_t * top)352 static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
353 {
354 	if (UNEXPECTED((*top) == 0)) {
355 		if (!(*stack)->prev) {
356 			return NULL;
357 		} else {
358 			(*stack) = (*stack)->prev;
359 			(*top) = GC_STACK_SEGMENT_SIZE - 1;
360 			return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
361 		}
362 	} else {
363 		return (*stack)->data[--(*top)];
364 	}
365 }
366 
/* Free every lazily-allocated segment after the head. The head segment
 * itself is owned by the caller (typically stack-allocated) and is not
 * released here. */
static void gc_stack_free(gc_stack *stack)
{
	gc_stack *seg, *next;

	for (seg = stack->next; seg != NULL; seg = next) {
		next = seg->next;
		efree(seg);
	}
}
377 
gc_compress(uint32_t idx)378 static zend_always_inline uint32_t gc_compress(uint32_t idx)
379 {
380 	if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
381 		return idx;
382 	}
383 	return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
384 }
385 
/* Resolve a (possibly compressed) root-buffer index back to the slot that
 * actually holds 'ref'. Compressed indexes are ambiguous, so probe the
 * candidates idx, idx + GC_MAX_UNCOMPRESSED, idx + 2*GC_MAX_UNCOMPRESSED,
 * ... until the slot whose pointer matches 'ref' is found. The assert
 * guarantees the probe stays within the used part of the buffer. */
static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
{
	gc_root_buffer *root = GC_IDX2PTR(idx);

	if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
		return root;
	}

	while (1) {
		idx += GC_MAX_UNCOMPRESSED;
		ZEND_ASSERT(idx < GC_G(first_unused));
		root = GC_IDX2PTR(idx);
		if (GC_GET_PTR(root->ref) == ref) {
			return root;
		}
	}
}
403 
/* Pop one slot off the free list of recycled root-buffer entries and
 * return its index. The free list is threaded through the 'ref' fields of
 * unused slots (see gc_link_unused / GC_IDX2LIST). Caller must check
 * GC_HAS_UNUSED() first. */
static zend_always_inline uint32_t gc_fetch_unused(void)
{
	uint32_t idx;
	gc_root_buffer *root;

	ZEND_ASSERT(GC_HAS_UNUSED());
	idx = GC_G(unused);
	root = GC_IDX2PTR(idx);
	ZEND_ASSERT(GC_IS_UNUSED(root->ref));
	GC_G(unused) = GC_LIST2IDX(root->ref);
	return idx;
}
416 
/* Return 'root' to the free list: store the previous list head in the
 * slot (encoded with the GC_UNUSED tag) and make this slot the new head. */
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
{
	root->ref = GC_IDX2LIST(GC_G(unused));
	GC_G(unused) = GC_PTR2IDX(root);
}
422 
gc_fetch_next_unused(void)423 static zend_always_inline uint32_t gc_fetch_next_unused(void)
424 {
425 	uint32_t idx;
426 
427 	ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
428 	idx = GC_G(first_unused);
429 	GC_G(first_unused) = GC_G(first_unused) + 1;
430 	return idx;
431 }
432 
#if ZEND_GC_DEBUG > 1
/* Human-readable name for a GC color (debug tracing only). */
static const char *gc_color_name(uint32_t color) {
	switch (color) {
		case GC_BLACK: return "black";
		case GC_WHITE: return "white";
		case GC_GREY: return "grey";
		case GC_PURPLE: return "purple";
		default: return "unknown";
	}
}
/* Print a one-line description of 'ref' (address, refcount, buffered
 * address, color, and a type-specific summary) to stderr. No trailing
 * newline; callers append their own message. */
static void gc_trace_ref(zend_refcounted *ref) {
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object *) ref;
		fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			obj->ce->name->val, obj->handle);
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		zend_array *arr = (zend_array *) ref;
		fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			zend_hash_num_elements(arr));
	} else {
		fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			GC_TYPE(ref) == IS_REFERENCE
				? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
	}
}
#endif
465 
/* Detach a slot from the set of possible roots and recycle it onto the
 * free list. Does not touch the referenced node's GC_INFO; callers reset
 * that themselves. */
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
{
	GC_LINK_UNUSED(root);
	GC_G(num_roots)--;
	GC_BENCH_DEC(root_buf_length);
}
472 
/* Release the (persistently allocated) root buffer and clear the pointer
 * so a later enable can re-allocate it. Safe to call when no buffer was
 * ever allocated. */
static void root_buffer_dtor(zend_gc_globals *gc_globals)
{
	gc_root_buffer *buf = gc_globals->buf;

	if (buf) {
		gc_globals->buf = NULL;
		free(buf);
	}
}
480 
/* Initialize a zend_gc_globals instance to its default state: collector
 * disabled and protected, no buffer allocated yet. Used directly for the
 * static globals in non-ZTS builds and as the per-thread ctor in ZTS
 * builds (see gc_globals_ctor). */
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
{
	gc_globals->gc_enabled = 0;
	gc_globals->gc_active = 0;
	gc_globals->gc_protected = 1;
	gc_globals->gc_full = 0;

	/* Buffer is allocated lazily by gc_enable(). */
	gc_globals->buf = NULL;
	gc_globals->unused = GC_INVALID;
	gc_globals->first_unused = GC_INVALID;
	gc_globals->gc_threshold = GC_INVALID;
	gc_globals->buf_size = GC_INVALID;
	gc_globals->num_roots = 0;

	gc_globals->gc_runs = 0;
	gc_globals->collected = 0;
	gc_globals->collector_time = 0;
	gc_globals->dtor_time = 0;
	gc_globals->free_time = 0;
	gc_globals->activated_at = 0;

	gc_globals->dtor_idx = GC_FIRST_ROOT;
	gc_globals->dtor_end = 0;
	gc_globals->dtor_fiber = NULL;
	gc_globals->dtor_fiber_running = false;

#if GC_BENCH
	gc_globals->root_buf_length = 0;
	gc_globals->root_buf_peak = 0;
	gc_globals->zval_possible_root = 0;
	gc_globals->zval_buffered = 0;
	gc_globals->zval_remove_from_buffer = 0;
	gc_globals->zval_marked_grey = 0;
#endif
}
516 
/* Process-wide GC globals setup: in ZTS builds allocate the TSRM resource
 * slot (with per-thread ctor/dtor); otherwise initialize the static
 * globals directly. */
void gc_globals_ctor(void)
{
#ifdef ZTS
	ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
#else
	gc_globals_ctor_ex(&gc_globals);
#endif
}
525 
/* Tear down GC globals. In ZTS builds TSRM invokes root_buffer_dtor per
 * thread, so only the non-ZTS case frees here. */
void gc_globals_dtor(void)
{
#ifndef ZTS
	root_buffer_dtor(&gc_globals);
#endif
}
532 
/* Reset collector state for a new request: clear the run/collected
 * counters, free lists, timing stats and destructor bookkeeping. The
 * allocated buffer and the threshold are kept. The activation timestamp
 * is refreshed even when no buffer exists yet. */
void gc_reset(void)
{
	if (GC_G(buf)) {
		GC_G(gc_active) = 0;
		GC_G(gc_protected) = 0;
		GC_G(gc_full) = 0;
		GC_G(unused) = GC_INVALID;
		GC_G(first_unused) = GC_FIRST_ROOT;
		GC_G(num_roots) = 0;

		GC_G(gc_runs) = 0;
		GC_G(collected) = 0;

		GC_G(collector_time) = 0;
		GC_G(dtor_time) = 0;
		GC_G(free_time) = 0;

		GC_G(dtor_idx) = GC_FIRST_ROOT;
		GC_G(dtor_end) = 0;
		GC_G(dtor_fiber) = NULL;
		GC_G(dtor_fiber_running) = false;

#if GC_BENCH
		GC_G(root_buf_length) = 0;
		GC_G(root_buf_peak) = 0;
		GC_G(zval_possible_root) = 0;
		GC_G(zval_buffered) = 0;
		GC_G(zval_remove_from_buffer) = 0;
		GC_G(zval_marked_grey) = 0;
#endif
	}

	GC_G(activated_at) = zend_hrtime();
}
567 
/* Enable or disable the cycle collector; returns the previous setting.
 * On the first transition to enabled the root buffer is allocated
 * (persistently, since it outlives requests) and the collector state is
 * reset. */
ZEND_API bool gc_enable(bool enable)
{
	bool old_enabled = GC_G(gc_enabled);
	GC_G(gc_enabled) = enable;
	if (enable && !old_enabled && GC_G(buf) == NULL) {
		GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
		GC_G(buf)[0].ref = NULL; /* slot 0 is reserved (GC_INVALID) */
		GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
		GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
		gc_reset();
	}
	return old_enabled;
}
581 
/* Report whether automatic cycle collection is currently enabled. */
ZEND_API bool gc_enabled(void)
{
	return GC_G(gc_enabled);
}
586 
gc_protect(bool protect)587 ZEND_API bool gc_protect(bool protect)
588 {
589 	bool old_protected = GC_G(gc_protected);
590 	GC_G(gc_protected) = protect;
591 	return old_protected;
592 }
593 
/* Report whether root additions are currently forbidden. */
ZEND_API bool gc_protected(void)
{
	return GC_G(gc_protected);
}
598 
/* Enlarge the root buffer: double while below GC_BUF_GROW_STEP, then grow
 * linearly by GC_BUF_GROW_STEP, capped at GC_MAX_BUF_SIZE. On the first
 * attempt to grow past the cap the collector is shut down (gc_full,
 * gc_active and gc_protected all set) with a warning; subsequent calls
 * fall through to a same-size (no-op) reallocation. */
static void gc_grow_root_buffer(void)
{
	size_t new_size;

	if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
		if (!GC_G(gc_full)) {
			zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
			GC_G(gc_active) = 1;
			GC_G(gc_protected) = 1;
			GC_G(gc_full) = 1;
			return;
		}
	}
	if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
		new_size = GC_G(buf_size) * 2;
	} else {
		new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
	}
	if (new_size > GC_MAX_BUF_SIZE) {
		new_size = GC_MAX_BUF_SIZE;
	}
	GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
	GC_G(buf_size) = new_size;
}
623 
/* Adapt the collection threshold after a run that collected 'count'
 * nodes: raise it (growing the buffer if necessary) when collections are
 * unproductive or the buffer is still saturated, lower it back toward the
 * default when collections are productive. */
static void gc_adjust_threshold(int count)
{
	uint32_t new_threshold;

	/* TODO Very simple heuristic for dynamic GC buffer resizing:
	 * If there are "too few" collections, increase the collection threshold
	 * by a fixed step */
	if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
		/* increase */
		if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
			new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
			if (new_threshold > GC_THRESHOLD_MAX) {
				new_threshold = GC_THRESHOLD_MAX;
			}
			if (new_threshold > GC_G(buf_size)) {
				gc_grow_root_buffer();
			}
			/* Only commit the new threshold if the buffer could actually
			 * accommodate it (growing may have failed at the cap). */
			if (new_threshold <= GC_G(buf_size)) {
				GC_G(gc_threshold) = new_threshold;
			}
		}
	} else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
		new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
		if (new_threshold < GC_THRESHOLD_DEFAULT) {
			new_threshold = GC_THRESHOLD_DEFAULT;
		}
		GC_G(gc_threshold) = new_threshold;
	}
}
653 
/* Slow path of gc_possible_root(): the buffer has reached the collection
 * threshold. Run a collection first (when permitted), then insert 'ref'
 * as a purple root, growing the buffer if it is truly full. */
static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
{
	uint32_t idx;
	gc_root_buffer *newRoot;

	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
	ZEND_ASSERT(GC_INFO(ref) == 0);

	if (GC_G(gc_enabled) && !GC_G(gc_active)) {
		/* Hold an extra reference so the collection cannot free 'ref'
		 * out from under us. */
		GC_ADDREF(ref);
		gc_adjust_threshold(gc_collect_cycles());
		if (UNEXPECTED(GC_DELREF(ref) == 0)) {
			/* Our temporary reference was the last one: destroy now. */
			rc_dtor_func(ref);
			return;
		} else if (UNEXPECTED(GC_INFO(ref))) {
			/* The collection already recorded 'ref' in the buffer. */
			return;
		}
	}

	if (GC_HAS_UNUSED()) {
		idx = GC_FETCH_UNUSED();
	} else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
		idx = GC_FETCH_NEXT_UNUSED();
	} else {
		gc_grow_root_buffer();
		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
			/* Growing failed (buffer at GC_MAX_BUF_SIZE): give up. */
			return;
		}
		idx = GC_FETCH_NEXT_UNUSED();
	}

	newRoot = GC_IDX2PTR(idx);
	newRoot->ref = ref; /* GC_ROOT tag is 0 */
	GC_TRACE_SET_COLOR(ref, GC_PURPLE);

	idx = gc_compress(idx);
	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
	GC_G(num_roots)++;

	GC_BENCH_INC(zval_buffered);
	GC_BENCH_INC(root_buf_length);
	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
}
697 
/* Record an array/object as a possible root of a garbage cycle: store it
 * in the root buffer and color it purple. Fast path reuses a recycled
 * slot or the next fresh slot below the threshold; otherwise the slow
 * path (collect and/or grow) takes over. No-op while the GC is
 * protected. */
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
{
	uint32_t idx;
	gc_root_buffer *newRoot;

	if (UNEXPECTED(GC_G(gc_protected))) {
		return;
	}

	GC_BENCH_INC(zval_possible_root);

	if (EXPECTED(GC_HAS_UNUSED())) {
		idx = GC_FETCH_UNUSED();
	} else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
		idx = GC_FETCH_NEXT_UNUSED();
	} else {
		gc_possible_root_when_full(ref);
		return;
	}

	/* Only arrays and objects can participate in cycles, and 'ref' must
	 * not already be in the buffer. */
	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
	ZEND_ASSERT(GC_INFO(ref) == 0);

	newRoot = GC_IDX2PTR(idx);
	newRoot->ref = ref; /* GC_ROOT tag is 0 */
	GC_TRACE_SET_COLOR(ref, GC_PURPLE);

	idx = gc_compress(idx);
	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
	GC_G(num_roots)++;

	GC_BENCH_INC(zval_buffered);
	GC_BENCH_INC(root_buf_length);
	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
}
733 
/* Add an already-colored node to the root buffer without changing its
 * color. Used during gc_scan_black for grey weakmaps/keys that must still
 * be visited by the scan phase even though they were not original roots. */
static void ZEND_FASTCALL gc_extra_root(zend_refcounted *ref)
{
	uint32_t idx;
	gc_root_buffer *newRoot;

	if (EXPECTED(GC_HAS_UNUSED())) {
		idx = GC_FETCH_UNUSED();
	} else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
		idx = GC_FETCH_NEXT_UNUSED();
	} else {
		gc_grow_root_buffer();
		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
			/* TODO: can this really happen? */
			return;
		}
		idx = GC_FETCH_NEXT_UNUSED();
	}

	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
	ZEND_ASSERT(GC_REF_ADDRESS(ref) == 0);

	newRoot = GC_IDX2PTR(idx);
	newRoot->ref = ref; /* GC_ROOT tag is 0 */

	idx = gc_compress(idx);
	/* Preserve the node's current color; only the address is new. */
	GC_REF_SET_INFO(ref, idx | GC_REF_COLOR(ref));
	GC_G(num_roots)++;

	GC_BENCH_INC(zval_buffered);
	GC_BENCH_INC(root_buf_length);
	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
}
766 
/* Remove 'ref' from the roots when its stored address is compressed:
 * first resolve the ambiguous index to the real slot, then recycle it. */
static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
{
	gc_root_buffer *root = gc_decompress(ref, idx);
	gc_remove_from_roots(root);
}
772 
/* Remove 'ref' from the root buffer (e.g. because it is being destroyed):
 * reset its color to black, clear its GC_INFO, and recycle its slot. */
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
{
	gc_root_buffer *root;
	uint32_t idx = GC_REF_ADDRESS(ref);

	GC_BENCH_INC(zval_remove_from_buffer);

	if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
		GC_TRACE_SET_COLOR(ref, GC_BLACK);
	}
	GC_REF_SET_INFO(ref, 0);

	/* Perform decompression only in case of large buffers */
	if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
		gc_remove_compressed(ref, idx);
		return;
	}

	/* Small buffer: the stored address is the exact slot index. */
	ZEND_ASSERT(idx);
	root = GC_IDX2PTR(idx);
	gc_remove_from_roots(root);
}
795 
/* Restore a live subgraph: color every node reachable from 'ref' BLACK
 * and re-increment the refcounts that gc_mark_grey speculatively
 * decremented. Iterative depth-first traversal: newly discovered children
 * go on the segmented gc_stack, except the last child of each node, which
 * is processed immediately via the 'tail_call' goto. */
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht;
	Bucket *p;
	zval *zv;
	uint32_t n;
	GC_STACK_DCL(stack);

tail_call:
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object*)ref;

		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
			zval *table;
			int len;

			/* 'obj' is used as a key in one or more WeakMaps: the buffer
			 * holds (entry, weakmap) zval pairs, hence the step of 2. */
			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
				zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
					zval *entry = (zval*) Z_PTR_P(zv);
					zval *weakmap = zv+1;
					ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
					if (Z_OPT_REFCOUNTED_P(entry)) {
						GC_UNSET_FROM_WEAKMAP_KEY(entry);
						if (GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_GREY)) {
							/* Weakmap was scanned in gc_mark_roots, we must
							 * ensure that it's eventually scanned in
							 * gc_scan_roots as well. */
							if (!GC_REF_ADDRESS(Z_COUNTED_P(weakmap))) {
								gc_extra_root(Z_COUNTED_P(weakmap));
							}
						} else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK) && */ !GC_FROM_WEAKMAP(entry)) {
							/* Both the entry weakmap and key are BLACK, so we
							 * can mark the entry BLACK as well.
							 * !GC_FROM_WEAKMAP(entry) means that the weakmap
							 * was already scanned black (or will not be
							 * scanned), so it's our responsibility to mark the
							 * entry */
							ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(weakmap), GC_BLACK));
							ref = Z_COUNTED_P(entry);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
					}
					zv+=2;
				}
			}

			/* 'obj' itself is a WeakMap: walk its (key, entry) pairs. */
			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
				zend_weakmap_get_key_entry_gc(obj, &table, &len);
				n = len;
				zv = table;
				for (; n != 0; n-=2) {
					ZEND_ASSERT(Z_TYPE_P(zv+1) == IS_PTR);
					zval *key = zv;
					zval *entry = (zval*) Z_PTR_P(zv+1);
					if (Z_OPT_REFCOUNTED_P(entry)) {
						GC_UNSET_FROM_WEAKMAP(entry);
						if (GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_GREY)) {
							/* Key was scanned in gc_mark_roots, we must
							 * ensure that it's eventually scanned in
							 * gc_scan_roots as well. */
							if (!GC_REF_ADDRESS(Z_COUNTED_P(key))) {
								gc_extra_root(Z_COUNTED_P(key));
							}
						} else if (/* GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK) && */ !GC_FROM_WEAKMAP_KEY(entry)) {
							/* Both the entry weakmap and key are BLACK, so we
							 * can mark the entry BLACK as well.
							 * !GC_FROM_WEAKMAP_KEY(entry) means that the key
							 * was already scanned black (or will not be
							 * scanned), so it's our responsibility to mark the
							 * entry */
							ZEND_ASSERT(GC_REF_CHECK_COLOR(Z_COUNTED_P(key), GC_BLACK));
							ref = Z_COUNTED_P(entry);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
					}
					zv += 2;
				}
				goto next;
			}

			/* Generic object: scan the zvals (and optional extra hash
			 * table) reported by the get_gc handler. */
			ht = obj->handlers->get_gc(obj, &table, &len);
			n = len;
			zv = table;
			if (UNEXPECTED(ht)) {
				GC_ADDREF(ht);
				if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
					GC_REF_SET_BLACK(ht);
					for (; n != 0; n--) {
						if (Z_REFCOUNTED_P(zv)) {
							ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					goto handle_ht;
				}
			}

handle_zvals:
			for (; n != 0; n--) {
				if (Z_REFCOUNTED_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_ADDREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
						GC_REF_SET_BLACK(ref);
						/* Push the remaining siblings and recurse into
						 * this child via tail_call. */
						zv++;
						while (--n) {
							if (Z_REFCOUNTED_P(zv)) {
								zend_refcounted *ref = Z_COUNTED_P(zv);
								GC_ADDREF(ref);
								if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
									GC_REF_SET_BLACK(ref);
									GC_STACK_PUSH(ref);
								}
							}
							zv++;
						}
						goto tail_call;
					}
				}
				zv++;
			}
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
		ht = (zend_array*)ref;
handle_ht:
		n = ht->nNumUsed;
		zv = ht->arPacked;
		if (HT_IS_PACKED(ht)) {
			goto handle_zvals;
		}

		p = (Bucket*)zv;
		for (; n != 0; n--) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_REFCOUNTED_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_ADDREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
					GC_REF_SET_BLACK(ref);
					/* Same sibling-push pattern as handle_zvals, but over
					 * hash buckets. */
					p++;
					while (--n) {
						zv = &p->val;
						if (Z_TYPE_P(zv) == IS_INDIRECT) {
							zv = Z_INDIRECT_P(zv);
						}
						if (Z_REFCOUNTED_P(zv)) {
							zend_refcounted *ref = Z_COUNTED_P(zv);
							GC_ADDREF(ref);
							if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
								GC_REF_SET_BLACK(ref);
								GC_STACK_PUSH(ref);
							}
						}
						p++;
					}
					goto tail_call;
				}
			}
			p++;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_ADDREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
				GC_REF_SET_BLACK(ref);
				goto tail_call;
			}
		}
	}

next:
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
994 
/*
 * MarkGray step of the cycle collector (after Bacon & Rajan's synchronous
 * cycle collection algorithm — see the header comment of this file).
 * Paints everything reachable from `ref` grey while decrementing the
 * refcount of every visited child.  After the whole mark phase, a grey
 * node whose refcount is still above zero must be referenced from outside
 * the candidate cycles; gc_scan() uses that to rescue live subgraphs.
 *
 * Traversal is iterative: the first refcounted child found in a container
 * is followed directly via `goto tail_call`; the remaining children are
 * pushed onto `stack` and drained at the `next:` label.
 */
gc_mark_grey(zend_refcounted * ref,gc_stack * stack)995 static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
996 {
997 	HashTable *ht;
998 	Bucket *p;
999 	zval *zv;
1000 	uint32_t n;
1001 	GC_STACK_DCL(stack);
1002 
1003 tail_call:
1004 	GC_BENCH_INC(zval_marked_grey);
1005 
1006 	if (GC_TYPE(ref) == IS_OBJECT) {
1007 		zend_object *obj = (zend_object*)ref;
1008 
1009 		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1010 			zval *table;
1011 			int len;
1012 
			/* Object is used as a key in one or more WeakMaps: the map
			 * entries are treated as edges from the key object.  Entries
			 * come in (value, weakmap) pairs, hence the stride of 2. */
1013 			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1014 				zend_weakmap_get_object_key_entry_gc(obj, &table, &len);
1015 				n = len;
1016 				zv = table;
1017 				for (; n != 0; n-=2) {
1018 					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1019 					zval *entry = (zval*) Z_PTR_P(zv);
1020 					zval *weakmap = zv+1;
1021 					ZEND_ASSERT(Z_REFCOUNTED_P(weakmap));
1022 					if (Z_REFCOUNTED_P(entry)) {
1023 						GC_SET_FROM_WEAKMAP_KEY(entry);
1024 						ref = Z_COUNTED_P(entry);
1025 						/* Only DELREF if the contribution from the weakmap has
1026 						 * not been cancelled yet */
1027 						if (!GC_FROM_WEAKMAP(entry)) {
1028 							GC_DELREF(ref);
1029 						}
1030 						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1031 							GC_REF_SET_COLOR(ref, GC_GREY);
1032 							GC_STACK_PUSH(ref);
1033 						}
1034 					}
1035 					zv+=2;
1036 				}
1037 			}
1038 
			/* The object itself is a WeakMap: its stored values are edges. */
1039 			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1040 				zend_weakmap_get_entry_gc(obj, &table, &len);
1041 				n = len;
1042 				zv = table;
1043 				for (; n != 0; n--) {
1044 					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1045 					zval *entry = (zval*) Z_PTR_P(zv);
1046 					if (Z_REFCOUNTED_P(entry)) {
1047 						GC_SET_FROM_WEAKMAP(entry);
1048 						ref = Z_COUNTED_P(entry);
1049 						/* Only DELREF if the contribution from the weakmap key
1050 						 * has not been cancelled yet */
1051 						if (!GC_FROM_WEAKMAP_KEY(entry)) {
1052 							GC_DELREF(ref);
1053 						}
1054 						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1055 							GC_REF_SET_COLOR(ref, GC_GREY);
1056 							GC_STACK_PUSH(ref);
1057 						}
1058 					}
1059 					zv++;
1060 				}
				/* WeakMap values fully handled; nothing else to traverse. */
1061 				goto next;
1062 			}
1063 
			/* Generic object: ask the handler for the GC member list.  It may
			 * additionally return a HashTable (e.g. dynamic properties). */
1064 			ht = obj->handlers->get_gc(obj, &table, &len);
1065 			n = len;
1066 			zv = table;
1067 			if (UNEXPECTED(ht)) {
1068 				GC_DELREF(ht);
1069 				if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1070 					GC_REF_SET_COLOR(ht, GC_GREY);
					/* mark the plain zvals first, then traverse the table */
1071 					for (; n != 0; n--) {
1072 						if (Z_REFCOUNTED_P(zv)) {
1073 							ref = Z_COUNTED_P(zv);
1074 							GC_DELREF(ref);
1075 							if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1076 								GC_REF_SET_COLOR(ref, GC_GREY);
1077 								GC_STACK_PUSH(ref);
1078 							}
1079 						}
1080 						zv++;
1081 					}
1082 					goto handle_ht;
1083 				}
1084 			}
handle_zvals:
			/* Flat zval array: follow the first unmarked child directly
			 * (tail call), push the rest onto the stack. */
1086 			for (; n != 0; n--) {
1087 				if (Z_REFCOUNTED_P(zv)) {
1088 					ref = Z_COUNTED_P(zv);
1089 					GC_DELREF(ref);
1090 					if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1091 						GC_REF_SET_COLOR(ref, GC_GREY);
1092 						zv++;
1093 						while (--n) {
1094 							if (Z_REFCOUNTED_P(zv)) {
1095 								zend_refcounted *ref = Z_COUNTED_P(zv);
1096 								GC_DELREF(ref);
1097 								if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1098 									GC_REF_SET_COLOR(ref, GC_GREY);
1099 									GC_STACK_PUSH(ref);
1100 								}
1101 							}
1102 							zv++;
1103 						}
1104 						goto tail_call;
1105 					}
1106 				}
1107 				zv++;
1108 			}
1109 		}
1110 	} else if (GC_TYPE(ref) == IS_ARRAY) {
		/* The symbol table is handled elsewhere and must never get here. */
1111 		ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
1112 		ht = (zend_array*)ref;
handle_ht:
1114 		n = ht->nNumUsed;
1115 		if (HT_IS_PACKED(ht)) {
1116             zv = ht->arPacked;
1117             goto handle_zvals;
1118 		}
1119 
1120 		p = ht->arData;
1121 		for (; n != 0; n--) {
1122 			zv = &p->val;
1123 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1124 				zv = Z_INDIRECT_P(zv);
1125 			}
1126 			if (Z_REFCOUNTED_P(zv)) {
1127 				ref = Z_COUNTED_P(zv);
1128 				GC_DELREF(ref);
1129 				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1130 					GC_REF_SET_COLOR(ref, GC_GREY);
1131 					p++;
					/* push the remaining buckets, then tail-call into the
					 * first unmarked child */
1132 					while (--n) {
1133 						zv = &p->val;
1134 						if (Z_TYPE_P(zv) == IS_INDIRECT) {
1135 							zv = Z_INDIRECT_P(zv);
1136 						}
1137 						if (Z_REFCOUNTED_P(zv)) {
1138 							zend_refcounted *ref = Z_COUNTED_P(zv);
1139 							GC_DELREF(ref);
1140 							if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1141 								GC_REF_SET_COLOR(ref, GC_GREY);
1142 								GC_STACK_PUSH(ref);
1143 							}
1144 						}
1145 						p++;
1146 					}
1147 					goto tail_call;
1148 				}
1149 			}
1150 			p++;
1151 		}
1152 	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* PHP references have exactly one child: follow it directly. */
1153 		if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1154 			ref = Z_COUNTED(((zend_reference*)ref)->val);
1155 			GC_DELREF(ref);
1156 			if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1157 				GC_REF_SET_COLOR(ref, GC_GREY);
1158 				goto tail_call;
1159 			}
1160 		}
1161 	}
1162 
next:
	/* Drain deferred children until the stack is empty. */
1164 	ref = GC_STACK_POP();
1165 	if (ref) {
1166 		goto tail_call;
1167 	}
1168 }
1169 
1170 /* Two-Finger compaction algorithm */
/*
 * Compact the root buffer so all used entries are contiguous at the
 * bottom: one finger (`free`) walks up looking for holes, the other
 * (`scan`) walks down looking for live entries, and live entries are
 * relocated into the holes.  A relocated refcounted gets its new
 * compressed buffer index written back into its GC_INFO bits, with its
 * color preserved.
 */
gc_compact(void)1171 static void gc_compact(void)
1172 {
	/* already dense? then nothing to do */
1173 	if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
1174 		if (GC_G(num_roots)) {
1175 			gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
1176 			gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
1177 			gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
1178 			uint32_t idx;
1179 			zend_refcounted *p;
1180 
1181 			while (free < scan) {
				/* advance to the next hole from the bottom ... */
1182 				while (!GC_IS_UNUSED(free->ref)) {
1183 					free++;
1184 				}
				/* ... and to the next live entry from the top */
1185 				while (GC_IS_UNUSED(scan->ref)) {
1186 					scan--;
1187 				}
1188 				if (scan > free) {
1189 					p = scan->ref;
1190 					free->ref = p;
1191 					p = GC_GET_PTR(p);
					/* store the entry's new (compressed) address in the
					 * object itself, keeping its current color bits */
1192 					idx = gc_compress(GC_PTR2IDX(free));
1193 					GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
1194 					free++;
1195 					scan--;
1196 					if (scan <= end) {
1197 						break;
1198 					}
1199 				}
1200 			}
1201 		}
		/* buffer is dense now: drop the free list, reset high-water mark */
1202 		GC_G(unused) = GC_INVALID;
1203 		GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
1204 	}
1205 }
1206 
/* Mark-phase entry point: compact the root buffer, then run the MarkGray
 * traversal (gc_mark_grey) on every purple root, re-coloring it grey
 * first so it is visited at most once. */
static void gc_mark_roots(gc_stack *stack)
{
	gc_compact();

	gc_root_buffer *end = GC_IDX2PTR(GC_G(first_unused));
	for (gc_root_buffer *buf = GC_IDX2PTR(GC_FIRST_ROOT); buf != end; buf++) {
		zend_refcounted *ref = buf->ref;
		if (GC_IS_ROOT(ref) && GC_REF_CHECK_COLOR(ref, GC_PURPLE)) {
			GC_REF_SET_COLOR(ref, GC_GREY);
			gc_mark_grey(ref, stack);
		}
	}
}
1225 
/*
 * Scan step of the cycle collector: runs on grey nodes after MarkGray.
 * A grey node whose refcount is still > 0 is referenced from outside the
 * candidate cycles — it and everything it reaches is re-blackened (with
 * refcounts restored) by gc_scan_black().  A grey node with refcount 0
 * is colored white (provisional garbage) and its children are scanned
 * in turn, iteratively via the explicit stack / tail_call goto.
 */
gc_scan(zend_refcounted * ref,gc_stack * stack)1226 static void gc_scan(zend_refcounted *ref, gc_stack *stack)
1227 {
1228 	HashTable *ht;
1229 	Bucket *p;
1230 	zval *zv;
1231 	uint32_t n;
1232 	GC_STACK_DCL(stack);
1233 
tail_call:
	/* only grey nodes are of interest; anything else was already decided */
1235 	if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1236 		goto next;
1237 	}
1238 
	/* externally referenced: rescue this whole subgraph */
1239 	if (GC_REFCOUNT(ref) > 0) {
1240 		if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1241 			GC_REF_SET_BLACK(ref);
1242 			if (UNEXPECTED(!_stack->next)) {
1243 				gc_stack_next(_stack);
1244 			}
1245 			/* Split stack and reuse the tail */
1246 			_stack->next->prev = NULL;
1247 			gc_scan_black(ref, _stack->next);
1248 			_stack->next->prev = _stack;
1249 		}
1250 		goto next;
1251 	}
1252 
1253 	if (GC_TYPE(ref) == IS_OBJECT) {
1254 		zend_object *obj = (zend_object*)ref;
1255 		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1256 			zval *table;
1257 			int len;
1258 
			/* follow weakmap entries keyed by this object as edges */
1259 			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1260 				zend_weakmap_get_object_entry_gc(obj, &table, &len);
1261 				n = len;
1262 				zv = table;
1263 				for (; n != 0; n--) {
1264 					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1265 					zval *entry = (zval*) Z_PTR_P(zv);
1266 					if (Z_OPT_REFCOUNTED_P(entry)) {
1267 						ref = Z_COUNTED_P(entry);
1268 						if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1269 							GC_REF_SET_COLOR(ref, GC_WHITE);
1270 							GC_STACK_PUSH(ref);
1271 						}
1272 					}
1273 					zv++;
1274 				}
1275 			}
1276 
			/* get_gc() may return a HashTable in addition to the zval list */
1277 			ht = obj->handlers->get_gc(obj, &table, &len);
1278 			n = len;
1279 			zv = table;
1280 			if (UNEXPECTED(ht)) {
1281 				if (GC_REF_CHECK_COLOR(ht, GC_GREY)) {
1282 					GC_REF_SET_COLOR(ht, GC_WHITE);
1283 					GC_STACK_PUSH((zend_refcounted *) ht);
1284 					for (; n != 0; n--) {
1285 						if (Z_REFCOUNTED_P(zv)) {
1286 							ref = Z_COUNTED_P(zv);
1287 							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1288 								GC_REF_SET_COLOR(ref, GC_WHITE);
1289 								GC_STACK_PUSH(ref);
1290 							}
1291 						}
1292 						zv++;
1293 					}
1294 					goto handle_ht;
1295 				}
1296 			}
1297 
handle_zvals:
			/* flat zval array: tail-call into the first grey child, push
			 * the remaining ones */
1299 			for (; n != 0; n--) {
1300 				if (Z_REFCOUNTED_P(zv)) {
1301 					ref = Z_COUNTED_P(zv);
1302 					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1303 						GC_REF_SET_COLOR(ref, GC_WHITE);
1304 						zv++;
1305 						while (--n) {
1306 							if (Z_REFCOUNTED_P(zv)) {
1307 								zend_refcounted *ref = Z_COUNTED_P(zv);
1308 								if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1309 									GC_REF_SET_COLOR(ref, GC_WHITE);
1310 									GC_STACK_PUSH(ref);
1311 								}
1312 							}
1313 							zv++;
1314 						}
1315 						goto tail_call;
1316 					}
1317 				}
1318 				zv++;
1319 			}
1320 		}
1321 	} else if (GC_TYPE(ref) == IS_ARRAY) {
1322 		ht = (HashTable *)ref;
1323 		ZEND_ASSERT(ht != &EG(symbol_table));
1324 
handle_ht:
1326 		n = ht->nNumUsed;
1327 		if (HT_IS_PACKED(ht)) {
1328             zv = ht->arPacked;
1329             goto handle_zvals;
1330 		}
1331 
1332 		p = ht->arData;
1333 		for (; n != 0; n--) {
1334 			zv = &p->val;
1335 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1336 				zv = Z_INDIRECT_P(zv);
1337 			}
1338 			if (Z_REFCOUNTED_P(zv)) {
1339 				ref = Z_COUNTED_P(zv);
1340 				if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1341 					GC_REF_SET_COLOR(ref, GC_WHITE);
1342 					p++;
1343 					while (--n) {
1344 						zv = &p->val;
1345 						if (Z_TYPE_P(zv) == IS_INDIRECT) {
1346 							zv = Z_INDIRECT_P(zv);
1347 						}
1348 						if (Z_REFCOUNTED_P(zv)) {
1349 							zend_refcounted *ref = Z_COUNTED_P(zv);
1350 							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1351 								GC_REF_SET_COLOR(ref, GC_WHITE);
1352 								GC_STACK_PUSH(ref);
1353 							}
1354 						}
1355 						p++;
1356 					}
1357 					goto tail_call;
1358 				}
1359 			}
1360 			p++;
1361 		}
1362 	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* references have a single child: follow it directly */
1363 		if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1364 			ref = Z_COUNTED(((zend_reference*)ref)->val);
1365 			if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1366 				GC_REF_SET_COLOR(ref, GC_WHITE);
1367 				goto tail_call;
1368 			}
1369 		}
1370 	}
1371 
next:
	/* drain deferred nodes */
1373 	ref = GC_STACK_POP();
1374 	if (ref) {
1375 		goto tail_call;
1376 	}
1377 }
1378 
/* Scan-phase entry point: run gc_scan() on every grey root, coloring it
 * white first.  The root buffer may be reallocated while gc_scan() runs,
 * so indices are translated to pointers on every iteration rather than
 * cached across calls. */
static void gc_scan_roots(gc_stack *stack)
{
	uint32_t i;
	const uint32_t initial_end = GC_G(first_unused);

	for (i = GC_FIRST_ROOT; i != initial_end; i++) {
		gc_root_buffer *root = GC_IDX2PTR(i);
		if (GC_IS_ROOT(root->ref)
		 && GC_REF_CHECK_COLOR(root->ref, GC_GREY)) {
			GC_REF_SET_COLOR(root->ref, GC_WHITE);
			gc_scan(root->ref, stack);
		}
	}

	/* gc_scan() may have appended extra roots; scan those as well,
	 * re-reading the (possibly updated) end index each time. */
	for (; i != GC_G(first_unused); i++) {
		gc_root_buffer *root = GC_IDX2PTR(i);
		if (GC_IS_ROOT(root->ref)
		 && GC_REF_CHECK_COLOR(root->ref, GC_GREY)) {
			GC_REF_SET_COLOR(root->ref, GC_WHITE);
			gc_scan(root->ref, stack);
		}
	}
}
1411 
/* Register `ref` in the root buffer as a garbage entry and record its
 * compressed buffer address (with color GC_BLACK) in the object's
 * GC_INFO bits.  If the buffer is full and cannot be grown, the entry
 * is silently dropped. */
static void gc_add_garbage(zend_refcounted *ref)
{
	uint32_t slot;

	if (GC_HAS_UNUSED()) {
		/* reuse a hole left by a previously removed root */
		slot = GC_FETCH_UNUSED();
	} else {
		if (!GC_HAS_NEXT_UNUSED()) {
			gc_grow_root_buffer();
			if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
				return; /* buffer could not be grown */
			}
		}
		slot = GC_FETCH_NEXT_UNUSED();
	}

	GC_IDX2PTR(slot)->ref = GC_MAKE_GARBAGE(ref);
	GC_REF_SET_INFO(ref, gc_compress(slot) | GC_BLACK);
	GC_G(num_roots)++;
}
1436 
/*
 * CollectWhite step: every node still white after scanning is part of a
 * garbage cycle.  Re-blacken it, restore the refcounts that MarkGray
 * removed (GC_ADDREF mirrors gc_mark_grey's GC_DELREF), register it in
 * the root buffer as garbage (gc_add_garbage) and count it.  Weakmap
 * refcount contributions recorded during marking are cancelled here.
 * Sets GC_HAS_DESTRUCTORS in *flags when an uncalled user destructor is
 * found.  Returns the number of collected values (plain references are
 * excluded from the count, see below).
 */
gc_collect_white(zend_refcounted * ref,uint32_t * flags,gc_stack * stack)1437 static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
1438 {
1439 	int count = 0;
1440 	HashTable *ht;
1441 	Bucket *p;
1442 	zval *zv;
1443 	uint32_t n;
1444 	GC_STACK_DCL(stack);
1445 
tail_call:
1447 	/* don't count references for compatibility ??? */
1448 	if (GC_TYPE(ref) != IS_REFERENCE) {
1449 		count++;
1450 	}
1451 
1452 	if (GC_TYPE(ref) == IS_OBJECT) {
1453 		zend_object *obj = (zend_object*)ref;
1454 
1455 		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1456 			int len;
1457 			zval *table;
1458 
1459 			/* optimization: color is GC_BLACK (0) */
1460 			if (!GC_INFO(ref)) {
1461 				gc_add_garbage(ref);
1462 			}
			/* object has a (potential) user destructor not yet invoked */
1463 			if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
1464 			 && (obj->handlers->dtor_obj != zend_objects_destroy_object
1465 			  || obj->ce->destructor != NULL)) {
1466 				*flags |= GC_HAS_DESTRUCTORS;
1467 			}
1468 
			/* cancel the weakmap-key refcount contributions recorded by
			 * gc_mark_grey() for entries keyed by this object */
1469 			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1470 				zend_weakmap_get_object_entry_gc(obj, &table, &len);
1471 				n = len;
1472 				zv = table;
1473 				for (; n != 0; n--) {
1474 					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1475 					zval *entry = (zval*) Z_PTR_P(zv);
1476 					if (Z_REFCOUNTED_P(entry) && GC_FROM_WEAKMAP_KEY(entry)) {
1477 						GC_UNSET_FROM_WEAKMAP_KEY(entry);
1478 						GC_UNSET_FROM_WEAKMAP(entry);
1479 						ref = Z_COUNTED_P(entry);
1480 						GC_ADDREF(ref);
1481 						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1482 							GC_REF_SET_BLACK(ref);
1483 							GC_STACK_PUSH(ref);
1484 						}
1485 					}
1486 					zv++;
1487 				}
1488 			}
1489 
			/* this object IS a WeakMap: cancel the value-side contributions */
1490 			if (UNEXPECTED(obj->handlers->get_gc == zend_weakmap_get_gc)) {
1491 				zend_weakmap_get_entry_gc(obj, &table, &len);
1492 				n = len;
1493 				zv = table;
1494 				for (; n != 0; n--) {
1495 					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1496 					zval *entry = (zval*) Z_PTR_P(zv);
1497 					if (Z_REFCOUNTED_P(entry) && GC_FROM_WEAKMAP(entry)) {
1498 						GC_UNSET_FROM_WEAKMAP_KEY(entry);
1499 						GC_UNSET_FROM_WEAKMAP(entry);
1500 						ref = Z_COUNTED_P(entry);
1501 						GC_ADDREF(ref);
1502 						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1503 							GC_REF_SET_BLACK(ref);
1504 							GC_STACK_PUSH(ref);
1505 						}
1506 					}
1507 					zv++;
1508 				}
1509 				goto next;
1510 			}
1511 
1512 			ht = obj->handlers->get_gc(obj, &table, &len);
1513 			n = len;
1514 			zv = table;
1515 			if (UNEXPECTED(ht)) {
1516 				GC_ADDREF(ht);
1517 				if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) {
1518 					GC_REF_SET_BLACK(ht);
1519 					for (; n != 0; n--) {
1520 						if (Z_REFCOUNTED_P(zv)) {
1521 							ref = Z_COUNTED_P(zv);
1522 							GC_ADDREF(ref);
1523 							if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1524 								GC_REF_SET_BLACK(ref);
1525 								GC_STACK_PUSH(ref);
1526 							}
1527 						}
1528 						zv++;
1529 					}
1530 					goto handle_ht;
1531 				}
1532 			}
1533 
handle_zvals:
			/* flat zval array: tail-call into the first white child, push
			 * the remaining ones */
1535 			for (; n != 0; n--) {
1536 				if (Z_REFCOUNTED_P(zv)) {
1537 					ref = Z_COUNTED_P(zv);
1538 					GC_ADDREF(ref);
1539 					if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1540 						GC_REF_SET_BLACK(ref);
1541 						zv++;
1542 						while (--n) {
1543 							if (Z_REFCOUNTED_P(zv)) {
1544 								zend_refcounted *ref = Z_COUNTED_P(zv);
1545 								GC_ADDREF(ref);
1546 								if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1547 									GC_REF_SET_BLACK(ref);
1548 									GC_STACK_PUSH(ref);
1549 								}
1550 							}
1551 							zv++;
1552 						}
1553 						goto tail_call;
1554 					}
1555 				}
1556 				zv++;
1557 			}
1558 		}
1559 	} else if (GC_TYPE(ref) == IS_ARRAY) {
1560 		/* optimization: color is GC_BLACK (0) */
1561 		if (!GC_INFO(ref)) {
1562 			gc_add_garbage(ref);
1563 		}
1564 		ht = (zend_array*)ref;
1565 
handle_ht:
1567 		n = ht->nNumUsed;
1568 		if (HT_IS_PACKED(ht)) {
1569 			zv = ht->arPacked;
1570 			goto handle_zvals;
1571 		}
1572 
1573 		p = ht->arData;
1574 		for (; n != 0; n--) {
1575 			zv = &p->val;
1576 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1577 				zv = Z_INDIRECT_P(zv);
1578 			}
1579 			if (Z_REFCOUNTED_P(zv)) {
1580 				ref = Z_COUNTED_P(zv);
1581 				GC_ADDREF(ref);
1582 				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1583 					GC_REF_SET_BLACK(ref);
1584 					p++;
1585 					while (--n) {
1586 						zv = &p->val;
1587 						if (Z_TYPE_P(zv) == IS_INDIRECT) {
1588 							zv = Z_INDIRECT_P(zv);
1589 						}
1590 						if (Z_REFCOUNTED_P(zv)) {
1591 							zend_refcounted *ref = Z_COUNTED_P(zv);
1592 							GC_ADDREF(ref);
1593 							if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1594 								GC_REF_SET_BLACK(ref);
1595 								GC_STACK_PUSH(ref);
1596 							}
1597 						}
1598 						p++;
1599 					}
1600 					goto tail_call;
1601 				}
1602 			}
1603 			p++;
1604 		}
1605 	} else if (GC_TYPE(ref) == IS_REFERENCE) {
1606 		if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1607 			ref = Z_COUNTED(((zend_reference*)ref)->val);
1608 			GC_ADDREF(ref);
1609 			if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1610 				GC_REF_SET_BLACK(ref);
1611 				goto tail_call;
1612 			}
1613 		}
1614 	}
1615 
next:
1617 	ref = GC_STACK_POP();
1618 	if (ref) {
1619 		goto tail_call;
1620 	}
1621 
1622 	return count;
1623 }
1624 
/*
 * Finish the mark/scan phases: drop every root that ended up black
 * (proven live) from the buffer, compact it, then run CollectWhite on
 * the surviving roots, converting their buffer entries into garbage
 * entries.  Returns the number of values found in garbage cycles.
 */
gc_collect_roots(uint32_t * flags,gc_stack * stack)1625 static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
1626 {
1627 	uint32_t idx, end;
1628 	zend_refcounted *ref;
1629 	int count = 0;
1630 	gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1631 	gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1632 
1633 	/* remove non-garbage from the list */
1634 	while (current != last) {
1635 		if (GC_IS_ROOT(current->ref)) {
1636 			if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
1637 				GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
1638 				gc_remove_from_roots(current);
1639 			}
1640 		}
1641 		current++;
1642 	}
1643 
1644 	gc_compact();
1645 
1646 	/* Root buffer might be reallocated during gc_collect_white,
1647 	 * make sure to reload pointers. */
1648 	idx = GC_FIRST_ROOT;
1649 	end = GC_G(first_unused);
1650 	while (idx != end) {
1651 		current = GC_IDX2PTR(idx);
1652 		ref = current->ref;
1653 		ZEND_ASSERT(GC_IS_ROOT(ref));
		/* after compaction every remaining entry is a cycle member:
		 * re-tag it as garbage for the destruction phase */
1654 		current->ref = GC_MAKE_GARBAGE(ref);
1655 		if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1656 			GC_REF_SET_BLACK(ref);
1657 			count += gc_collect_white(ref, flags, stack);
1658 		}
1659 		idx++;
1660 	}
1661 
1662 	return count;
1663 }
1664 
/*
 * Remove everything reachable from `ref` from the root buffer.  Used
 * before calling destructors: the destructor targets themselves stay in
 * the buffer (they were re-colored purple by the caller), but their
 * nested data must not be destroyed by this GC run, since a destructor
 * may make it externally visible again.  `root` is non-NULL only for the
 * top-level call on the destructor target itself.  Returns the number
 * of entries removed (including one for the root), which the caller
 * subtracts from the collected count.
 */
gc_remove_nested_data_from_buffer(zend_refcounted * ref,gc_root_buffer * root,gc_stack * stack)1665 static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
1666 {
1667 	HashTable *ht;
1668 	Bucket *p;
1669 	zval *zv;
1670 	uint32_t n;
1671 	int count = 0;
1672 	GC_STACK_DCL(stack);
1673 
tail_call:
	/* three cases: the dtor root itself (count but keep in buffer), a
	 * black in-buffer node (remove and traverse), anything else (skip) */
1675 	if (root) {
1676 		root = NULL;
1677 		count++;
1678 	} else if (GC_REF_ADDRESS(ref) != 0
1679 	 && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1680 		GC_TRACE_REF(ref, "removing from buffer");
1681 		GC_REMOVE_FROM_BUFFER(ref);
1682 		count++;
1683 	} else if (GC_TYPE(ref) == IS_REFERENCE) {
1684 		if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1685 			ref = Z_COUNTED(((zend_reference*)ref)->val);
1686 			goto tail_call;
1687 		}
1688 		goto next;
1689 	} else {
1690 		goto next;
1691 	}
1692 
1693 	if (GC_TYPE(ref) == IS_OBJECT) {
1694 		zend_object *obj = (zend_object*)ref;
1695 
1696 		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1697 			int len;
1698 			zval *table;
1699 
			/* weakmap entries keyed by this object count as nested data */
1700 			if (UNEXPECTED(GC_FLAGS(obj) & IS_OBJ_WEAKLY_REFERENCED)) {
1701 				zend_weakmap_get_object_entry_gc(obj, &table, &len);
1702 				n = len;
1703 				zv = table;
1704 				for (; n != 0; n--) {
1705 					ZEND_ASSERT(Z_TYPE_P(zv) == IS_PTR);
1706 					zval *entry = (zval*) Z_PTR_P(zv);
1707 					if (Z_OPT_REFCOUNTED_P(entry)) {
1708 						ref = Z_COUNTED_P(entry);
1709 						GC_STACK_PUSH(ref);
1710 					}
1711 					zv++;
1712 				}
1713 			}
1714 
1715 			ht = obj->handlers->get_gc(obj, &table, &len);
1716 			n = len;
1717 			zv = table;
1718 			if (UNEXPECTED(ht)) {
1719 				for (; n != 0; n--) {
1720 					if (Z_REFCOUNTED_P(zv)) {
1721 						ref = Z_COUNTED_P(zv);
1722 						GC_STACK_PUSH(ref);
1723 					}
1724 					zv++;
1725 				}
				/* the extra table itself may also sit in the buffer */
1726 				if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
1727 					GC_TRACE_REF(ht, "removing from buffer");
1728 					GC_REMOVE_FROM_BUFFER(ht);
1729 				}
1730 				goto handle_ht;
1731 			}
1732 
handle_zvals:
1734 			for (; n != 0; n--) {
1735 				if (Z_REFCOUNTED_P(zv)) {
1736 					ref = Z_COUNTED_P(zv);
1737 					zv++;
1738 					while (--n) {
1739 						if (Z_REFCOUNTED_P(zv)) {
1740 							zend_refcounted *ref = Z_COUNTED_P(zv);
1741 							GC_STACK_PUSH(ref);
1742 						}
1743 						zv++;
1744 					}
1745 					goto tail_call;
1746 				}
1747 				zv++;
1748 			}
1749 		}
1750 	} else if (GC_TYPE(ref) == IS_ARRAY) {
1751 		ht = (zend_array*)ref;
1752 
handle_ht:
1754 		n = ht->nNumUsed;
1755 		if (HT_IS_PACKED(ht)) {
1756 			zv = ht->arPacked;
1757 			goto handle_zvals;
1758 		}
1759 
1760 		p = ht->arData;
1761 		for (; n != 0; n--) {
1762 			zv = &p->val;
1763 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1764 				zv = Z_INDIRECT_P(zv);
1765 			}
1766 			if (Z_REFCOUNTED_P(zv)) {
1767 				ref = Z_COUNTED_P(zv);
1768 				p++;
1769 				while (--n) {
1770 					zv = &p->val;
1771 					if (Z_TYPE_P(zv) == IS_INDIRECT) {
1772 						zv = Z_INDIRECT_P(zv);
1773 					}
1774 					if (Z_REFCOUNTED_P(zv)) {
1775 						zend_refcounted *ref = Z_COUNTED_P(zv);
1776 						GC_STACK_PUSH(ref);
1777 					}
1778 					p++;
1779 				}
1780 				goto tail_call;
1781 			}
1782 			p++;
1783 		}
1784 	}
1785 
next:
1787 	ref = GC_STACK_POP();
1788 	if (ref) {
1789 		goto tail_call;
1790 	}
1791 
1792 	return count;
1793 }
1794 
1795 static void zend_get_gc_buffer_release(void);
1796 static void zend_gc_check_root_tmpvars(void);
1797 static void zend_gc_remove_root_tmpvars(void);
1798 
1799 static zend_internal_function gc_destructor_fiber;
1800 
/* Fatal (noreturn) error: the fiber object used to run destructors
 * could not be created. */
gc_create_destructor_fiber_error(void)1801 static ZEND_COLD ZEND_NORETURN void gc_create_destructor_fiber_error(void)
1802 {
1803 	zend_error_noreturn(E_ERROR, "Unable to create destructor fiber");
1804 }
1805 
/* Fatal (noreturn) error: the destructor fiber could not be started. */
gc_start_destructor_fiber_error(void)1806 static ZEND_COLD ZEND_NORETURN void gc_start_destructor_fiber_error(void)
1807 {
1808 	zend_error_noreturn(E_ERROR, "Unable to start destructor fiber");
1809 }
1810 
/*
 * Invoke the destructor of every DTOR_GARBAGE entry in buffer index
 * range [idx, end).  When running inside the dedicated destructor fiber,
 * `fiber` is non-NULL and GC_G(dtor_idx) records progress so that a
 * replacement fiber can resume where this one was suspended; in that
 * case FAILURE is returned after a suspension is detected.  Returns
 * SUCCESS when all destructors were run to completion.
 */
gc_call_destructors(uint32_t idx,uint32_t end,zend_fiber * fiber)1811 static zend_always_inline zend_result gc_call_destructors(uint32_t idx, uint32_t end, zend_fiber *fiber)
1812 {
1813 	gc_root_buffer *current;
1814 	zend_refcounted *p;
1815 
1816 	/* The root buffer might be reallocated during destructors calls,
1817 	 * make sure to reload pointers as necessary. */
1818 	while (idx != end) {
1819 		current = GC_IDX2PTR(idx);
1820 		if (GC_IS_DTOR_GARBAGE(current->ref)) {
1821 			p = GC_GET_PTR(current->ref);
1822 			/* Mark this is as a normal root for the next GC run */
1823 			current->ref = p;
1824 			/* Double check that the destructor hasn't been called yet. It
1825 			 * could have already been invoked indirectly by some other
1826 			 * destructor. */
1827 			if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1828 				if (fiber != NULL) {
					/* record progress before the dtor may suspend us */
1829 					GC_G(dtor_idx) = idx;
1830 				}
1831 				zend_object *obj = (zend_object*)p;
1832 				GC_TRACE_REF(obj, "calling destructor");
1833 				GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
				/* hold a reference across the call so the object survives it */
1834 				GC_ADDREF(obj);
1835 				obj->handlers->dtor_obj(obj);
1836 				GC_TRACE_REF(obj, "returned from destructor");
1837 				GC_DELREF(obj);
1838 				if (UNEXPECTED(fiber != NULL && GC_G(dtor_fiber) != fiber)) {
1839 					/* We resumed after suspension */
1840 					gc_check_possible_root((zend_refcounted*)&obj->gc);
1841 					return FAILURE;
1842 				}
1843 			}
1844 		}
1845 		idx++;
1846 	}
1847 
1848 	return SUCCESS;
1849 }
1850 
/*
 * Create and start a new fiber whose body is gc_destructor_fiber (the
 * internal function declared above); the fiber runs the pending GC
 * destructors.  Aborts with a fatal error if creation or start fails.
 * NOTE(review): GC_G(dtor_fiber) is assigned before zend_fiber_start(),
 * presumably so the fiber body can identify itself as the active
 * destructor fiber — confirm against gc_destructor_fiber's definition.
 */
gc_create_destructor_fiber(void)1851 static zend_fiber *gc_create_destructor_fiber(void)
1852 {
1853 	zval zobj;
1854 	zend_fiber *fiber;
1855 
1856 	GC_TRACE("starting destructor fiber");
1857 
1858 	if (UNEXPECTED(object_init_ex(&zobj, zend_ce_fiber) == FAILURE)) {
1859 		gc_create_destructor_fiber_error();
1860 	}
1861 
1862 	fiber = (zend_fiber *)Z_OBJ(zobj);
1863 	fiber->fci.size = sizeof(fiber->fci);
1864 	fiber->fci_cache.function_handler = (zend_function*) &gc_destructor_fiber;
1865 
1866 	GC_G(dtor_fiber) = fiber;
1867 
1868 	if (UNEXPECTED(zend_fiber_start(fiber, NULL) == FAILURE)) {
1869 		gc_start_destructor_fiber_error();
1870 	}
1871 
1872 	return fiber;
1873 }
1874 
/*
 * Run the pending GC destructors inside a dedicated fiber so that a
 * destructor which suspends (e.g. via Fiber::suspend()) cannot wedge
 * the collector: whenever the fiber is suspended by a destructor, a
 * fresh fiber is created to continue with the remaining destructors
 * (progress is tracked via GC_G(dtor_idx)/GC_G(dtor_end)).
 */
gc_call_destructors_in_fiber(uint32_t end)1875 static zend_never_inline void gc_call_destructors_in_fiber(uint32_t end)
1876 {
1877 	ZEND_ASSERT(!GC_G(dtor_fiber_running));
1878 
1879 	zend_fiber *fiber = GC_G(dtor_fiber);
1880 
1881 	GC_G(dtor_idx) = GC_FIRST_ROOT;
1882 	GC_G(dtor_end) = GC_G(first_unused);
1883 
1884 	if (UNEXPECTED(!fiber)) {
1885 		fiber = gc_create_destructor_fiber();
1886 	} else {
		/* reuse the fiber left over from a previous collection */
1887 		zend_fiber_resume(fiber, NULL, NULL);
1888 	}
1889 
1890 	for (;;) {
1891 		/* At this point, fiber has executed until suspension */
1892 		GC_TRACE("resumed from destructor fiber");
1893 
1894 		if (UNEXPECTED(GC_G(dtor_fiber_running))) {
1895 			/* Fiber was suspended by a destructor. Start a new one for the
1896 			 * remaining destructors. */
1897 			GC_TRACE("destructor fiber suspended by destructor");
1898 			GC_G(dtor_fiber) = NULL;
1899 			GC_G(dtor_idx)++;
1900 			/* We do not own the fiber anymore. It may be collected if the
1901 			 * application does not reference it. */
1902 			zend_object_release(&fiber->std);
1903 			fiber = gc_create_destructor_fiber();
1904 			continue;
1905 		} else {
1906 			/* Fiber suspended itself after calling all destructors */
1907 			GC_TRACE("destructor fiber suspended itself");
1908 			break;
1909 		}
1910 	}
1911 }
1912 
/*
 * Run a full cycle collection: mark roots grey (MarkGray), scan them
 * (rescuing externally-referenced subgraphs), collect the white nodes,
 * call pending destructors (and re-run GC once afterwards, since
 * destructors may create new references), then destroy and free the
 * garbage.  Returns the total number of collected values.
 */
zend_gc_collect_cycles(void)1913 ZEND_API int zend_gc_collect_cycles(void)
1914 {
1915 	int total_count = 0;
1916 	bool should_rerun_gc = 0;
1917 	bool did_rerun_gc = 0;
1918 
1919 	zend_hrtime_t start_time = zend_hrtime();
1920 	if (GC_G(num_roots) && GC_G(gc_active)) {
1921 		zend_gc_remove_root_tmpvars();
1922 	}
1923 
rerun_gc:
1925 	if (GC_G(num_roots)) {
1926 		int count;
1927 		gc_root_buffer *current, *last;
1928 		zend_refcounted *p;
1929 		uint32_t gc_flags = 0;
1930 		uint32_t idx, end;
1931 		gc_stack stack;
1932 
1933 		stack.prev = NULL;
1934 		stack.next = NULL;
1935 
		/* no re-entrant collection */
1936 		if (GC_G(gc_active)) {
1937 			GC_G(collector_time) += zend_hrtime() - start_time;
1938 			return 0;
1939 		}
1940 
1941 		GC_TRACE("Collecting cycles");
1942 		GC_G(gc_runs)++;
1943 		GC_G(gc_active) = 1;
1944 
1945 		GC_TRACE("Marking roots");
1946 		gc_mark_roots(&stack);
1947 		GC_TRACE("Scanning roots");
1948 		gc_scan_roots(&stack);
1949 
1950 		GC_TRACE("Collecting roots");
1951 		count = gc_collect_roots(&gc_flags, &stack);
1952 
1953 		if (!GC_G(num_roots)) {
1954 			/* nothing to free */
1955 			GC_TRACE("Nothing to free");
1956 			gc_stack_free(&stack);
1957 			GC_G(gc_active) = 0;
1958 			goto finish;
1959 		}
1960 
1961 		end = GC_G(first_unused);
1962 
1963 		if (gc_flags & GC_HAS_DESTRUCTORS) {
1964 			GC_TRACE("Calling destructors");
1965 
1966 			/* During a destructor call, new externally visible references to nested data may
1967 			 * be introduced. These references can be introduced in a way that does not
1968 			 * modify any refcounts, so we have no real way to detect this situation
1969 			 * short of rerunning full GC tracing. What we do instead is to only run
1970 			 * destructors at this point and automatically re-run GC afterwards. */
1971 			should_rerun_gc = 1;
1972 
1973 			/* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
1974 			 * color them purple. This serves a double purpose: First, they should be
1975 			 * considered new potential roots for the next GC run. Second, it will prevent
1976 			 * their removal from the root buffer by nested data removal. */
1977 			idx = GC_FIRST_ROOT;
1978 			current = GC_IDX2PTR(GC_FIRST_ROOT);
1979 			while (idx != end) {
1980 				if (GC_IS_GARBAGE(current->ref)) {
1981 					p = GC_GET_PTR(current->ref);
1982 					if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1983 						zend_object *obj = (zend_object *) p;
1984 						if (obj->handlers->dtor_obj != zend_objects_destroy_object
1985 							|| obj->ce->destructor) {
1986 							current->ref = GC_MAKE_DTOR_GARBAGE(obj);
1987 							GC_REF_SET_COLOR(obj, GC_PURPLE);
1988 						} else {
							/* default dtor and no user destructor: nothing to call */
1989 							GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
1990 						}
1991 					}
1992 				}
1993 				current++;
1994 				idx++;
1995 			}
1996 
1997 			/* Remove nested data for objects on which a destructor will be called.
1998 			 * This will not remove the objects themselves, as they have been colored
1999 			 * purple. */
2000 			idx = GC_FIRST_ROOT;
2001 			current = GC_IDX2PTR(GC_FIRST_ROOT);
2002 			while (idx != end) {
2003 				if (GC_IS_DTOR_GARBAGE(current->ref)) {
2004 					p = GC_GET_PTR(current->ref);
2005 					count -= gc_remove_nested_data_from_buffer(p, current, &stack);
2006 				}
2007 				current++;
2008 				idx++;
2009 			}
2010 
2011 			/* Actually call destructors. */
2012 			zend_hrtime_t dtor_start_time = zend_hrtime();
2013 			if (EXPECTED(!EG(active_fiber))) {
2014 				gc_call_destructors(GC_FIRST_ROOT, end, NULL);
2015 			} else {
				/* already inside a fiber: run dtors in a dedicated fiber */
2016 				gc_call_destructors_in_fiber(end);
2017 			}
2018 			GC_G(dtor_time) += zend_hrtime() - dtor_start_time;
2019 
2020 			if (GC_G(gc_protected)) {
2021 				/* something went wrong */
2022 				zend_get_gc_buffer_release();
2023 				GC_G(collector_time) += zend_hrtime() - start_time;
2024 				return 0;
2025 			}
2026 		}
2027 
2028 		gc_stack_free(&stack);
2029 
2030 		/* Destroy zvals. The root buffer may be reallocated. */
2031 		GC_TRACE("Destroying zvals");
2032 		zend_hrtime_t free_start_time = zend_hrtime();
2033 		idx = GC_FIRST_ROOT;
2034 		while (idx != end) {
2035 			current = GC_IDX2PTR(idx);
2036 			if (GC_IS_GARBAGE(current->ref)) {
2037 				p = GC_GET_PTR(current->ref);
2038 				GC_TRACE_REF(p, "destroying");
2039 				if (GC_TYPE(p) == IS_OBJECT) {
2040 					zend_object *obj = (zend_object*)p;
2041 
2042 					EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
2043 					GC_TYPE_INFO(obj) = GC_NULL |
2044 						(GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
2045 					/* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
2046 					current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
2047 					if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
2048 						GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
2049 						GC_ADDREF(obj);
2050 						obj->handlers->free_obj(obj);
2051 						GC_DELREF(obj);
2052 					}
2053 
2054 					ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
2055 				} else if (GC_TYPE(p) == IS_ARRAY) {
2056 					zend_array *arr = (zend_array*)p;
2057 
2058 					GC_TYPE_INFO(arr) = GC_NULL |
2059 						(GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);
2060 
2061 					/* GC may destroy arrays with rc>1. This is valid and safe. */
2062 					HT_ALLOW_COW_VIOLATION(arr);
2063 
2064 					zend_hash_destroy(arr);
2065 				}
2066 			}
2067 			idx++;
2068 		}
2069 
2070 		/* Free objects */
2071 		current = GC_IDX2PTR(GC_FIRST_ROOT);
2072 		last = GC_IDX2PTR(end);
2073 		while (current != last) {
2074 			if (GC_IS_GARBAGE(current->ref)) {
2075 				p = GC_GET_PTR(current->ref);
2076 				GC_LINK_UNUSED(current);
2077 				GC_G(num_roots)--;
2078 				efree(p);
2079 			}
2080 			current++;
2081 		}
2082 
2083 		GC_G(free_time) += zend_hrtime() - free_start_time;
2084 
2085 		GC_TRACE("Collection finished");
2086 		GC_G(collected) += count;
2087 		total_count += count;
2088 		GC_G(gc_active) = 0;
2089 	}
2090 
2091 	gc_compact();
2092 
2093 	/* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
2094 	 * up. We do this only once: If we encounter more destructors on the second run, we'll not
2095 	 * run GC another time. */
2096 	if (should_rerun_gc && !did_rerun_gc) {
2097 		did_rerun_gc = 1;
2098 		goto rerun_gc;
2099 	}
2100 
finish:
2102 	zend_get_gc_buffer_release();
2103 
2104 	/* Prevent GC from running during zend_gc_check_root_tmpvars, before
2105 	 * gc_threshold is adjusted, as this may result in unbounded recursion */
2106 	GC_G(gc_active) = 1;
2107 	zend_gc_check_root_tmpvars();
2108 	GC_G(gc_active) = 0;
2109 
2110 	GC_G(collector_time) += zend_hrtime() - start_time;
2111 	return total_count;
2112 }
2113 
zend_gc_get_status(zend_gc_status * status)2114 ZEND_API void zend_gc_get_status(zend_gc_status *status)
2115 {
2116 	status->active = GC_G(gc_active);
2117 	status->gc_protected = GC_G(gc_protected);
2118 	status->full = GC_G(gc_full);
2119 	status->runs = GC_G(gc_runs);
2120 	status->collected = GC_G(collected);
2121 	status->threshold = GC_G(gc_threshold);
2122 	status->buf_size = GC_G(buf_size);
2123 	status->num_roots = GC_G(num_roots);
2124 	status->application_time = zend_hrtime() - GC_G(activated_at);
2125 	status->collector_time = GC_G(collector_time);
2126 	status->dtor_time = GC_G(dtor_time);
2127 	status->free_time = GC_G(free_time);
2128 }
2129 
zend_get_gc_buffer_create(void)2130 ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
2131 	/* There can only be one get_gc() call active at a time,
2132 	 * so there only needs to be one buffer. */
2133 	zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2134 	gc_buffer->cur = gc_buffer->start;
2135 	return gc_buffer;
2136 }
2137 
zend_get_gc_buffer_grow(zend_get_gc_buffer * gc_buffer)2138 ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
2139 	size_t old_capacity = gc_buffer->end - gc_buffer->start;
2140 	size_t new_capacity = old_capacity == 0 ? 64 : old_capacity * 2;
2141 	gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval));
2142 	gc_buffer->end = gc_buffer->start + new_capacity;
2143 	gc_buffer->cur = gc_buffer->start + old_capacity;
2144 }
2145 
zend_get_gc_buffer_release(void)2146 static void zend_get_gc_buffer_release(void) {
2147 	zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
2148 	efree(gc_buffer->start);
2149 	gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
2150 }
2151 
2152 /* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
2153  * cycles. However, there are some rare exceptions where this is possible, in which case we rely
2154  * on the producing code to root the value. If a GC run occurs between the rooting and consumption
2155  * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
zend_gc_check_root_tmpvars(void)2156 static void zend_gc_check_root_tmpvars(void) {
2157 	zend_execute_data *ex = EG(current_execute_data);
2158 	for (; ex; ex = ex->prev_execute_data) {
2159 		zend_function *func = ex->func;
2160 		if (!func || !ZEND_USER_CODE(func->type)) {
2161 			continue;
2162 		}
2163 
2164 		uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2165 		for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2166 			const zend_live_range *range = &func->op_array.live_range[i];
2167 			if (range->start > op_num) {
2168 				break;
2169 			}
2170 			if (range->end <= op_num) {
2171 				continue;
2172 			}
2173 
2174 			uint32_t kind = range->var & ZEND_LIVE_MASK;
2175 			if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2176 				uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2177 				zval *var = ZEND_CALL_VAR(ex, var_num);
2178 				if (Z_REFCOUNTED_P(var)) {
2179 					gc_check_possible_root(Z_COUNTED_P(var));
2180 				}
2181 			}
2182 		}
2183 	}
2184 }
2185 
zend_gc_remove_root_tmpvars(void)2186 static void zend_gc_remove_root_tmpvars(void) {
2187 	zend_execute_data *ex = EG(current_execute_data);
2188 	for (; ex; ex = ex->prev_execute_data) {
2189 		zend_function *func = ex->func;
2190 		if (!func || !ZEND_USER_CODE(func->type)) {
2191 			continue;
2192 		}
2193 
2194 		uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
2195 		for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
2196 			const zend_live_range *range = &func->op_array.live_range[i];
2197 			if (range->start > op_num) {
2198 				break;
2199 			}
2200 			if (range->end <= op_num) {
2201 				continue;
2202 			}
2203 
2204 			uint32_t kind = range->var & ZEND_LIVE_MASK;
2205 			if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
2206 				uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
2207 				zval *var = ZEND_CALL_VAR(ex, var_num);
2208 				if (Z_REFCOUNTED_P(var)) {
2209 					GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var));
2210 				}
2211 			}
2212 		}
2213 	}
2214 }
2215 
2216 #if GC_BENCH
/* Dump the GC_BENCH instrumentation counters (collector runs, collected
 * cycles, root buffer usage, and per-operation zval statistics) to stderr.
 * Only compiled when GC_BENCH is enabled. */
void gc_bench_print(void)
{
	fprintf(stderr, "GC Statistics\n");
	fprintf(stderr, "-------------\n");
	fprintf(stderr, "Runs:               %d\n", GC_G(gc_runs));
	fprintf(stderr, "Collected:          %d\n", GC_G(collected));
	fprintf(stderr, "Root buffer length: %d\n", GC_G(root_buf_length));
	fprintf(stderr, "Root buffer peak:   %d\n\n", GC_G(root_buf_peak));
	fprintf(stderr, "      Possible            Remove from  Marked\n");
	fprintf(stderr, "        Root    Buffered     buffer     grey\n");
	fprintf(stderr, "      --------  --------  -----------  ------\n");
	fprintf(stderr, "ZVAL  %8d  %8d  %9d  %8d\n", GC_G(zval_possible_root), GC_G(zval_buffered), GC_G(zval_remove_from_buffer), GC_G(zval_marked_grey));
}
2230 #endif
2231 
2232 #ifdef ZTS
/* Return the size of the per-thread GC globals structure (ZTS builds only). */
size_t zend_gc_globals_size(void)
{
	return sizeof(zend_gc_globals);
}
2237 #endif
2238 
/* Body of the dedicated destructor fiber: repeatedly runs the pending
 * garbage destructors in the range GC_G(dtor_idx)..GC_G(dtor_end), then
 * suspends itself until the next GC run resumes it. Returns only when a
 * destructor suspended the fiber from the outside (gc_call_destructors()
 * reports FAILURE), or when the fiber is torn down during shutdown. */
static ZEND_FUNCTION(gc_destructor_fiber)
{
	uint32_t idx, end;

	zend_fiber *fiber = GC_G(dtor_fiber);
	ZEND_ASSERT(fiber != NULL);
	ZEND_ASSERT(fiber == EG(active_fiber));

	for (;;) {
		GC_G(dtor_fiber_running) = true;

		/* Snapshot the work range for this pass. */
		idx = GC_G(dtor_idx);
		end = GC_G(dtor_end);
		if (UNEXPECTED(gc_call_destructors(idx, end, fiber) == FAILURE)) {
			/* We resumed after being suspended by a destructor */
			return;
		}

		/* We have called all destructors. Suspend fiber until the next GC run
		 */
		GC_G(dtor_fiber_running) = false;
		zend_fiber_suspend(fiber, NULL, NULL);

		if (UNEXPECTED(fiber->flags & ZEND_FIBER_FLAG_DESTROYED)) {
			/* Fiber is being destroyed by shutdown sequence */
			if (GC_G(dtor_fiber) == fiber) {
				GC_G(dtor_fiber) = NULL;
			}
			/* Drop our reference and re-register the fiber object as a
			 * possible root, since it may itself participate in a cycle. */
			GC_DELREF(&fiber->std);
			gc_check_possible_root((zend_refcounted*)&fiber->std.gc);
			return;
		}
	}
}
2273 
/* Internal function descriptor backing the destructor fiber; its
 * function_name is set (interned) in gc_init(). */
static zend_internal_function gc_destructor_fiber = {
	.type = ZEND_INTERNAL_FUNCTION,
	.fn_flags = ZEND_ACC_PUBLIC,
	.handler = ZEND_FN(gc_destructor_fiber),
};
2279 
gc_init(void)2280 void gc_init(void)
2281 {
2282 	gc_destructor_fiber.function_name = zend_string_init_interned(
2283 			"gc_destructor_fiber",
2284 			strlen("gc_destructor_fiber"),
2285 			true);
2286 }
2287