xref: /PHP-8.0/Zend/zend_gc.c (revision 0fb2374e)
1 /*
2    +----------------------------------------------------------------------+
3    | Zend Engine                                                          |
4    +----------------------------------------------------------------------+
5    | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
6    +----------------------------------------------------------------------+
7    | This source file is subject to version 2.00 of the Zend license,     |
8    | that is bundled with this package in the file LICENSE, and is        |
9    | available through the world-wide-web at the following url:           |
10    | http://www.zend.com/license/2_00.txt.                                |
11    | If you did not receive a copy of the Zend license and are unable to  |
12    | obtain it through the world-wide-web, please send a note to          |
13    | license@zend.com so we can mail you a copy immediately.              |
14    +----------------------------------------------------------------------+
15    | Authors: David Wang <planetbeing@gmail.com>                          |
16    |          Dmitry Stogov <dmitry@php.net>                              |
17    +----------------------------------------------------------------------+
18 */
19 
20 /**
21  * zend_gc_collect_cycles
22  * ======================
23  *
24  * Colors and their meaning
25  * ------------------------
26  *
27  * BLACK  (GC_BLACK)   - In use or free.
28  * GREY   (GC_GREY)    - Possible member of cycle.
29  * WHITE  (GC_WHITE)   - Member of garbage cycle.
30  * PURPLE (GC_PURPLE)  - Possible root of cycle.
31  *
32  * Colors described in the paper but not used
33  * ------------------------------------------
34  *
35  * GREEN - Acyclic
36  * RED   - Candidate cycle undergoing Σ-computation
37  * ORANGE - Candidate cycle awaiting epoch boundary.
38  *
39  *
40  * Flow
41  * =====
42  *
43  * The garbage collection cycle starts from 'gc_mark_roots', which traverses
44  * the possible roots and, for every root that is marked purple, calls
45  * gc_mark_grey to perform a depth-first traversal.
46  *
47  * After all possible roots are traversed and marked,
48  * gc_scan_roots is called, and gc_scan(root->ref) is invoked
49  * for each root.
50  *
51  * gc_scan checks the colors of possible members.
52  *
53  * If the node is marked grey and its refcount > 0,
54  *    gc_scan_black is called on that node to scan its subgraph;
55  * otherwise (refcount == 0), the node is marked white.
56  *
57  * A node MAY be added to the possible roots when ZEND_UNSET_VAR happens or
58  * when zend_assign_to_variable is called, but only if a possible garbage
59  * node is produced.
60  * gc_possible_root() is then called to add the node to the possible roots.
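 *
 * For example, after "$a = new stdClass; $a->self = $a; unset($a);" the
 * object is still held alive (refcount 1) by its own property, and only
 * this cycle collector can reclaim it.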
61  *
62  *
63  * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64  * get the object properties to scan.
65  *
66  *
67  * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68  */
69 #include "zend.h"
70 #include "zend_API.h"
71 
72 #ifndef GC_BENCH
73 # define GC_BENCH 0
74 #endif
75 
76 #ifndef ZEND_GC_DEBUG
77 # define ZEND_GC_DEBUG 0
78 #endif
79 
80 /* GC_INFO layout */
81 #define GC_ADDRESS  0x0fffffu
82 #define GC_COLOR    0x300000u
83 
84 #define GC_BLACK    0x000000u /* must be zero */
85 #define GC_WHITE    0x100000u
86 #define GC_GREY     0x200000u
87 #define GC_PURPLE   0x300000u
88 
89 /* Debug tracing */
90 #if ZEND_GC_DEBUG > 1
91 # define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
92 # define GC_TRACE_REF(ref, format, ...) \
93 	do { \
94 		gc_trace_ref((zend_refcounted *) ref); \
95 		fprintf(stderr, format "\n", ##__VA_ARGS__); \
96 	} while (0)
97 # define GC_TRACE_SET_COLOR(ref, color) \
98 	GC_TRACE_REF(ref, "->%s", gc_color_name(color))
99 #else
100 # define GC_TRACE_REF(ref, format, ...)
101 # define GC_TRACE_SET_COLOR(ref, new_color)
102 # define GC_TRACE(str)
103 #endif
104 
105 /* GC_INFO access */
106 #define GC_REF_ADDRESS(ref) \
107 	(((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
108 
109 #define GC_REF_COLOR(ref) \
110 	(((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
111 
112 #define GC_REF_CHECK_COLOR(ref, color) \
113 	((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
114 
115 #define GC_REF_SET_INFO(ref, info) do { \
116 		GC_TYPE_INFO(ref) = \
117 			(GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
118 			((info) << GC_INFO_SHIFT); \
119 	} while (0)
120 
121 #define GC_REF_SET_COLOR(ref, c) do { \
122 		GC_TRACE_SET_COLOR(ref, c); \
123 		GC_TYPE_INFO(ref) = \
124 			(GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
125 			((c) << GC_INFO_SHIFT); \
126 	} while (0)
127 
128 #define GC_REF_SET_BLACK(ref) do { \
129 		GC_TRACE_SET_COLOR(ref, GC_BLACK); \
130 		GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
131 	} while (0)
132 
133 #define GC_REF_SET_PURPLE(ref) do { \
134 		GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
135 		GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
136 	} while (0)
137 
138 /* bit stealing tags for gc_root_buffer.ref */
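/* The zend_refcounted pointers stored in the root buffer are at least
 * 4-byte aligned, so their two low bits are always zero and can be used to
 * tag the entry with one of the states below. */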
139 #define GC_BITS    0x3
140 
141 #define GC_ROOT    0x0 /* possible root of circular garbage     */
142 #define GC_UNUSED  0x1 /* part of linked list of unused buffers */
143 #define GC_GARBAGE 0x2 /* garbage to delete                     */
144 #define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */
145 
146 #define GC_GET_PTR(ptr) \
147 	((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
148 
149 #define GC_IS_ROOT(ptr) \
150 	((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
151 #define GC_IS_UNUSED(ptr) \
152 	((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
153 #define GC_IS_GARBAGE(ptr) \
154 	((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
155 #define GC_IS_DTOR_GARBAGE(ptr) \
156 	((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)
157 
158 #define GC_MAKE_GARBAGE(ptr) \
159 	((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
160 #define GC_MAKE_DTOR_GARBAGE(ptr) \
161 	((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
162 
163 /* GC address conversion */
164 #define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
165 #define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))
166 
167 #define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
168 #define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
169 
170 /* GC buffers */
171 #define GC_INVALID           0
172 #define GC_FIRST_ROOT        1
173 
174 #define GC_DEFAULT_BUF_SIZE  (16 * 1024)
175 #define GC_BUF_GROW_STEP     (128 * 1024)
176 
177 #define GC_MAX_UNCOMPRESSED  (512 * 1024)
178 #define GC_MAX_BUF_SIZE      0x40000000
179 
180 #define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
181 #define GC_THRESHOLD_STEP    10000
182 #define GC_THRESHOLD_MAX     1000000000
183 #define GC_THRESHOLD_TRIGGER 100
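/* A collection run is triggered from gc_possible_root() once the root buffer
 * is filled up to gc_threshold; gc_adjust_threshold() then moves the threshold
 * between GC_THRESHOLD_DEFAULT and GC_THRESHOLD_MAX in GC_THRESHOLD_STEP
 * increments, depending on how much the previous run collected. */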
184 
185 /* GC flags */
186 #define GC_HAS_DESTRUCTORS  (1<<0)
187 
188 /* unused buffers */
189 #define GC_HAS_UNUSED() \
190 	(GC_G(unused) != GC_INVALID)
191 #define GC_FETCH_UNUSED() \
192 	gc_fetch_unused()
193 #define GC_LINK_UNUSED(root) \
194 	gc_link_unused(root)
195 
196 #define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
197 	(GC_G(first_unused) < GC_G(gc_threshold))
198 #define GC_HAS_NEXT_UNUSED() \
199 	(GC_G(first_unused) != GC_G(buf_size))
200 #define GC_FETCH_NEXT_UNUSED() \
201 	gc_fetch_next_unused()
202 
203 ZEND_API int (*gc_collect_cycles)(void);
204 
205 typedef struct _gc_root_buffer {
206 	zend_refcounted  *ref;
207 } gc_root_buffer;
208 
209 typedef struct _zend_gc_globals {
210 	gc_root_buffer   *buf;				/* preallocated arrays of buffers   */
211 
212 	zend_bool         gc_enabled;
213 	zend_bool         gc_active;        /* GC currently running, forbid nested GC */
214 	zend_bool         gc_protected;     /* GC protected, forbid root additions */
215 	zend_bool         gc_full;
216 
217 	uint32_t          unused;			/* linked list of unused buffers    */
218 	uint32_t          first_unused;		/* first unused buffer              */
219 	uint32_t          gc_threshold;     /* GC collection threshold          */
220 	uint32_t          buf_size;			/* size of the GC buffer            */
221 	uint32_t          num_roots;		/* number of roots in GC buffer     */
222 
223 	uint32_t gc_runs;
224 	uint32_t collected;
225 
226 #if GC_BENCH
227 	uint32_t root_buf_length;
228 	uint32_t root_buf_peak;
229 	uint32_t zval_possible_root;
230 	uint32_t zval_buffered;
231 	uint32_t zval_remove_from_buffer;
232 	uint32_t zval_marked_grey;
233 #endif
234 } zend_gc_globals;
235 
236 #ifdef ZTS
237 static int gc_globals_id;
238 static size_t gc_globals_offset;
239 #define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
240 #else
241 #define GC_G(v) (gc_globals.v)
242 static zend_gc_globals gc_globals;
243 #endif
244 
245 #if GC_BENCH
246 # define GC_BENCH_INC(counter) GC_G(counter)++
247 # define GC_BENCH_DEC(counter) GC_G(counter)--
248 # define GC_BENCH_PEAK(peak, counter) do {		\
249 		if (GC_G(counter) > GC_G(peak)) {		\
250 			GC_G(peak) = GC_G(counter);			\
251 		}										\
252 	} while (0)
253 #else
254 # define GC_BENCH_INC(counter)
255 # define GC_BENCH_DEC(counter)
256 # define GC_BENCH_PEAK(peak, counter)
257 #endif
258 
259 
260 #define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
261 
262 typedef struct _gc_stack gc_stack;
263 
264 struct _gc_stack {
265 	gc_stack        *prev;
266 	gc_stack        *next;
267 	zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
268 };
269 
270 #define GC_STACK_DCL(init) \
271 	gc_stack *_stack = init; \
272 	size_t    _top = 0;
273 
274 #define GC_STACK_PUSH(ref) \
275 	gc_stack_push(&_stack, &_top, ref);
276 
277 #define GC_STACK_POP() \
278 	gc_stack_pop(&_stack, &_top)
279 
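/* Return the segment following 'stack', allocating and linking a new one on
 * first use. Together with gc_stack_push()/gc_stack_pop() below this forms
 * the segmented stack used for iterative graph traversal. */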
280 static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
281 {
282 	if (UNEXPECTED(!stack->next)) {
283 		gc_stack *segment = emalloc(sizeof(gc_stack));
284 		segment->prev = stack;
285 		segment->next = NULL;
286 		stack->next = segment;
287 	}
288 	return stack->next;
289 }
290 
291 static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
292 {
293 	if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
294 		(*stack) = gc_stack_next(*stack);
295 		(*top) = 0;
296 	}
297 	(*stack)->data[(*top)++] = ref;
298 }
299 
300 static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
301 {
302 	if (UNEXPECTED((*top) == 0)) {
303 		if (!(*stack)->prev) {
304 			return NULL;
305 		} else {
306 			(*stack) = (*stack)->prev;
307 			(*top) = GC_STACK_SEGMENT_SIZE - 1;
308 			return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
309 		}
310 	} else {
311 		return (*stack)->data[--(*top)];
312 	}
313 }
314 
315 static void gc_stack_free(gc_stack *stack)
316 {
317 	gc_stack *p = stack->next;
318 
319 	while (p) {
320 		stack = p->next;
321 		efree(p);
322 		p = stack;
323 	}
324 }
325 
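/* Only GC_ADDRESS (20 bits) of GC_INFO is available to store a root buffer
 * index. Larger indexes are compressed: they are stored modulo
 * GC_MAX_UNCOMPRESSED with the GC_MAX_UNCOMPRESSED bit set, and
 * gc_decompress() recovers the real slot by probing idx,
 * idx + GC_MAX_UNCOMPRESSED, ... until the stored ref matches. */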
326 static zend_always_inline uint32_t gc_compress(uint32_t idx)
327 {
328 	if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
329 		return idx;
330 	}
331 	return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
332 }
333 
334 static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
335 {
336 	gc_root_buffer *root = GC_IDX2PTR(idx);
337 
338 	if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
339 		return root;
340 	}
341 
342 	while (1) {
343 		idx += GC_MAX_UNCOMPRESSED;
344 		ZEND_ASSERT(idx < GC_G(first_unused));
345 		root = GC_IDX2PTR(idx);
346 		if (GC_GET_PTR(root->ref) == ref) {
347 			return root;
348 		}
349 	}
350 }
351 
352 static zend_always_inline uint32_t gc_fetch_unused(void)
353 {
354 	uint32_t idx;
355 	gc_root_buffer *root;
356 
357 	ZEND_ASSERT(GC_HAS_UNUSED());
358 	idx = GC_G(unused);
359 	root = GC_IDX2PTR(idx);
360 	ZEND_ASSERT(GC_IS_UNUSED(root->ref));
361 	GC_G(unused) = GC_LIST2IDX(root->ref);
362 	return idx;
363 }
364 
365 static zend_always_inline void gc_link_unused(gc_root_buffer *root)
366 {
367 	root->ref = GC_IDX2LIST(GC_G(unused));
368 	GC_G(unused) = GC_PTR2IDX(root);
369 }
370 
371 static zend_always_inline uint32_t gc_fetch_next_unused(void)
372 {
373 	uint32_t idx;
374 
375 	ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
376 	idx = GC_G(first_unused);
377 	GC_G(first_unused) = GC_G(first_unused) + 1;
378 	return idx;
379 }
380 
381 #if ZEND_GC_DEBUG > 1
382 static const char *gc_color_name(uint32_t color) {
383 	switch (color) {
384 		case GC_BLACK: return "black";
385 		case GC_WHITE: return "white";
386 		case GC_GREY: return "grey";
387 		case GC_PURPLE: return "purple";
388 		default: return "unknown";
389 	}
390 }
391 static void gc_trace_ref(zend_refcounted *ref) {
392 	if (GC_TYPE(ref) == IS_OBJECT) {
393 		zend_object *obj = (zend_object *) ref;
394 		fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
395 			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
396 			gc_color_name(GC_REF_COLOR(ref)),
397 			obj->ce->name->val, obj->handle);
398 	} else if (GC_TYPE(ref) == IS_ARRAY) {
399 		zend_array *arr = (zend_array *) ref;
400 		fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
401 			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
402 			gc_color_name(GC_REF_COLOR(ref)),
403 			zend_hash_num_elements(arr));
404 	} else {
405 		fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
406 			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
407 			gc_color_name(GC_REF_COLOR(ref)),
408 			GC_TYPE(ref) == IS_REFERENCE
409 				? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
410 	}
411 }
412 #endif
413 
414 static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
415 {
416 	GC_LINK_UNUSED(root);
417 	GC_G(num_roots)--;
418 	GC_BENCH_DEC(root_buf_length);
419 }
420 
421 static void root_buffer_dtor(zend_gc_globals *gc_globals)
422 {
423 	if (gc_globals->buf) {
424 		free(gc_globals->buf);
425 		gc_globals->buf = NULL;
426 	}
427 }
428 
429 static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
430 {
431 	gc_globals->gc_enabled = 0;
432 	gc_globals->gc_active = 0;
433 	gc_globals->gc_protected = 1;
434 	gc_globals->gc_full = 0;
435 
436 	gc_globals->buf = NULL;
437 	gc_globals->unused = GC_INVALID;
438 	gc_globals->first_unused = GC_INVALID;
439 	gc_globals->gc_threshold = GC_INVALID;
440 	gc_globals->buf_size = GC_INVALID;
441 	gc_globals->num_roots = 0;
442 
443 	gc_globals->gc_runs = 0;
444 	gc_globals->collected = 0;
445 
446 #if GC_BENCH
447 	gc_globals->root_buf_length = 0;
448 	gc_globals->root_buf_peak = 0;
449 	gc_globals->zval_possible_root = 0;
450 	gc_globals->zval_buffered = 0;
451 	gc_globals->zval_remove_from_buffer = 0;
452 	gc_globals->zval_marked_grey = 0;
453 #endif
454 }
455 
456 void gc_globals_ctor(void)
457 {
458 #ifdef ZTS
459 	ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
460 #else
461 	gc_globals_ctor_ex(&gc_globals);
462 #endif
463 }
464 
465 void gc_globals_dtor(void)
466 {
467 #ifndef ZTS
468 	root_buffer_dtor(&gc_globals);
469 #endif
470 }
471 
472 void gc_reset(void)
473 {
474 	if (GC_G(buf)) {
475 		GC_G(gc_active) = 0;
476 		GC_G(gc_protected) = 0;
477 		GC_G(gc_full) = 0;
478 		GC_G(unused) = GC_INVALID;
479 		GC_G(first_unused) = GC_FIRST_ROOT;
480 		GC_G(num_roots) = 0;
481 
482 		GC_G(gc_runs) = 0;
483 		GC_G(collected) = 0;
484 
485 #if GC_BENCH
486 		GC_G(root_buf_length) = 0;
487 		GC_G(root_buf_peak) = 0;
488 		GC_G(zval_possible_root) = 0;
489 		GC_G(zval_buffered) = 0;
490 		GC_G(zval_remove_from_buffer) = 0;
491 		GC_G(zval_marked_grey) = 0;
492 #endif
493 	}
494 }
495 
496 ZEND_API zend_bool gc_enable(zend_bool enable)
497 {
498 	zend_bool old_enabled = GC_G(gc_enabled);
499 	GC_G(gc_enabled) = enable;
500 	if (enable && !old_enabled && GC_G(buf) == NULL) {
501 		GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
502 		GC_G(buf)[0].ref = NULL;
503 		GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
504 		GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
505 		gc_reset();
506 	}
507 	return old_enabled;
508 }
509 
510 ZEND_API zend_bool gc_enabled(void)
511 {
512 	return GC_G(gc_enabled);
513 }
514 
515 ZEND_API zend_bool gc_protect(zend_bool protect)
516 {
517 	zend_bool old_protected = GC_G(gc_protected);
518 	GC_G(gc_protected) = protect;
519 	return old_protected;
520 }
521 
522 ZEND_API zend_bool gc_protected(void)
523 {
524 	return GC_G(gc_protected);
525 }
526 
527 static void gc_grow_root_buffer(void)
528 {
529 	size_t new_size;
530 
531 	if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
532 		if (!GC_G(gc_full)) {
533 			zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
534 			GC_G(gc_active) = 1;
535 			GC_G(gc_protected) = 1;
536 			GC_G(gc_full) = 1;
537 			return;
538 		}
539 	}
540 	if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
541 		new_size = GC_G(buf_size) * 2;
542 	} else {
543 		new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
544 	}
545 	if (new_size > GC_MAX_BUF_SIZE) {
546 		new_size = GC_MAX_BUF_SIZE;
547 	}
548 	GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
549 	GC_G(buf_size) = new_size;
550 }
551 
552 static void gc_adjust_threshold(int count)
553 {
554 	uint32_t new_threshold;
555 
556 	/* TODO Very simple heuristic for dynamic GC buffer resizing:
557 	 * If there are "too few" collections, increase the collection threshold
558 	 * by a fixed step */
559 	if (count < GC_THRESHOLD_TRIGGER) {
560 		/* increase */
561 		if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
562 			new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
563 			if (new_threshold > GC_THRESHOLD_MAX) {
564 				new_threshold = GC_THRESHOLD_MAX;
565 			}
566 			if (new_threshold > GC_G(buf_size)) {
567 				gc_grow_root_buffer();
568 			}
569 			if (new_threshold <= GC_G(buf_size)) {
570 				GC_G(gc_threshold) = new_threshold;
571 			}
572 		}
573 	} else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
574 		new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
575 		if (new_threshold < GC_THRESHOLD_DEFAULT) {
576 			new_threshold = GC_THRESHOLD_DEFAULT;
577 		}
578 		GC_G(gc_threshold) = new_threshold;
579 	}
580 }
581 
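/* Slow path of gc_possible_root(): the buffer has reached the collection
 * threshold, so try to run a collection (and re-tune the threshold) first,
 * then buffer the node, growing the root buffer if necessary. */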
582 static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
583 {
584 	uint32_t idx;
585 	gc_root_buffer *newRoot;
586 
587 	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
588 	ZEND_ASSERT(GC_INFO(ref) == 0);
589 
590 	if (GC_G(gc_enabled) && !GC_G(gc_active)) {
591 		GC_ADDREF(ref);
592 		gc_adjust_threshold(gc_collect_cycles());
593 		if (UNEXPECTED(GC_DELREF(ref)) == 0) {
594 			rc_dtor_func(ref);
595 			return;
596 		} else if (UNEXPECTED(GC_INFO(ref))) {
597 			return;
598 		}
599 	}
600 
601 	if (GC_HAS_UNUSED()) {
602 		idx = GC_FETCH_UNUSED();
603 	} else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
604 		idx = GC_FETCH_NEXT_UNUSED();
605 	} else {
606 		gc_grow_root_buffer();
607 		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
608 			return;
609 		}
610 		idx = GC_FETCH_NEXT_UNUSED();
611 	}
612 
613 	newRoot = GC_IDX2PTR(idx);
614 	newRoot->ref = ref; /* GC_ROOT tag is 0 */
615 	GC_TRACE_SET_COLOR(ref, GC_PURPLE);
616 
617 	idx = gc_compress(idx);
618 	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
619 	GC_G(num_roots)++;
620 
621 	GC_BENCH_INC(zval_buffered);
622 	GC_BENCH_INC(root_buf_length);
623 	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
624 }
625 
626 ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
627 {
628 	uint32_t idx;
629 	gc_root_buffer *newRoot;
630 
631 	if (UNEXPECTED(GC_G(gc_protected))) {
632 		return;
633 	}
634 
635 	GC_BENCH_INC(zval_possible_root);
636 
637 	if (EXPECTED(GC_HAS_UNUSED())) {
638 		idx = GC_FETCH_UNUSED();
639 	} else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
640 		idx = GC_FETCH_NEXT_UNUSED();
641 	} else {
642 		gc_possible_root_when_full(ref);
643 		return;
644 	}
645 
646 	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
647 	ZEND_ASSERT(GC_INFO(ref) == 0);
648 
649 	newRoot = GC_IDX2PTR(idx);
650 	newRoot->ref = ref; /* GC_ROOT tag is 0 */
651 	GC_TRACE_SET_COLOR(ref, GC_PURPLE);
652 
653 	idx = gc_compress(idx);
654 	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
655 	GC_G(num_roots)++;
656 
657 	GC_BENCH_INC(zval_buffered);
658 	GC_BENCH_INC(root_buf_length);
659 	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
660 }
661 
662 static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
663 {
664 	gc_root_buffer *root = gc_decompress(ref, idx);
665 	gc_remove_from_roots(root);
666 }
667 
668 ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
669 {
670 	gc_root_buffer *root;
671 	uint32_t idx = GC_REF_ADDRESS(ref);
672 
673 	GC_BENCH_INC(zval_remove_from_buffer);
674 
675 	if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
676 		GC_TRACE_SET_COLOR(ref, GC_BLACK);
677 	}
678 	GC_REF_SET_INFO(ref, 0);
679 
680 	/* Perform decompression only in case of large buffers */
681 	if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
682 		gc_remove_compressed(ref, idx);
683 		return;
684 	}
685 
686 	ZEND_ASSERT(idx);
687 	root = GC_IDX2PTR(idx);
688 	gc_remove_from_roots(root);
689 }
690 
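/* ScanBlack: repaint a live subgraph black and restore the refcounts that
 * gc_mark_grey() decremented. The traversal is iterative: direct children go
 * onto the gc_stack, the last child is handled via the tail_call label. */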
691 static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
692 {
693 	HashTable *ht = NULL;
694 	Bucket *p, *end;
695 	zval *zv;
696 	GC_STACK_DCL(stack);
697 
698 tail_call:
699 	if (GC_TYPE(ref) == IS_OBJECT) {
700 		zend_object *obj = (zend_object*)ref;
701 
702 		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
703 			int n;
704 			zval *zv, *end;
705 
706 			ht = obj->handlers->get_gc(obj, &zv, &n);
707 			if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_BLACK))) {
708 				ht = NULL;
709 				if (!n) goto next;
710 				end = zv + n;
711 				while (!Z_REFCOUNTED_P(--end)) {
712 					if (zv == end) goto next;
713 				}
714 			} else {
715 				GC_REF_SET_BLACK(ht);
716 				if (!n) goto handle_ht;
717 				end = zv + n;
718 			}
719 			while (zv != end) {
720 				if (Z_REFCOUNTED_P(zv)) {
721 					ref = Z_COUNTED_P(zv);
722 					GC_ADDREF(ref);
723 					if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
724 						GC_REF_SET_BLACK(ref);
725 						GC_STACK_PUSH(ref);
726 					}
727 				}
728 				zv++;
729 			}
730 			if (EXPECTED(!ht)) {
731 				ref = Z_COUNTED_P(zv);
732 				GC_ADDREF(ref);
733 				if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
734 					GC_REF_SET_BLACK(ref);
735 					goto tail_call;
736 				}
737 				goto next;
738 			}
739 		} else {
740 			goto next;
741 		}
742 	} else if (GC_TYPE(ref) == IS_ARRAY) {
743 		if ((zend_array*)ref != &EG(symbol_table)) {
744 			ht = (zend_array*)ref;
745 		} else {
746 			goto next;
747 		}
748 	} else if (GC_TYPE(ref) == IS_REFERENCE) {
749 		if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
750 			ref = Z_COUNTED(((zend_reference*)ref)->val);
751 			GC_ADDREF(ref);
752 			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
753 				GC_REF_SET_BLACK(ref);
754 				goto tail_call;
755 			}
756 		}
757 		goto next;
758 	} else {
759 		goto next;
760 	}
761 
762 handle_ht:
763 	if (!ht->nNumUsed) goto next;
764 	p = ht->arData;
765 	end = p + ht->nNumUsed;
766 	while (1) {
767 		end--;
768 		zv = &end->val;
769 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
770 			zv = Z_INDIRECT_P(zv);
771 		}
772 		if (Z_REFCOUNTED_P(zv)) {
773 			break;
774 		}
775 		if (p == end) goto next;
776 	}
777 	while (p != end) {
778 		zv = &p->val;
779 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
780 			zv = Z_INDIRECT_P(zv);
781 		}
782 		if (Z_REFCOUNTED_P(zv)) {
783 			ref = Z_COUNTED_P(zv);
784 			GC_ADDREF(ref);
785 			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
786 				GC_REF_SET_BLACK(ref);
787 				GC_STACK_PUSH(ref);
788 			}
789 		}
790 		p++;
791 	}
792 	zv = &p->val;
793 	if (Z_TYPE_P(zv) == IS_INDIRECT) {
794 		zv = Z_INDIRECT_P(zv);
795 	}
796 	ref = Z_COUNTED_P(zv);
797 	GC_ADDREF(ref);
798 	if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
799 		GC_REF_SET_BLACK(ref);
800 		goto tail_call;
801 	}
802 
803 next:
804 	ref = GC_STACK_POP();
805 	if (ref) {
806 		goto tail_call;
807 	}
808 }
809 
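/* MarkGray: color the subgraph grey and trial-decrement the refcount of
 * every reachable child. Nodes whose refcount remains above zero afterwards
 * are referenced from outside the candidate cycles and will be repainted
 * black by gc_scan(). */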
810 static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
811 {
812 	HashTable *ht = NULL;
813 	Bucket *p, *end;
814 	zval *zv;
815 	GC_STACK_DCL(stack);
816 
817 	do {
818 		GC_BENCH_INC(zval_marked_grey);
819 
820 		if (GC_TYPE(ref) == IS_OBJECT) {
821 			zend_object *obj = (zend_object*)ref;
822 
823 			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
824 				int n;
825 				zval *zv, *end;
826 
827 				ht = obj->handlers->get_gc(obj, &zv, &n);
828 				if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_GREY))) {
829 					ht = NULL;
830 					if (!n) goto next;
831 					end = zv + n;
832 					while (!Z_REFCOUNTED_P(--end)) {
833 						if (zv == end) goto next;
834 					}
835 				} else {
836 					GC_REF_SET_COLOR(ht, GC_GREY);
837 					if (!n) goto handle_ht;
838 					end = zv + n;
839 				}
840 				while (zv != end) {
841 					if (Z_REFCOUNTED_P(zv)) {
842 						ref = Z_COUNTED_P(zv);
843 						GC_DELREF(ref);
844 						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
845 							GC_REF_SET_COLOR(ref, GC_GREY);
846 							GC_STACK_PUSH(ref);
847 						}
848 					}
849 					zv++;
850 				}
851 				if (EXPECTED(!ht)) {
852 					ref = Z_COUNTED_P(zv);
853 					GC_DELREF(ref);
854 					if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
855 						GC_REF_SET_COLOR(ref, GC_GREY);
856 						continue;
857 					}
858 					goto next;
859 				}
860 			} else {
861 				goto next;
862 			}
863 		} else if (GC_TYPE(ref) == IS_ARRAY) {
864 			if (((zend_array*)ref) == &EG(symbol_table)) {
865 				GC_REF_SET_BLACK(ref);
866 				goto next;
867 			} else {
868 				ht = (zend_array*)ref;
869 			}
870 		} else if (GC_TYPE(ref) == IS_REFERENCE) {
871 			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
872 				ref = Z_COUNTED(((zend_reference*)ref)->val);
873 				GC_DELREF(ref);
874 				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
875 					GC_REF_SET_COLOR(ref, GC_GREY);
876 					continue;
877 				}
878 			}
879 			goto next;
880 		} else {
881 			goto next;
882 		}
883 
884 handle_ht:
885 		if (!ht->nNumUsed) goto next;
886 		p = ht->arData;
887 		end = p + ht->nNumUsed;
888 		while (1) {
889 			end--;
890 			zv = &end->val;
891 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
892 				zv = Z_INDIRECT_P(zv);
893 			}
894 			if (Z_REFCOUNTED_P(zv)) {
895 				break;
896 			}
897 			if (p == end) goto next;
898 		}
899 		while (p != end) {
900 			zv = &p->val;
901 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
902 				zv = Z_INDIRECT_P(zv);
903 			}
904 			if (Z_REFCOUNTED_P(zv)) {
905 				ref = Z_COUNTED_P(zv);
906 				GC_DELREF(ref);
907 				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
908 					GC_REF_SET_COLOR(ref, GC_GREY);
909 					GC_STACK_PUSH(ref);
910 				}
911 			}
912 			p++;
913 		}
914 		zv = &p->val;
915 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
916 			zv = Z_INDIRECT_P(zv);
917 		}
918 		ref = Z_COUNTED_P(zv);
919 		GC_DELREF(ref);
920 		if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
921 			GC_REF_SET_COLOR(ref, GC_GREY);
922 			continue;
923 		}
924 
925 next:
926 		ref = GC_STACK_POP();
927 	} while (ref);
928 }
929 
930 /* Two-Finger compaction algorithm */
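/* Move roots from the end of the buffer into unused slots at the beginning,
 * so that all live entries are contiguous starting at GC_FIRST_ROOT, and
 * update their compressed GC_INFO addresses accordingly. */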
931 static void gc_compact(void)
932 {
933 	if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
934 		if (GC_G(num_roots)) {
935 			gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
936 			gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
937 			gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
938 			uint32_t idx;
939 			zend_refcounted *p;
940 
941 			while (free < scan) {
942 				while (!GC_IS_UNUSED(free->ref)) {
943 					free++;
944 				}
945 				while (GC_IS_UNUSED(scan->ref)) {
946 					scan--;
947 				}
948 				if (scan > free) {
949 					p = scan->ref;
950 					free->ref = p;
951 					p = GC_GET_PTR(p);
952 					idx = gc_compress(GC_PTR2IDX(free));
953 					GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
954 					free++;
955 					scan--;
956 					if (scan <= end) {
957 						break;
958 					}
959 				}
960 			}
961 		}
962 		GC_G(unused) = GC_INVALID;
963 		GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
964 	}
965 }
966 
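/* MarkRoots: start a gc_mark_grey() traversal from every purple root in the
 * buffer; entries that are unused or already tagged as garbage are skipped. */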
967 static void gc_mark_roots(gc_stack *stack)
968 {
969 	gc_root_buffer *current, *last;
970 
971 	gc_compact();
972 
973 	current = GC_IDX2PTR(GC_FIRST_ROOT);
974 	last = GC_IDX2PTR(GC_G(first_unused));
975 	while (current != last) {
976 		if (GC_IS_ROOT(current->ref)) {
977 			if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
978 				GC_REF_SET_COLOR(current->ref, GC_GREY);
979 				gc_mark_grey(current->ref, stack);
980 			}
981 		}
982 		current++;
983 	}
984 }
985 
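/* Scan: a white node whose refcount is still positive after MarkGray is
 * reachable from outside, so its subgraph is repainted black via
 * gc_scan_black(); a white node with refcount zero stays white and its grey
 * children are turned white as well. */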
986 static void gc_scan(zend_refcounted *ref, gc_stack *stack)
987 {
988 	HashTable *ht = NULL;
989 	Bucket *p, *end;
990 	zval *zv;
991 	GC_STACK_DCL(stack);
992 
993 tail_call:
994 	if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
995 		if (GC_REFCOUNT(ref) > 0) {
996 			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
997 				GC_REF_SET_BLACK(ref);
998 				if (UNEXPECTED(!_stack->next)) {
999 					gc_stack_next(_stack);
1000 				}
1001 				/* Split stack and reuse the tail */
1002 				_stack->next->prev = NULL;
1003 				gc_scan_black(ref, _stack->next);
1004 				_stack->next->prev = _stack;
1005 			}
1006 		} else {
1007 			if (GC_TYPE(ref) == IS_OBJECT) {
1008 				zend_object *obj = (zend_object*)ref;
1009 
1010 				if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1011 					int n;
1012 					zval *zv, *end;
1013 
1014 					ht = obj->handlers->get_gc(obj, &zv, &n);
1015 					if (EXPECTED(!ht) || UNEXPECTED(!GC_REF_CHECK_COLOR(ht, GC_GREY))) {
1016 						ht = NULL;
1017 						if (!n) goto next;
1018 						end = zv + n;
1019 						while (!Z_REFCOUNTED_P(--end)) {
1020 							if (zv == end) goto next;
1021 						}
1022 					} else {
1023 						GC_REF_SET_COLOR(ht, GC_WHITE);
1024 						if (!n) goto handle_ht;
1025 						end = zv + n;
1026 					}
1027 					while (zv != end) {
1028 						if (Z_REFCOUNTED_P(zv)) {
1029 							ref = Z_COUNTED_P(zv);
1030 							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1031 								GC_REF_SET_COLOR(ref, GC_WHITE);
1032 								GC_STACK_PUSH(ref);
1033 							}
1034 						}
1035 						zv++;
1036 					}
1037 					if (EXPECTED(!ht)) {
1038 						ref = Z_COUNTED_P(zv);
1039 						if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1040 							GC_REF_SET_COLOR(ref, GC_WHITE);
1041 							goto tail_call;
1042 						}
1043 						goto next;
1044 					}
1045 				} else {
1046 					goto next;
1047 				}
1048 			} else if (GC_TYPE(ref) == IS_ARRAY) {
1049 				if ((zend_array*)ref == &EG(symbol_table)) {
1050 					GC_REF_SET_BLACK(ref);
1051 					goto next;
1052 				} else {
1053 					ht = (zend_array*)ref;
1054 				}
1055 			} else if (GC_TYPE(ref) == IS_REFERENCE) {
1056 				if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1057 					ref = Z_COUNTED(((zend_reference*)ref)->val);
1058 					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1059 						GC_REF_SET_COLOR(ref, GC_WHITE);
1060 						goto tail_call;
1061 					}
1062 				}
1063 				goto next;
1064 			} else {
1065 				goto next;
1066 			}
1067 
1068 handle_ht:
1069 			if (!ht->nNumUsed) goto next;
1070 			p = ht->arData;
1071 			end = p + ht->nNumUsed;
1072 			while (1) {
1073 				end--;
1074 				zv = &end->val;
1075 				if (Z_TYPE_P(zv) == IS_INDIRECT) {
1076 					zv = Z_INDIRECT_P(zv);
1077 				}
1078 				if (Z_REFCOUNTED_P(zv)) {
1079 					break;
1080 				}
1081 				if (p == end) goto next;
1082 			}
1083 			while (p != end) {
1084 				zv = &p->val;
1085 				if (Z_TYPE_P(zv) == IS_INDIRECT) {
1086 					zv = Z_INDIRECT_P(zv);
1087 				}
1088 				if (Z_REFCOUNTED_P(zv)) {
1089 					ref = Z_COUNTED_P(zv);
1090 					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1091 						GC_REF_SET_COLOR(ref, GC_WHITE);
1092 						GC_STACK_PUSH(ref);
1093 					}
1094 				}
1095 				p++;
1096 			}
1097 			zv = &p->val;
1098 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1099 				zv = Z_INDIRECT_P(zv);
1100 			}
1101 			ref = Z_COUNTED_P(zv);
1102 			if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1103 				GC_REF_SET_COLOR(ref, GC_WHITE);
1104 				goto tail_call;
1105 			}
1106 		}
1107 	}
1108 
1109 next:
1110 	ref = GC_STACK_POP();
1111 	if (ref) {
1112 		goto tail_call;
1113 	}
1114 }
1115 
1116 static void gc_scan_roots(gc_stack *stack)
1117 {
1118 	gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1119 	gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1120 
1121 	while (current != last) {
1122 		if (GC_IS_ROOT(current->ref)) {
1123 			if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1124 				GC_REF_SET_COLOR(current->ref, GC_WHITE);
1125 				gc_scan(current->ref, stack);
1126 			}
1127 		}
1128 		current++;
1129 	}
1130 }
1131 
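/* Record a node that is not itself a root but is part of a garbage cycle,
 * so that the destroy/free phases of zend_gc_collect_cycles() can find it. */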
1132 static void gc_add_garbage(zend_refcounted *ref)
1133 {
1134 	uint32_t idx;
1135 	gc_root_buffer *buf;
1136 
1137 	if (GC_HAS_UNUSED()) {
1138 		idx = GC_FETCH_UNUSED();
1139 	} else if (GC_HAS_NEXT_UNUSED()) {
1140 		idx = GC_FETCH_NEXT_UNUSED();
1141 	} else {
1142 		gc_grow_root_buffer();
1143 		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
1144 			return;
1145 		}
1146 		idx = GC_FETCH_NEXT_UNUSED();
1147 	}
1148 
1149 	buf = GC_IDX2PTR(idx);
1150 	buf->ref = GC_MAKE_GARBAGE(ref);
1151 
1152 	idx = gc_compress(idx);
1153 	GC_REF_SET_INFO(ref, idx | GC_BLACK);
1154 	GC_G(num_roots)++;
1155 }
1156 
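/* CollectWhite: turn a white subgraph black again, restore the refcounts
 * taken by MarkGray, buffer previously unbuffered nodes as garbage, and note
 * whether any object still needs its destructor run (GC_HAS_DESTRUCTORS). */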
1157 static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
1158 {
1159 	int count = 0;
1160 	HashTable *ht = NULL;
1161 	Bucket *p, *end;
1162 	zval *zv;
1163 	GC_STACK_DCL(stack);
1164 
1165 	do {
1166 		/* don't count references for compatibility ??? */
1167 		if (GC_TYPE(ref) != IS_REFERENCE) {
1168 			count++;
1169 		}
1170 
1171 		if (GC_TYPE(ref) == IS_OBJECT) {
1172 			zend_object *obj = (zend_object*)ref;
1173 
1174 			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1175 				int n;
1176 				zval *zv, *end;
1177 
1178 				/* optimization: color is GC_BLACK (0) */
1179 				if (!GC_INFO(ref)) {
1180 					gc_add_garbage(ref);
1181 				}
1182 				if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
1183 				 && (obj->handlers->dtor_obj != zend_objects_destroy_object
1184 				  || obj->ce->destructor != NULL)) {
1185 					*flags |= GC_HAS_DESTRUCTORS;
1186 				}
1187 				ht = obj->handlers->get_gc(obj, &zv, &n);
1188 				if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_BLACK))) {
1189 					ht = NULL;
1190 					if (!n) goto next;
1191 					end = zv + n;
1192 					while (!Z_REFCOUNTED_P(--end)) {
1193 						if (zv == end) goto next;
1194 					}
1195 				} else {
1196 					GC_REF_SET_BLACK(ht);
1197 					if (!n) goto handle_ht;
1198 					end = zv + n;
1199 				}
1200 				while (zv != end) {
1201 					if (Z_REFCOUNTED_P(zv)) {
1202 						ref = Z_COUNTED_P(zv);
1203 						GC_ADDREF(ref);
1204 						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1205 							GC_REF_SET_BLACK(ref);
1206 							GC_STACK_PUSH(ref);
1207 						}
1208 					}
1209 					zv++;
1210 				}
1211 				if (EXPECTED(!ht)) {
1212 					ref = Z_COUNTED_P(zv);
1213 					GC_ADDREF(ref);
1214 					if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1215 						GC_REF_SET_BLACK(ref);
1216 						continue;
1217 					}
1218 					goto next;
1219 				}
1220 			} else {
1221 				goto next;
1222 			}
1223 		} else if (GC_TYPE(ref) == IS_ARRAY) {
1224 			/* optimization: color is GC_BLACK (0) */
1225 			if (!GC_INFO(ref)) {
1226 				gc_add_garbage(ref);
1227 			}
1228 			ht = (zend_array*)ref;
1229 		} else if (GC_TYPE(ref) == IS_REFERENCE) {
1230 			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1231 				ref = Z_COUNTED(((zend_reference*)ref)->val);
1232 				GC_ADDREF(ref);
1233 				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1234 					GC_REF_SET_BLACK(ref);
1235 					continue;
1236 				}
1237 			}
1238 			goto next;
1239 		} else {
1240 			goto next;
1241 		}
1242 
1243 handle_ht:
1244 		if (!ht->nNumUsed) goto next;
1245 		p = ht->arData;
1246 		end = p + ht->nNumUsed;
1247 		while (1) {
1248 			end--;
1249 			zv = &end->val;
1250 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1251 				zv = Z_INDIRECT_P(zv);
1252 			}
1253 			if (Z_REFCOUNTED_P(zv)) {
1254 				break;
1255 			}
1256 			if (p == end) goto next;
1257 		}
1258 		while (p != end) {
1259 			zv = &p->val;
1260 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1261 				zv = Z_INDIRECT_P(zv);
1262 			}
1263 			if (Z_REFCOUNTED_P(zv)) {
1264 				ref = Z_COUNTED_P(zv);
1265 				GC_ADDREF(ref);
1266 				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1267 					GC_REF_SET_BLACK(ref);
1268 					GC_STACK_PUSH(ref);
1269 				}
1270 			}
1271 			p++;
1272 		}
1273 		zv = &p->val;
1274 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
1275 			zv = Z_INDIRECT_P(zv);
1276 		}
1277 		ref = Z_COUNTED_P(zv);
1278 		GC_ADDREF(ref);
1279 		if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1280 			GC_REF_SET_BLACK(ref);
1281 			continue;
1282 		}
1283 
1284 next:
1285 		ref = GC_STACK_POP();
1286 	} while (ref);
1287 
1288 	return count;
1289 }
1290 
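/* Drop the black (live) roots from the buffer, compact it, then turn every
 * remaining root into garbage and collect its white subgraph. */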
1291 static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
1292 {
1293 	uint32_t idx, end;
1294 	zend_refcounted *ref;
1295 	int count = 0;
1296 	gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1297 	gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1298 
1299 	/* remove non-garbage from the list */
1300 	while (current != last) {
1301 		if (GC_IS_ROOT(current->ref)) {
1302 			if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
1303 				GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
1304 				gc_remove_from_roots(current);
1305 			}
1306 		}
1307 		current++;
1308 	}
1309 
1310 	gc_compact();
1311 
1312 	/* Root buffer might be reallocated during gc_collect_white,
1313 	 * make sure to reload pointers. */
1314 	idx = GC_FIRST_ROOT;
1315 	end = GC_G(first_unused);
1316 	while (idx != end) {
1317 		current = GC_IDX2PTR(idx);
1318 		ref = current->ref;
1319 		ZEND_ASSERT(GC_IS_ROOT(ref));
1320 		current->ref = GC_MAKE_GARBAGE(ref);
1321 		if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1322 			GC_REF_SET_BLACK(ref);
1323 			count += gc_collect_white(ref, flags, stack);
1324 		}
1325 		idx++;
1326 	}
1327 
1328 	return count;
1329 }
1330 
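/* Remove the data nested under a dtor-garbage root from the root buffer;
 * the root itself stays buffered because the caller recolored it purple. */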
1331 static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
1332 {
1333 	HashTable *ht = NULL;
1334 	Bucket *p, *end;
1335 	zval *zv;
1336 	int count = 0;
1337 	GC_STACK_DCL(stack);
1338 
1339 	do {
1340 		if (root) {
1341 			root = NULL;
1342 			count++;
1343 		} else if (GC_REF_ADDRESS(ref) != 0
1344 		 && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1345 			GC_TRACE_REF(ref, "removing from buffer");
1346 			GC_REMOVE_FROM_BUFFER(ref);
1347 			count++;
1348 		} else if (GC_TYPE(ref) == IS_REFERENCE) {
1349 			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1350 				ref = Z_COUNTED(((zend_reference*)ref)->val);
1351 				continue;
1352 			}
1353 			goto next;
1354 		} else {
1355 			goto next;
1356 		}
1357 
1358 		if (GC_TYPE(ref) == IS_OBJECT) {
1359 			zend_object *obj = (zend_object*)ref;
1360 
1361 			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1362 				int n;
1363 				zval *zv, *end;
1364 
1365 				ht = obj->handlers->get_gc(obj, &zv, &n);
1366 				if (EXPECTED(!ht)) {
1367 					if (!n) goto next;
1368 					end = zv + n;
1369 					while (!Z_REFCOUNTED_P(--end)) {
1370 						if (zv == end) goto next;
1371 					}
1372 				} else {
1373 					if (!n) goto handle_ht;
1374 					end = zv + n;
1375 				}
1376 				while (zv != end) {
1377 					if (Z_REFCOUNTED_P(zv)) {
1378 						ref = Z_COUNTED_P(zv);
1379 						GC_STACK_PUSH(ref);
1380 					}
1381 					zv++;
1382 				}
1383 				if (EXPECTED(!ht)) {
1384 					ref = Z_COUNTED_P(zv);
1385 					continue;
1386 				}
1387 handle_ht:
1388 				if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
1389 					GC_TRACE_REF(ht, "removing from buffer");
1390 					GC_REMOVE_FROM_BUFFER(ht);
1391 				}
1392 			} else {
1393 				goto next;
1394 			}
1395 		} else if (GC_TYPE(ref) == IS_ARRAY) {
1396 			ht = (zend_array*)ref;
1397 		} else {
1398 			goto next;
1399 		}
1400 
1401 		if (!ht->nNumUsed) goto next;
1402 		p = ht->arData;
1403 		end = p + ht->nNumUsed;
1404 		while (1) {
1405 			end--;
1406 			zv = &end->val;
1407 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1408 				zv = Z_INDIRECT_P(zv);
1409 			}
1410 			if (Z_REFCOUNTED_P(zv)) {
1411 				break;
1412 			}
1413 			if (p == end) goto next;
1414 		}
1415 		while (p != end) {
1416 			zv = &p->val;
1417 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1418 				zv = Z_INDIRECT_P(zv);
1419 			}
1420 			if (Z_REFCOUNTED_P(zv)) {
1421 				ref = Z_COUNTED_P(zv);
1422 				GC_STACK_PUSH(ref);
1423 			}
1424 			p++;
1425 		}
1426 		zv = &p->val;
1427 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
1428 			zv = Z_INDIRECT_P(zv);
1429 		}
1430 		ref = Z_COUNTED_P(zv);
1431 		continue;
1432 
1433 next:
1434 		ref = GC_STACK_POP();
1435 	} while (ref);
1436 	return count;
1437 }
1438 
1439 static void zend_get_gc_buffer_release();
1440 
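/* Main entry point of the cycle collector: mark roots grey, scan, collect
 * the white (garbage) nodes, run pending destructors, then destroy and free
 * the garbage. Returns the number of collected nodes (also added to
 * GC_G(collected)). */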
1441 ZEND_API int zend_gc_collect_cycles(void)
1442 {
1443 	int count = 0;
1444 
1445 	if (GC_G(num_roots)) {
1446 		gc_root_buffer *current, *last;
1447 		zend_refcounted *p;
1448 		uint32_t gc_flags = 0;
1449 		uint32_t idx, end;
1450 		gc_stack stack;
1451 
1452 		stack.prev = NULL;
1453 		stack.next = NULL;
1454 
1455 		if (GC_G(gc_active)) {
1456 			return 0;
1457 		}
1458 
1459 		GC_TRACE("Collecting cycles");
1460 		GC_G(gc_runs)++;
1461 		GC_G(gc_active) = 1;
1462 
1463 		GC_TRACE("Marking roots");
1464 		gc_mark_roots(&stack);
1465 		GC_TRACE("Scanning roots");
1466 		gc_scan_roots(&stack);
1467 
1468 		GC_TRACE("Collecting roots");
1469 		count = gc_collect_roots(&gc_flags, &stack);
1470 
1471 		if (!GC_G(num_roots)) {
1472 			/* nothing to free */
1473 			GC_TRACE("Nothing to free");
1474 			gc_stack_free(&stack);
1475 			zend_get_gc_buffer_release();
1476 			GC_G(gc_active) = 0;
1477 			return 0;
1478 		}
1479 
1480 		end = GC_G(first_unused);
1481 
1482 		if (gc_flags & GC_HAS_DESTRUCTORS) {
1483 			GC_TRACE("Calling destructors");
1484 
1485 			/* During a destructor call, new externally visible references to nested data may
1486 			 * be introduced. These references can be introduced in a way that does not
1487 			 * modify any refcounts, so we have no real way to detect this situation
1488 			 * short of rerunning full GC tracing. What we do instead is to only run
1489 			 * destructors at this point, and leave the actual freeing of the objects
1490 			 * until the next GC run. */
1491 
1492 			/* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
1493 			 * color them purple. This serves a double purpose: First, they should be
1494 			 * considered new potential roots for the next GC run. Second, it will prevent
1495 			 * their removal from the root buffer by nested data removal. */
1496 			idx = GC_FIRST_ROOT;
1497 			current = GC_IDX2PTR(GC_FIRST_ROOT);
1498 			while (idx != end) {
1499 				if (GC_IS_GARBAGE(current->ref)) {
1500 					p = GC_GET_PTR(current->ref);
1501 					if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1502 						zend_object *obj = (zend_object *) p;
1503 						if (obj->handlers->dtor_obj != zend_objects_destroy_object
1504 							|| obj->ce->destructor) {
1505 							current->ref = GC_MAKE_DTOR_GARBAGE(obj);
1506 							GC_REF_SET_COLOR(obj, GC_PURPLE);
1507 						} else {
1508 							GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
1509 						}
1510 					}
1511 				}
1512 				current++;
1513 				idx++;
1514 			}
1515 
1516 			/* Remove nested data for objects on which a destructor will be called.
1517 			 * This will not remove the objects themselves, as they have been colored
1518 			 * purple. */
1519 			idx = GC_FIRST_ROOT;
1520 			current = GC_IDX2PTR(GC_FIRST_ROOT);
1521 			while (idx != end) {
1522 				if (GC_IS_DTOR_GARBAGE(current->ref)) {
1523 					p = GC_GET_PTR(current->ref);
1524 					count -= gc_remove_nested_data_from_buffer(p, current, &stack);
1525 				}
1526 				current++;
1527 				idx++;
1528 			}
1529 
1530 			/* Actually call destructors.
1531 			 *
1532 			 * The root buffer might be reallocated during destructors calls,
1533 			 * make sure to reload pointers as necessary. */
1534 			idx = GC_FIRST_ROOT;
1535 			while (idx != end) {
1536 				current = GC_IDX2PTR(idx);
1537 				if (GC_IS_DTOR_GARBAGE(current->ref)) {
1538 					p = GC_GET_PTR(current->ref);
1539 					/* Mark this is as a normal root for the next GC run,
1540 					 * it's no longer garbage for this run. */
1541 					current->ref = p;
1542 					/* Double check that the destructor hasn't been called yet. It could have
1543 					 * already been invoked indirectly by some other destructor. */
1544 					if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1545 						zend_object *obj = (zend_object*)p;
1546 						GC_TRACE_REF(obj, "calling destructor");
1547 						GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
1548 						GC_ADDREF(obj);
1549 						obj->handlers->dtor_obj(obj);
1550 						GC_DELREF(obj);
1551 					}
1552 				}
1553 				idx++;
1554 			}
1555 
1556 			if (GC_G(gc_protected)) {
1557 				/* something went wrong */
1558 				zend_get_gc_buffer_release();
1559 				return 0;
1560 			}
1561 		}
1562 
1563 		gc_stack_free(&stack);
1564 
1565 		/* Destroy zvals. The root buffer may be reallocated. */
1566 		GC_TRACE("Destroying zvals");
1567 		idx = GC_FIRST_ROOT;
1568 		while (idx != end) {
1569 			current = GC_IDX2PTR(idx);
1570 			if (GC_IS_GARBAGE(current->ref)) {
1571 				p = GC_GET_PTR(current->ref);
1572 				GC_TRACE_REF(p, "destroying");
1573 				if (GC_TYPE(p) == IS_OBJECT) {
1574 					zend_object *obj = (zend_object*)p;
1575 
1576 					EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
1577 					GC_TYPE_INFO(obj) = GC_NULL |
1578 						(GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
1579 					/* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
1580 					current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
1581 					if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
1582 						GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
1583 						GC_ADDREF(obj);
1584 						obj->handlers->free_obj(obj);
1585 						GC_DELREF(obj);
1586 					}
1587 
1588 					ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
1589 				} else if (GC_TYPE(p) == IS_ARRAY) {
1590 					zend_array *arr = (zend_array*)p;
1591 
1592 					GC_TYPE_INFO(arr) = GC_NULL |
1593 						(GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);
1594 
1595 					/* GC may destroy arrays with rc>1. This is valid and safe. */
1596 					HT_ALLOW_COW_VIOLATION(arr);
1597 
1598 					zend_hash_destroy(arr);
1599 				}
1600 			}
1601 			idx++;
1602 		}
1603 
1604 		/* Free objects */
1605 		current = GC_IDX2PTR(GC_FIRST_ROOT);
1606 		last = GC_IDX2PTR(end);
1607 		while (current != last) {
1608 			if (GC_IS_GARBAGE(current->ref)) {
1609 				p = GC_GET_PTR(current->ref);
1610 				GC_LINK_UNUSED(current);
1611 				GC_G(num_roots)--;
1612 				efree(p);
1613 			}
1614 			current++;
1615 		}
1616 
1617 		GC_TRACE("Collection finished");
1618 		GC_G(collected) += count;
1619 		GC_G(gc_active) = 0;
1620 	}
1621 
1622 	gc_compact();
1623 	zend_get_gc_buffer_release();
1624 	return count;
1625 }
1626 
1627 ZEND_API void zend_gc_get_status(zend_gc_status *status)
1628 {
1629 	status->runs = GC_G(gc_runs);
1630 	status->collected = GC_G(collected);
1631 	status->threshold = GC_G(gc_threshold);
1632 	status->num_roots = GC_G(num_roots);
1633 }
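/* A minimal usage sketch (for a caller such as an extension, not part of
 * this file):
 *
 *     zend_gc_status status;
 *     zend_gc_get_status(&status);
 *     php_printf("GC runs: %u, collected: %u\n", status.runs, status.collected);
 */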
1634 
1635 ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
1636 	/* There can only be one get_gc() call active at a time,
1637 	 * so there only needs to be one buffer. */
1638 	zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
1639 	gc_buffer->cur = gc_buffer->start;
1640 	return gc_buffer;
1641 }
1642 
1643 ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
1644 	size_t old_capacity = gc_buffer->end - gc_buffer->start;
1645 	size_t new_capacity = old_capacity == 0 ? 64 : old_capacity * 2;
1646 	gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval));
1647 	gc_buffer->end = gc_buffer->start + new_capacity;
1648 	gc_buffer->cur = gc_buffer->start + old_capacity;
1649 }
1650 
1651 static void zend_get_gc_buffer_release() {
1652 	zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
1653 	efree(gc_buffer->start);
1654 	gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
1655 }
1656 
1657 #ifdef ZTS
1658 size_t zend_gc_globals_size(void)
1659 {
1660 	return sizeof(zend_gc_globals);
1661 }
1662 #endif
1663