xref: /PHP-7.4/Zend/zend_gc.c (revision b037fe5b)
1 /*
2    +----------------------------------------------------------------------+
3    | Zend Engine                                                          |
4    +----------------------------------------------------------------------+
5    | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
6    +----------------------------------------------------------------------+
7    | This source file is subject to version 2.00 of the Zend license,     |
8    | that is bundled with this package in the file LICENSE, and is        |
9    | available through the world-wide-web at the following url:           |
10    | http://www.zend.com/license/2_00.txt.                                |
11    | If you did not receive a copy of the Zend license and are unable to  |
12    | obtain it through the world-wide-web, please send a note to          |
13    | license@zend.com so we can mail you a copy immediately.              |
14    +----------------------------------------------------------------------+
15    | Authors: David Wang <planetbeing@gmail.com>                          |
16    |          Dmitry Stogov <dmitry@php.net>                              |
17    +----------------------------------------------------------------------+
18 */
19 
20 /**
21  * zend_gc_collect_cycles
22  * ======================
23  *
24  * Colors and their meaning
25  * ----------------------
26  *
27  * BLACK  (GC_BLACK)   - In use or free.
28  * GREY   (GC_GREY)    - Possible member of cycle.
29  * WHITE  (GC_WHITE)   - Member of garbage cycle.
30  * PURPLE (GC_PURPLE)  - Possible root of cycle.
31  *
32  * Colors described in the paper but not used
33  * ------------------------------------------
34  *
35  * GREEN - Acyclic
36  * RED   - Candidate cycle undergoing Σ-computation.
37  * ORANGE - Candidate cycle awaiting epoch boundary.
38  *
39  *
40  * Flow
41  * ====
42  *
43  * The garbage collection cycle starts from 'gc_mark_roots', which traverses
44  * the possible roots and calls gc_mark_grey for the roots that are marked
45  * purple, using a depth-first traversal.
46  *
47  * After all possible roots are traversed and marked,
48  * gc_scan_roots will be called, which invokes
49  * gc_scan(root->ref) on each root.
50  *
51  * gc_scan checks the colors of possible members.
52  *
53  * If the node is marked grey and its refcount > 0,
54  *    gc_scan_black will be called on that node to scan its subgraph;
55  * otherwise (refcount == 0), the node is marked white.
56  *
57  * A node MAY be added to the possible roots when ZEND_UNSET_VAR happens or
58  * when zend_assign_to_variable is called, but only when a possible garbage
59  * node is produced.
60  * gc_possible_root() will be called to add such nodes to the possible roots.
61  *
62  *
63  * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64  * get the object properties to scan.
65  *
66  *
67  * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68  */
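
/*
 * The following block is an editorial illustration, not part of the engine:
 * a minimal, self-contained sketch of the trial-deletion idea described
 * above, using a toy node type (single outgoing edge, plain recursion)
 * instead of zend_refcounted.  It is kept inside "#if 0" so this file still
 * compiles unchanged.
 */
#if 0
#include <stdio.h>

typedef enum { BLACK = 0, GREY = 1, WHITE = 2 } color_t;

typedef struct node {
    int          rc;      /* simulated refcount                */
    color_t      color;
    struct node *child;   /* single outgoing edge, may be NULL */
} node;

static void mark_grey(node *n)   /* trial deletion: drop internal refs */
{
    if (n->color != GREY) {
        n->color = GREY;
        if (n->child) { n->child->rc--; mark_grey(n->child); }
    }
}

static void scan_black(node *n)  /* restore refs of a live subgraph */
{
    n->color = BLACK;
    if (n->child) {
        n->child->rc++;
        if (n->child->color != BLACK) scan_black(n->child);
    }
}

static void scan(node *n)        /* rc > 0 means externally reachable */
{
    if (n->color == GREY) {
        if (n->rc > 0) {
            scan_black(n);
        } else {
            n->color = WHITE;
            if (n->child) scan(n->child);
        }
    }
}

int main(void)
{
    node a = {1, BLACK, NULL}, b = {1, BLACK, NULL}, c = {2, BLACK, NULL};

    a.child = &b; b.child = &a;   /* a <-> b form a cycle with no external refs */
                                  /* c has two external refs and no children    */
    mark_grey(&a); scan(&a);      /* a and b end up WHITE: cyclic garbage       */
    mark_grey(&c); scan(&c);      /* c keeps rc > 0 and is painted BLACK again  */

    printf("a=%d b=%d c=%d (0=BLACK, 2=WHITE)\n", a.color, b.color, c.color);
    return 0;
}
#endif
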
69 #include "zend.h"
70 #include "zend_API.h"
71 
72 #ifndef GC_BENCH
73 # define GC_BENCH 0
74 #endif
75 
76 #ifndef ZEND_GC_DEBUG
77 # define ZEND_GC_DEBUG 0
78 #endif
79 
80 /* GC_INFO layout */
81 #define GC_ADDRESS  0x0fffffu
82 #define GC_COLOR    0x300000u
83 
84 #define GC_BLACK    0x000000u /* must be zero */
85 #define GC_WHITE    0x100000u
86 #define GC_GREY     0x200000u
87 #define GC_PURPLE   0x300000u
88 
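/*
 * Editorial sketch (not engine code): how a root's buffer address and color
 * share the GC_INFO bits: the low 20 bits carry the (possibly compressed)
 * buffer index, bits 20-21 carry the color.  The constants mirror the masks
 * defined above.
 */
#if 0
#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint32_t info = 1234u | 0x300000u;      /* index 1234, GC_PURPLE           */

    printf("addr=%u color=%#x\n",
           info & 0x0fffffu,                /* GC_ADDRESS -> 1234              */
           info & 0x300000u);               /* GC_COLOR   -> 0x300000 (purple) */
    return 0;
}
#endif
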
89 /* Debug tracing */
90 #if ZEND_GC_DEBUG > 1
91 # define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
92 # define GC_TRACE_REF(ref, format, ...) \
93 	do { \
94 		gc_trace_ref((zend_refcounted *) ref); \
95 		fprintf(stderr, format "\n", ##__VA_ARGS__); \
96 	} while (0)
97 # define GC_TRACE_SET_COLOR(ref, color) \
98 	GC_TRACE_REF(ref, "->%s", gc_color_name(color))
99 #else
100 # define GC_TRACE_REF(ref, format, ...)
101 # define GC_TRACE_SET_COLOR(ref, new_color)
102 # define GC_TRACE(str)
103 #endif
104 
105 /* GC_INFO access */
106 #define GC_REF_ADDRESS(ref) \
107 	(((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
108 
109 #define GC_REF_COLOR(ref) \
110 	(((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
111 
112 #define GC_REF_CHECK_COLOR(ref, color) \
113 	((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
114 
115 #define GC_REF_SET_INFO(ref, info) do { \
116 		GC_TYPE_INFO(ref) = \
117 			(GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
118 			((info) << GC_INFO_SHIFT); \
119 	} while (0)
120 
121 #define GC_REF_SET_COLOR(ref, c) do { \
122 		GC_TRACE_SET_COLOR(ref, c); \
123 		GC_TYPE_INFO(ref) = \
124 			(GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
125 			((c) << GC_INFO_SHIFT); \
126 	} while (0)
127 
128 #define GC_REF_SET_BLACK(ref) do { \
129 		GC_TRACE_SET_COLOR(ref, GC_BLACK); \
130 		GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
131 	} while (0)
132 
133 #define GC_REF_SET_PURPLE(ref) do { \
134 		GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
135 		GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
136 	} while (0)
137 
138 /* bit stealing tags for gc_root_buffer.ref */
139 #define GC_BITS    0x3
140 
141 #define GC_ROOT    0x0 /* possible root of circular garbage     */
142 #define GC_UNUSED  0x1 /* part of linked list of unused buffers */
143 #define GC_GARBAGE 0x2 /* garbage to delete                     */
144 #define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */
145 
146 #define GC_GET_PTR(ptr) \
147 	((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
148 
149 #define GC_IS_ROOT(ptr) \
150 	((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
151 #define GC_IS_UNUSED(ptr) \
152 	((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
153 #define GC_IS_GARBAGE(ptr) \
154 	((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
155 #define GC_IS_DTOR_GARBAGE(ptr) \
156 	((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)
157 
158 #define GC_MAKE_GARBAGE(ptr) \
159 	((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
160 #define GC_MAKE_DTOR_GARBAGE(ptr) \
161 	((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
162 
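/*
 * Editorial sketch (not engine code): gc_root_buffer.ref pointers are at
 * least 4-byte aligned, so the two low bits are free to carry one of the
 * GC_ROOT / GC_UNUSED / GC_GARBAGE / GC_DTOR_GARBAGE tags defined above.
 */
#if 0
#include <assert.h>
#include <stdint.h>

int main(void)
{
    int   object;                                        /* stand-in for a zend_refcounted */
    void *tagged = (void*)(((uintptr_t)&object) | 0x2);  /* GC_MAKE_GARBAGE                */

    assert((((uintptr_t)tagged) & 0x3) == 0x2);          /* GC_IS_GARBAGE                  */
    assert((void*)(((uintptr_t)tagged) & ~(uintptr_t)0x3) == (void*)&object); /* GC_GET_PTR */
    return 0;
}
#endif
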
163 /* GC address conversion */
164 #define GC_IDX2PTR(idx)      (GC_G(buf) + (idx))
165 #define GC_PTR2IDX(ptr)      ((ptr) - GC_G(buf))
166 
167 #define GC_IDX2LIST(idx)     ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
168 #define GC_LIST2IDX(list)    (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
169 
170 /* GC buffers */
171 #define GC_INVALID           0
172 #define GC_FIRST_ROOT        1
173 
174 #define GC_DEFAULT_BUF_SIZE  (16 * 1024)
175 #define GC_BUF_GROW_STEP     (128 * 1024)
176 
177 #define GC_MAX_UNCOMPRESSED  (512 * 1024)
178 #define GC_MAX_BUF_SIZE      0x40000000
179 
180 #define GC_THRESHOLD_DEFAULT 10000
181 #define GC_THRESHOLD_STEP    10000
182 #define GC_THRESHOLD_MAX     1000000000
183 #define GC_THRESHOLD_TRIGGER 100
184 
185 /* GC flags */
186 #define GC_HAS_DESTRUCTORS  (1<<0)
187 
188 /* unused buffers */
189 #define GC_HAS_UNUSED() \
190 	(GC_G(unused) != GC_INVALID)
191 #define GC_FETCH_UNUSED() \
192 	gc_fetch_unused()
193 #define GC_LINK_UNUSED(root) \
194 	gc_link_unused(root)
195 
196 #define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
197 	(GC_G(first_unused) < GC_G(gc_threshold))
198 #define GC_HAS_NEXT_UNUSED() \
199 	(GC_G(first_unused) != GC_G(buf_size))
200 #define GC_FETCH_NEXT_UNUSED() \
201 	gc_fetch_next_unused()
202 
203 ZEND_API int (*gc_collect_cycles)(void);
204 
205 typedef struct _gc_root_buffer {
206 	zend_refcounted  *ref;
207 } gc_root_buffer;
208 
209 typedef struct _zend_gc_globals {
210 	gc_root_buffer   *buf;				/* preallocated arrays of buffers   */
211 
212 	zend_bool         gc_enabled;
213 	zend_bool         gc_active;        /* GC currently running, forbid nested GC */
214 	zend_bool         gc_protected;     /* GC protected, forbid root additions */
215 	zend_bool         gc_full;
216 
217 	uint32_t          unused;			/* linked list of unused buffers    */
218 	uint32_t          first_unused;		/* first unused buffer              */
219 	uint32_t          gc_threshold;     /* GC collection threshold          */
220 	uint32_t          buf_size;			/* size of the GC buffer            */
221 	uint32_t          num_roots;		/* number of roots in GC buffer     */
222 
223 	uint32_t gc_runs;
224 	uint32_t collected;
225 
226 #if GC_BENCH
227 	uint32_t root_buf_length;
228 	uint32_t root_buf_peak;
229 	uint32_t zval_possible_root;
230 	uint32_t zval_buffered;
231 	uint32_t zval_remove_from_buffer;
232 	uint32_t zval_marked_grey;
233 #endif
234 } zend_gc_globals;
235 
236 #ifdef ZTS
237 static int gc_globals_id;
238 static size_t gc_globals_offset;
239 #define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
240 #else
241 #define GC_G(v) (gc_globals.v)
242 static zend_gc_globals gc_globals;
243 #endif
244 
245 #if GC_BENCH
246 # define GC_BENCH_INC(counter) GC_G(counter)++
247 # define GC_BENCH_DEC(counter) GC_G(counter)--
248 # define GC_BENCH_PEAK(peak, counter) do {		\
249 		if (GC_G(counter) > GC_G(peak)) {		\
250 			GC_G(peak) = GC_G(counter);			\
251 		}										\
252 	} while (0)
253 #else
254 # define GC_BENCH_INC(counter)
255 # define GC_BENCH_DEC(counter)
256 # define GC_BENCH_PEAK(peak, counter)
257 #endif
258 
259 
260 #define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
261 
262 typedef struct _gc_stack gc_stack;
263 
264 struct _gc_stack {
265 	gc_stack        *prev;
266 	gc_stack        *next;
267 	zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
268 };
269 
270 #define GC_STACK_DCL(init) \
271 	gc_stack *_stack = init; \
272 	size_t    _top = 0;
273 
274 #define GC_STACK_PUSH(ref) \
275 	gc_stack_push(&_stack, &_top, ref);
276 
277 #define GC_STACK_POP() \
278 	gc_stack_pop(&_stack, &_top)
279 
280 static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
281 {
282 	if (UNEXPECTED(!stack->next)) {
283 		gc_stack *segment = emalloc(sizeof(gc_stack));
284 		segment->prev = stack;
285 		segment->next = NULL;
286 		stack->next = segment;
287 	}
288 	return stack->next;
289 }
290 
291 static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
292 {
293 	if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
294 		(*stack) = gc_stack_next(*stack);
295 		(*top) = 0;
296 	}
297 	(*stack)->data[(*top)++] = ref;
298 }
299 
300 static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
301 {
302 	if (UNEXPECTED((*top) == 0)) {
303 		if (!(*stack)->prev) {
304 			return NULL;
305 		} else {
306 			(*stack) = (*stack)->prev;
307 			(*top) = GC_STACK_SEGMENT_SIZE - 1;
308 			return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
309 		}
310 	} else {
311 		return (*stack)->data[--(*top)];
312 	}
313 }
314 
315 static void gc_stack_free(gc_stack *stack)
316 {
317 	gc_stack *p = stack->next;
318 
319 	while (p) {
320 		stack = p->next;
321 		efree(p);
322 		p = stack;
323 	}
324 }
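
/*
 * Editorial sketch (not engine code): the mark stack above is a linked list
 * of fixed-size segments that are allocated lazily and reused.  The same
 * push/pop logic, reduced to a 4-slot segment and plain malloc/free.
 */
#if 0
#include <assert.h>
#include <stdlib.h>

#define SEG_SIZE 4

typedef struct seg { struct seg *prev, *next; void *data[SEG_SIZE]; } seg;

static void push(seg **s, size_t *top, void *p)
{
    if (*top == SEG_SIZE) {                 /* current segment is full */
        if (!(*s)->next) {
            seg *n = calloc(1, sizeof(seg));
            n->prev = *s;
            (*s)->next = n;
        }
        *s = (*s)->next;
        *top = 0;
    }
    (*s)->data[(*top)++] = p;
}

static void *pop(seg **s, size_t *top)
{
    if (*top == 0) {
        if (!(*s)->prev) return NULL;       /* stack exhausted */
        *s = (*s)->prev;
        *top = SEG_SIZE;
    }
    return (*s)->data[--(*top)];
}

int main(void)
{
    seg    first = {NULL, NULL, {0}};
    seg   *s = &first;
    size_t top = 0;
    int    v[6] = {0, 1, 2, 3, 4, 5};

    for (int i = 0; i < 6; i++) push(&s, &top, &v[i]);   /* spills into a 2nd segment */
    for (int i = 5; i >= 0; i--) assert(pop(&s, &top) == &v[i]);
    assert(pop(&s, &top) == NULL);

    free(first.next);
    return 0;
}
#endif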
325 
326 static zend_always_inline uint32_t gc_compress(uint32_t idx)
327 {
328 	if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
329 		return idx;
330 	}
331 	return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
332 }
333 
334 static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
335 {
336 	gc_root_buffer *root = GC_IDX2PTR(idx);
337 
338 	if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
339 		return root;
340 	}
341 
342 	while (1) {
343 		idx += GC_MAX_UNCOMPRESSED;
344 		ZEND_ASSERT(idx < GC_G(first_unused));
345 		root = GC_IDX2PTR(idx);
346 		if (GC_GET_PTR(root->ref) == ref) {
347 			return root;
348 		}
349 	}
350 }
351 
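/*
 * Editorial sketch (not engine code): root-buffer indices that do not fit
 * into the 20-bit GC_ADDRESS field are stored modulo GC_MAX_UNCOMPRESSED
 * with an extra bit set, so distinct slots can share a compressed value;
 * gc_decompress() above resolves this by probing idx, idx + 512K, idx + 1M,
 * ... until the entry whose ref pointer matches is found.
 */
#if 0
#include <stdio.h>

#define MAX_UNCOMPRESSED (512 * 1024)   /* mirrors GC_MAX_UNCOMPRESSED */

static unsigned compress(unsigned idx)  /* mirrors gc_compress()       */
{
    return idx < MAX_UNCOMPRESSED
        ? idx
        : (idx % MAX_UNCOMPRESSED) | MAX_UNCOMPRESSED;
}

int main(void)
{
    /* 700000 and 1748576 (= 700000 + 2 * 524288) collide on 700000, which is
     * why decompression re-checks the stored ref pointer at each probe. */
    printf("%u %u %u\n",
           compress(1000), compress(700000), compress(700000 + 2 * MAX_UNCOMPRESSED));
    return 0;
}
#endif
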
352 static zend_always_inline uint32_t gc_fetch_unused(void)
353 {
354 	uint32_t idx;
355 	gc_root_buffer *root;
356 
357 	ZEND_ASSERT(GC_HAS_UNUSED());
358 	idx = GC_G(unused);
359 	root = GC_IDX2PTR(idx);
360 	ZEND_ASSERT(GC_IS_UNUSED(root->ref));
361 	GC_G(unused) = GC_LIST2IDX(root->ref);
362 	return idx;
363 }
364 
365 static zend_always_inline void gc_link_unused(gc_root_buffer *root)
366 {
367 	root->ref = GC_IDX2LIST(GC_G(unused));
368 	GC_G(unused) = GC_PTR2IDX(root);
369 }
370 
371 static zend_always_inline uint32_t gc_fetch_next_unused(void)
372 {
373 	uint32_t idx;
374 
375 	ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
376 	idx = GC_G(first_unused);
377 	GC_G(first_unused) = GC_G(first_unused) + 1;
378 	return idx;
379 }
380 
381 #if ZEND_GC_DEBUG > 1
382 static const char *gc_color_name(uint32_t color) {
383 	switch (color) {
384 		case GC_BLACK: return "black";
385 		case GC_WHITE: return "white";
386 		case GC_GREY: return "grey";
387 		case GC_PURPLE: return "purple";
388 		default: return "unknown";
389 	}
390 }
391 static void gc_trace_ref(zend_refcounted *ref) {
392 	if (GC_TYPE(ref) == IS_OBJECT) {
393 		zend_object *obj = (zend_object *) ref;
394 		fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
395 			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
396 			gc_color_name(GC_REF_COLOR(ref)),
397 			obj->ce->name->val, obj->handle);
398 	} else if (GC_TYPE(ref) == IS_ARRAY) {
399 		zend_array *arr = (zend_array *) ref;
400 		fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
401 			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
402 			gc_color_name(GC_REF_COLOR(ref)),
403 			zend_hash_num_elements(arr));
404 	} else {
405 		fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
406 			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
407 			gc_color_name(GC_REF_COLOR(ref)),
408 			GC_TYPE(ref) == IS_REFERENCE
409 				? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
410 	}
411 }
412 #endif
413 
414 static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
415 {
416 	GC_LINK_UNUSED(root);
417 	GC_G(num_roots)--;
418 	GC_BENCH_DEC(root_buf_length);
419 }
420 
421 static void root_buffer_dtor(zend_gc_globals *gc_globals)
422 {
423 	if (gc_globals->buf) {
424 		free(gc_globals->buf);
425 		gc_globals->buf = NULL;
426 	}
427 }
428 
429 static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
430 {
431 	gc_globals->gc_enabled = 0;
432 	gc_globals->gc_active = 0;
433 	gc_globals->gc_protected = 1;
434 	gc_globals->gc_full = 0;
435 
436 	gc_globals->buf = NULL;
437 	gc_globals->unused = GC_INVALID;
438 	gc_globals->first_unused = GC_INVALID;
439 	gc_globals->gc_threshold = GC_INVALID;
440 	gc_globals->buf_size = GC_INVALID;
441 	gc_globals->num_roots = 0;
442 
443 	gc_globals->gc_runs = 0;
444 	gc_globals->collected = 0;
445 
446 #if GC_BENCH
447 	gc_globals->root_buf_length = 0;
448 	gc_globals->root_buf_peak = 0;
449 	gc_globals->zval_possible_root = 0;
450 	gc_globals->zval_buffered = 0;
451 	gc_globals->zval_remove_from_buffer = 0;
452 	gc_globals->zval_marked_grey = 0;
453 #endif
454 }
455 
456 void gc_globals_ctor(void)
457 {
458 #ifdef ZTS
459 	ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
460 #else
461 	gc_globals_ctor_ex(&gc_globals);
462 #endif
463 }
464 
465 void gc_globals_dtor(void)
466 {
467 #ifndef ZTS
468 	root_buffer_dtor(&gc_globals);
469 #endif
470 }
471 
472 void gc_reset(void)
473 {
474 	if (GC_G(buf)) {
475 		GC_G(gc_active) = 0;
476 		GC_G(gc_protected) = 0;
477 		GC_G(gc_full) = 0;
478 		GC_G(unused) = GC_INVALID;
479 		GC_G(first_unused) = GC_FIRST_ROOT;
480 		GC_G(num_roots) = 0;
481 
482 		GC_G(gc_runs) = 0;
483 		GC_G(collected) = 0;
484 
485 #if GC_BENCH
486 		GC_G(root_buf_length) = 0;
487 		GC_G(root_buf_peak) = 0;
488 		GC_G(zval_possible_root) = 0;
489 		GC_G(zval_buffered) = 0;
490 		GC_G(zval_remove_from_buffer) = 0;
491 		GC_G(zval_marked_grey) = 0;
492 #endif
493 	}
494 }
495 
496 ZEND_API zend_bool gc_enable(zend_bool enable)
497 {
498 	zend_bool old_enabled = GC_G(gc_enabled);
499 	GC_G(gc_enabled) = enable;
500 	if (enable && !old_enabled && GC_G(buf) == NULL) {
501 		GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
502 		GC_G(buf)[0].ref = NULL;
503 		GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
504 		GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT + GC_FIRST_ROOT;
505 		gc_reset();
506 	}
507 	return old_enabled;
508 }
509 
510 ZEND_API zend_bool gc_enabled(void)
511 {
512 	return GC_G(gc_enabled);
513 }
514 
515 ZEND_API zend_bool gc_protect(zend_bool protect)
516 {
517 	zend_bool old_protected = GC_G(gc_protected);
518 	GC_G(gc_protected) = protect;
519 	return old_protected;
520 }
521 
522 ZEND_API zend_bool gc_protected(void)
523 {
524 	return GC_G(gc_protected);
525 }
526 
527 static void gc_grow_root_buffer(void)
528 {
529 	size_t new_size;
530 
531 	if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
532 		if (!GC_G(gc_full)) {
533 			zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
534 			GC_G(gc_active) = 1;
535 			GC_G(gc_protected) = 1;
536 			GC_G(gc_full) = 1;
537 			return;
538 		}
539 	}
540 	if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
541 		new_size = GC_G(buf_size) * 2;
542 	} else {
543 		new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
544 	}
545 	if (new_size > GC_MAX_BUF_SIZE) {
546 		new_size = GC_MAX_BUF_SIZE;
547 	}
548 	GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
549 	GC_G(buf_size) = new_size;
550 }
551 
552 static void gc_adjust_threshold(int count)
553 {
554 	uint32_t new_threshold;
555 
556 	/* TODO Very simple heuristic for dynamic GC buffer resizing:
557 	 * If there are "too few" collections, increase the collection threshold
558 	 * by a fixed step */
559 	if (count < GC_THRESHOLD_TRIGGER) {
560 		/* increase */
561 		if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
562 			new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
563 			if (new_threshold > GC_THRESHOLD_MAX) {
564 				new_threshold = GC_THRESHOLD_MAX;
565 			}
566 			if (new_threshold > GC_G(buf_size)) {
567 				gc_grow_root_buffer();
568 			}
569 			if (new_threshold <= GC_G(buf_size)) {
570 				GC_G(gc_threshold) = new_threshold;
571 			}
572 		}
573 	} else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
574 		new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
575 		if (new_threshold < GC_THRESHOLD_DEFAULT) {
576 			new_threshold = GC_THRESHOLD_DEFAULT;
577 		}
578 		GC_G(gc_threshold) = new_threshold;
579 	}
580 }
581 
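/* Slow path of gc_possible_root(): the buffer is full or past the activation
 * threshold, so trigger a collection (and grow the buffer if necessary)
 * before the new possible root is recorded. */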
582 static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
583 {
584 	uint32_t idx;
585 	gc_root_buffer *newRoot;
586 
587 	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
588 	ZEND_ASSERT(GC_INFO(ref) == 0);
589 
590 	if (GC_G(gc_enabled) && !GC_G(gc_active)) {
591 		GC_ADDREF(ref);
592 		gc_adjust_threshold(gc_collect_cycles());
593 		if (UNEXPECTED(GC_DELREF(ref)) == 0) {
594 			rc_dtor_func(ref);
595 			return;
596 		} else if (UNEXPECTED(GC_INFO(ref))) {
597 			return;
598 		}
599 	}
600 
601 	if (GC_HAS_UNUSED()) {
602 		idx = GC_FETCH_UNUSED();
603 	} else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
604 		idx = GC_FETCH_NEXT_UNUSED();
605 	} else {
606 		gc_grow_root_buffer();
607 		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
608 			return;
609 		}
610 		idx = GC_FETCH_NEXT_UNUSED();
611 	}
612 
613 	newRoot = GC_IDX2PTR(idx);
614 	newRoot->ref = ref; /* GC_ROOT tag is 0 */
615 	GC_TRACE_SET_COLOR(ref, GC_PURPLE);
616 
617 	idx = gc_compress(idx);
618 	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
619 	GC_G(num_roots)++;
620 
621 	GC_BENCH_INC(zval_buffered);
622 	GC_BENCH_INC(root_buf_length);
623 	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
624 }
625 
626 ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
627 {
628 	uint32_t idx;
629 	gc_root_buffer *newRoot;
630 
631 	if (UNEXPECTED(GC_G(gc_protected))) {
632 		return;
633 	}
634 
635 	GC_BENCH_INC(zval_possible_root);
636 
637 	if (EXPECTED(GC_HAS_UNUSED())) {
638 		idx = GC_FETCH_UNUSED();
639 	} else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
640 		idx = GC_FETCH_NEXT_UNUSED();
641 	} else {
642 		gc_possible_root_when_full(ref);
643 		return;
644 	}
645 
646 	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
647 	ZEND_ASSERT(GC_INFO(ref) == 0);
648 
649 	newRoot = GC_IDX2PTR(idx);
650 	newRoot->ref = ref; /* GC_ROOT tag is 0 */
651 	GC_TRACE_SET_COLOR(ref, GC_PURPLE);
652 
653 	idx = gc_compress(idx);
654 	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
655 	GC_G(num_roots)++;
656 
657 	GC_BENCH_INC(zval_buffered);
658 	GC_BENCH_INC(root_buf_length);
659 	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
660 }
661 
662 static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
663 {
664 	gc_root_buffer *root = gc_decompress(ref, idx);
665 	gc_remove_from_roots(root);
666 }
667 
668 ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
669 {
670 	gc_root_buffer *root;
671 	uint32_t idx = GC_REF_ADDRESS(ref);
672 
673 	GC_BENCH_INC(zval_remove_from_buffer);
674 
675 	if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
676 		GC_TRACE_SET_COLOR(ref, GC_BLACK);
677 	}
678 	GC_REF_SET_INFO(ref, 0);
679 
680 	/* Perform decompression only in case of large buffers */
681 	if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
682 		gc_remove_compressed(ref, idx);
683 		return;
684 	}
685 
686 	ZEND_ASSERT(idx);
687 	root = GC_IDX2PTR(idx);
688 	gc_remove_from_roots(root);
689 }
690 
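/* Paint a live subgraph black again, re-adding the references that
 * gc_mark_grey() subtracted during trial deletion. */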
691 static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
692 {
693 	HashTable *ht = NULL;
694 	Bucket *p, *end;
695 	zval *zv;
696 	GC_STACK_DCL(stack);
697 
698 tail_call:
699 	if (GC_TYPE(ref) == IS_OBJECT) {
700 		zend_object *obj = (zend_object*)ref;
701 
702 		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
703 			int n;
704 			zval *zv, *end;
705 			zval tmp;
706 
707 			ZVAL_OBJ(&tmp, obj);
708 			ht = obj->handlers->get_gc(&tmp, &zv, &n);
709 			end = zv + n;
710 			if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_BLACK))) {
711 				ht = NULL;
712 				if (!n) goto next;
713 				while (!Z_REFCOUNTED_P(--end)) {
714 					if (zv == end) goto next;
715 				}
716 			} else {
717 				GC_REF_SET_BLACK(ht);
718 			}
719 			while (zv != end) {
720 				if (Z_REFCOUNTED_P(zv)) {
721 					ref = Z_COUNTED_P(zv);
722 					GC_ADDREF(ref);
723 					if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
724 						GC_REF_SET_BLACK(ref);
725 						GC_STACK_PUSH(ref);
726 					}
727 				}
728 				zv++;
729 			}
730 			if (EXPECTED(!ht)) {
731 				ref = Z_COUNTED_P(zv);
732 				GC_ADDREF(ref);
733 				if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
734 					GC_REF_SET_BLACK(ref);
735 					goto tail_call;
736 				}
737 				goto next;
738 			}
739 		} else {
740 			goto next;
741 		}
742 	} else if (GC_TYPE(ref) == IS_ARRAY) {
743 		if ((zend_array*)ref != &EG(symbol_table)) {
744 			ht = (zend_array*)ref;
745 		} else {
746 			goto next;
747 		}
748 	} else if (GC_TYPE(ref) == IS_REFERENCE) {
749 		if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
750 			ref = Z_COUNTED(((zend_reference*)ref)->val);
751 			GC_ADDREF(ref);
752 			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
753 				GC_REF_SET_BLACK(ref);
754 				goto tail_call;
755 			}
756 		}
757 		goto next;
758 	} else {
759 		goto next;
760 	}
761 
762 	if (!ht->nNumUsed) goto next;
763 	p = ht->arData;
764 	end = p + ht->nNumUsed;
765 	while (1) {
766 		end--;
767 		zv = &end->val;
768 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
769 			zv = Z_INDIRECT_P(zv);
770 		}
771 		if (Z_REFCOUNTED_P(zv)) {
772 			break;
773 		}
774 		if (p == end) goto next;
775 	}
776 	while (p != end) {
777 		zv = &p->val;
778 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
779 			zv = Z_INDIRECT_P(zv);
780 		}
781 		if (Z_REFCOUNTED_P(zv)) {
782 			ref = Z_COUNTED_P(zv);
783 			GC_ADDREF(ref);
784 			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
785 				GC_REF_SET_BLACK(ref);
786 				GC_STACK_PUSH(ref);
787 			}
788 		}
789 		p++;
790 	}
791 	zv = &p->val;
792 	if (Z_TYPE_P(zv) == IS_INDIRECT) {
793 		zv = Z_INDIRECT_P(zv);
794 	}
795 	ref = Z_COUNTED_P(zv);
796 	GC_ADDREF(ref);
797 	if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
798 		GC_REF_SET_BLACK(ref);
799 		goto tail_call;
800 	}
801 
802 next:
803 	ref = GC_STACK_POP();
804 	if (ref) {
805 		goto tail_call;
806 	}
807 }
808 
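/* Trial deletion: color the traversed subgraph grey and subtract the
 * internal references it holds, so that only externally referenced nodes
 * keep a refcount above zero. */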
809 static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
810 {
811 	HashTable *ht = NULL;
812 	Bucket *p, *end;
813 	zval *zv;
814 	GC_STACK_DCL(stack);
815 
816 	do {
817 		GC_BENCH_INC(zval_marked_grey);
818 
819 		if (GC_TYPE(ref) == IS_OBJECT) {
820 			zend_object *obj = (zend_object*)ref;
821 
822 			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
823 				int n;
824 				zval *zv, *end;
825 				zval tmp;
826 
827 				ZVAL_OBJ(&tmp, obj);
828 				ht = obj->handlers->get_gc(&tmp, &zv, &n);
829 				end = zv + n;
830 				if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_GREY))) {
831 					ht = NULL;
832 					if (!n) goto next;
833 					while (!Z_REFCOUNTED_P(--end)) {
834 						if (zv == end) goto next;
835 					}
836 				} else {
837 					GC_REF_SET_COLOR(ht, GC_GREY);
838 				}
839 				while (zv != end) {
840 					if (Z_REFCOUNTED_P(zv)) {
841 						ref = Z_COUNTED_P(zv);
842 						GC_DELREF(ref);
843 						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
844 							GC_REF_SET_COLOR(ref, GC_GREY);
845 							GC_STACK_PUSH(ref);
846 						}
847 					}
848 					zv++;
849 				}
850 				if (EXPECTED(!ht)) {
851 					ref = Z_COUNTED_P(zv);
852 					GC_DELREF(ref);
853 					if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
854 						GC_REF_SET_COLOR(ref, GC_GREY);
855 						continue;
856 					}
857 					goto next;
858 				}
859 			} else {
860 				goto next;
861 			}
862 		} else if (GC_TYPE(ref) == IS_ARRAY) {
863 			if (((zend_array*)ref) == &EG(symbol_table)) {
864 				GC_REF_SET_BLACK(ref);
865 				goto next;
866 			} else {
867 				ht = (zend_array*)ref;
868 			}
869 		} else if (GC_TYPE(ref) == IS_REFERENCE) {
870 			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
871 				ref = Z_COUNTED(((zend_reference*)ref)->val);
872 				GC_DELREF(ref);
873 				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
874 					GC_REF_SET_COLOR(ref, GC_GREY);
875 					continue;
876 				}
877 			}
878 			goto next;
879 		} else {
880 			goto next;
881 		}
882 
883 		if (!ht->nNumUsed) goto next;
884 		p = ht->arData;
885 		end = p + ht->nNumUsed;
886 		while (1) {
887 			end--;
888 			zv = &end->val;
889 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
890 				zv = Z_INDIRECT_P(zv);
891 			}
892 			if (Z_REFCOUNTED_P(zv)) {
893 				break;
894 			}
895 			if (p == end) goto next;
896 		}
897 		while (p != end) {
898 			zv = &p->val;
899 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
900 				zv = Z_INDIRECT_P(zv);
901 			}
902 			if (Z_REFCOUNTED_P(zv)) {
903 				ref = Z_COUNTED_P(zv);
904 				GC_DELREF(ref);
905 				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
906 					GC_REF_SET_COLOR(ref, GC_GREY);
907 					GC_STACK_PUSH(ref);
908 				}
909 			}
910 			p++;
911 		}
912 		zv = &p->val;
913 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
914 			zv = Z_INDIRECT_P(zv);
915 		}
916 		ref = Z_COUNTED_P(zv);
917 		GC_DELREF(ref);
918 		if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
919 			GC_REF_SET_COLOR(ref, GC_GREY);
920 			continue;
921 		}
922 
923 next:
924 		ref = GC_STACK_POP();
925 	} while (ref);
926 }
927 
928 /* Two-Finger compaction algorithm */
929 static void gc_compact(void)
930 {
931 	if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
932 		if (GC_G(num_roots)) {
933 			gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
934 			gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
935 			gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
936 			uint32_t idx;
937 			zend_refcounted *p;
938 
939 			while (free < scan) {
940 				while (!GC_IS_UNUSED(free->ref)) {
941 					free++;
942 				}
943 				while (GC_IS_UNUSED(scan->ref)) {
944 					scan--;
945 				}
946 				if (scan > free) {
947 					p = scan->ref;
948 					free->ref = p;
949 					p = GC_GET_PTR(p);
950 					idx = gc_compress(GC_PTR2IDX(free));
951 					GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
952 					free++;
953 					scan--;
954 					if (scan <= end) {
955 						break;
956 					}
957 				}
958 			}
959 		}
960 		GC_G(unused) = GC_INVALID;
961 		GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
962 	}
963 }
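
/*
 * Editorial sketch (not engine code) of the two-finger scheme used by
 * gc_compact() above: one cursor looks for holes from the start, the other
 * for live entries from the end, and live entries are moved down into the
 * holes.  The GC_REF_SET_INFO bookkeeping of the real function is omitted.
 */
#if 0
#include <stdio.h>

int main(void)
{
    int  buf[8] = {1, 0, 3, 0, 0, 6, 7, 0};     /* 0 marks an unused slot    */
    int *free_slot = buf, *scan = buf + 7;

    while (free_slot < scan) {
        while (*free_slot != 0) free_slot++;    /* next hole from the left   */
        while (*scan == 0)      scan--;         /* next live from the right  */
        if (scan > free_slot) {
            *free_slot++ = *scan;
            *scan-- = 0;
        }
    }
    for (int i = 0; i < 8; i++) printf("%d ", buf[i]);   /* 1 7 3 6 0 0 0 0 */
    printf("\n");
    return 0;
}
#endif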
964 
965 static void gc_mark_roots(gc_stack *stack)
966 {
967 	gc_root_buffer *current, *last;
968 
969 	gc_compact();
970 
971 	current = GC_IDX2PTR(GC_FIRST_ROOT);
972 	last = GC_IDX2PTR(GC_G(first_unused));
973 	while (current != last) {
974 		if (GC_IS_ROOT(current->ref)) {
975 			if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
976 				GC_REF_SET_COLOR(current->ref, GC_GREY);
977 				gc_mark_grey(current->ref, stack);
978 			}
979 		}
980 		current++;
981 	}
982 }
983 
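/* Decide the fate of a grey subgraph: nodes that kept refcount > 0 are
 * rescued via gc_scan_black(), the rest are colored white as garbage
 * candidates. */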
984 static void gc_scan(zend_refcounted *ref, gc_stack *stack)
985 {
986 	HashTable *ht = NULL;
987 	Bucket *p, *end;
988 	zval *zv;
989 	GC_STACK_DCL(stack);
990 
991 tail_call:
992 	if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
993 		if (GC_REFCOUNT(ref) > 0) {
994 			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
995 				GC_REF_SET_BLACK(ref);
996 				if (UNEXPECTED(!_stack->next)) {
997 					gc_stack_next(_stack);
998 				}
999 				/* Split stack and reuse the tail */
1000 				_stack->next->prev = NULL;
1001 				gc_scan_black(ref, _stack->next);
1002 				_stack->next->prev = _stack;
1003 			}
1004 		} else {
1005 			if (GC_TYPE(ref) == IS_OBJECT) {
1006 				zend_object *obj = (zend_object*)ref;
1007 
1008 				if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1009 					int n;
1010 					zval *zv, *end;
1011 					zval tmp;
1012 
1013 					ZVAL_OBJ(&tmp, obj);
1014 					ht = obj->handlers->get_gc(&tmp, &zv, &n);
1015 					end = zv + n;
1016 					if (EXPECTED(!ht) || UNEXPECTED(!GC_REF_CHECK_COLOR(ht, GC_GREY))) {
1017 						ht = NULL;
1018 						if (!n) goto next;
1019 						while (!Z_REFCOUNTED_P(--end)) {
1020 							if (zv == end) goto next;
1021 						}
1022 					} else {
1023 						GC_REF_SET_COLOR(ht, GC_WHITE);
1024 					}
1025 					while (zv != end) {
1026 						if (Z_REFCOUNTED_P(zv)) {
1027 							ref = Z_COUNTED_P(zv);
1028 							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1029 								GC_REF_SET_COLOR(ref, GC_WHITE);
1030 								GC_STACK_PUSH(ref);
1031 							}
1032 						}
1033 						zv++;
1034 					}
1035 					if (EXPECTED(!ht)) {
1036 						ref = Z_COUNTED_P(zv);
1037 						if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1038 							GC_REF_SET_COLOR(ref, GC_WHITE);
1039 							goto tail_call;
1040 						}
1041 						goto next;
1042 					}
1043 				} else {
1044 					goto next;
1045 				}
1046 			} else if (GC_TYPE(ref) == IS_ARRAY) {
1047 				if ((zend_array*)ref == &EG(symbol_table)) {
1048 					GC_REF_SET_BLACK(ref);
1049 					goto next;
1050 				} else {
1051 					ht = (zend_array*)ref;
1052 				}
1053 			} else if (GC_TYPE(ref) == IS_REFERENCE) {
1054 				if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1055 					ref = Z_COUNTED(((zend_reference*)ref)->val);
1056 					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1057 						GC_REF_SET_COLOR(ref, GC_WHITE);
1058 						goto tail_call;
1059 					}
1060 				}
1061 				goto next;
1062 			} else {
1063 				goto next;
1064 			}
1065 
1066 			if (!ht->nNumUsed) goto next;
1067 			p = ht->arData;
1068 			end = p + ht->nNumUsed;
1069 			while (1) {
1070 				end--;
1071 				zv = &end->val;
1072 				if (Z_TYPE_P(zv) == IS_INDIRECT) {
1073 					zv = Z_INDIRECT_P(zv);
1074 				}
1075 				if (Z_REFCOUNTED_P(zv)) {
1076 					break;
1077 				}
1078 				if (p == end) goto next;
1079 			}
1080 			while (p != end) {
1081 				zv = &p->val;
1082 				if (Z_TYPE_P(zv) == IS_INDIRECT) {
1083 					zv = Z_INDIRECT_P(zv);
1084 				}
1085 				if (Z_REFCOUNTED_P(zv)) {
1086 					ref = Z_COUNTED_P(zv);
1087 					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1088 						GC_REF_SET_COLOR(ref, GC_WHITE);
1089 						GC_STACK_PUSH(ref);
1090 					}
1091 				}
1092 				p++;
1093 			}
1094 			zv = &p->val;
1095 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1096 				zv = Z_INDIRECT_P(zv);
1097 			}
1098 			ref = Z_COUNTED_P(zv);
1099 			if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
1100 				GC_REF_SET_COLOR(ref, GC_WHITE);
1101 				goto tail_call;
1102 			}
1103 		}
1104 	}
1105 
1106 next:
1107 	ref = GC_STACK_POP();
1108 	if (ref) {
1109 		goto tail_call;
1110 	}
1111 }
1112 
1113 static void gc_scan_roots(gc_stack *stack)
1114 {
1115 	gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1116 	gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1117 
1118 	while (current != last) {
1119 		if (GC_IS_ROOT(current->ref)) {
1120 			if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
1121 				GC_REF_SET_COLOR(current->ref, GC_WHITE);
1122 				gc_scan(current->ref, stack);
1123 			}
1124 		}
1125 		current++;
1126 	}
1127 }
1128 
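/* Record a garbage node that is not yet in the root buffer, tagged as
 * GC_GARBAGE so it gets destroyed and freed at the end of the run. */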
1129 static void gc_add_garbage(zend_refcounted *ref)
1130 {
1131 	uint32_t idx;
1132 	gc_root_buffer *buf;
1133 
1134 	if (GC_HAS_UNUSED()) {
1135 		idx = GC_FETCH_UNUSED();
1136 	} else if (GC_HAS_NEXT_UNUSED()) {
1137 		idx = GC_FETCH_NEXT_UNUSED();
1138 	} else {
1139 		gc_grow_root_buffer();
1140 		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
1141 			return;
1142 		}
1143 		idx = GC_FETCH_NEXT_UNUSED();
1144 	}
1145 
1146 	buf = GC_IDX2PTR(idx);
1147 	buf->ref = GC_MAKE_GARBAGE(ref);
1148 
1149 	idx = gc_compress(idx);
1150 	GC_REF_SET_INFO(ref, idx | GC_BLACK);
1151 	GC_G(num_roots)++;
1152 }
1153 
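/* Walk a white (garbage) subgraph: restore the refcounts dropped during
 * marking, add unbuffered nodes to the root buffer via gc_add_garbage(),
 * flag objects whose destructors still have to run, and count the garbage
 * found. */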
1154 static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
1155 {
1156 	int count = 0;
1157 	HashTable *ht = NULL;
1158 	Bucket *p, *end;
1159 	zval *zv;
1160 	GC_STACK_DCL(stack);
1161 
1162 	do {
1163 		/* don't count references for compatibility ??? */
1164 		if (GC_TYPE(ref) != IS_REFERENCE) {
1165 			count++;
1166 		}
1167 
1168 		if (GC_TYPE(ref) == IS_OBJECT) {
1169 			zend_object *obj = (zend_object*)ref;
1170 
1171 			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1172 				int n;
1173 				zval *zv, *end;
1174 				zval tmp;
1175 
1176 				/* optimization: color is GC_BLACK (0) */
1177 				if (!GC_INFO(ref)) {
1178 					gc_add_garbage(ref);
1179 				}
1180 				if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
1181 				 && (obj->handlers->dtor_obj != zend_objects_destroy_object
1182 				  || obj->ce->destructor != NULL)) {
1183 					*flags |= GC_HAS_DESTRUCTORS;
1184 				}
1185 				ZVAL_OBJ(&tmp, obj);
1186 				ht = obj->handlers->get_gc(&tmp, &zv, &n);
1187 				end = zv + n;
1188 				if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_BLACK))) {
1189 					ht = NULL;
1190 					if (!n) goto next;
1191 					while (!Z_REFCOUNTED_P(--end)) {
1192 						if (zv == end) goto next;
1193 					}
1194 				} else {
1195 					GC_REF_SET_BLACK(ht);
1196 				}
1197 				while (zv != end) {
1198 					if (Z_REFCOUNTED_P(zv)) {
1199 						ref = Z_COUNTED_P(zv);
1200 						GC_ADDREF(ref);
1201 						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1202 							GC_REF_SET_BLACK(ref);
1203 							GC_STACK_PUSH(ref);
1204 						}
1205 					}
1206 					zv++;
1207 				}
1208 				if (EXPECTED(!ht)) {
1209 					ref = Z_COUNTED_P(zv);
1210 					GC_ADDREF(ref);
1211 					if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1212 						GC_REF_SET_BLACK(ref);
1213 						continue;
1214 					}
1215 					goto next;
1216 				}
1217 			} else {
1218 				goto next;
1219 			}
1220 		} else if (GC_TYPE(ref) == IS_ARRAY) {
1221 			/* optimization: color is GC_BLACK (0) */
1222 			if (!GC_INFO(ref)) {
1223 				gc_add_garbage(ref);
1224 			}
1225 			ht = (zend_array*)ref;
1226 		} else if (GC_TYPE(ref) == IS_REFERENCE) {
1227 			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1228 				ref = Z_COUNTED(((zend_reference*)ref)->val);
1229 				GC_ADDREF(ref);
1230 				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1231 					GC_REF_SET_BLACK(ref);
1232 					continue;
1233 				}
1234 			}
1235 			goto next;
1236 		} else {
1237 			goto next;
1238 		}
1239 
1240 		if (!ht->nNumUsed) goto next;
1241 		p = ht->arData;
1242 		end = p + ht->nNumUsed;
1243 		while (1) {
1244 			end--;
1245 			zv = &end->val;
1246 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1247 				zv = Z_INDIRECT_P(zv);
1248 			}
1249 			if (Z_REFCOUNTED_P(zv)) {
1250 				break;
1251 			}
1252 			if (p == end) goto next;
1253 		}
1254 		while (p != end) {
1255 			zv = &p->val;
1256 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1257 				zv = Z_INDIRECT_P(zv);
1258 			}
1259 			if (Z_REFCOUNTED_P(zv)) {
1260 				ref = Z_COUNTED_P(zv);
1261 				GC_ADDREF(ref);
1262 				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1263 					GC_REF_SET_BLACK(ref);
1264 					GC_STACK_PUSH(ref);
1265 				}
1266 			}
1267 			p++;
1268 		}
1269 		zv = &p->val;
1270 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
1271 			zv = Z_INDIRECT_P(zv);
1272 		}
1273 		ref = Z_COUNTED_P(zv);
1274 		GC_ADDREF(ref);
1275 		if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1276 			GC_REF_SET_BLACK(ref);
1277 			continue;
1278 		}
1279 
1280 next:
1281 		ref = GC_STACK_POP();
1282 	} while (ref);
1283 
1284 	return count;
1285 }
1286 
1287 static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
1288 {
1289 	uint32_t idx, end;
1290 	zend_refcounted *ref;
1291 	int count = 0;
1292 	gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
1293 	gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
1294 
1295 	/* remove non-garbage from the list */
1296 	while (current != last) {
1297 		if (GC_IS_ROOT(current->ref)) {
1298 			if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
1299 				GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
1300 				gc_remove_from_roots(current);
1301 			}
1302 		}
1303 		current++;
1304 	}
1305 
1306 	gc_compact();
1307 
1308 	/* Root buffer might be reallocated during gc_collect_white,
1309 	 * make sure to reload pointers. */
1310 	idx = GC_FIRST_ROOT;
1311 	end = GC_G(first_unused);
1312 	while (idx != end) {
1313 		current = GC_IDX2PTR(idx);
1314 		ref = current->ref;
1315 		ZEND_ASSERT(GC_IS_ROOT(ref));
1316 		current->ref = GC_MAKE_GARBAGE(ref);
1317 		if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
1318 			GC_REF_SET_BLACK(ref);
1319 			count += gc_collect_white(ref, flags, stack);
1320 		}
1321 		idx++;
1322 	}
1323 
1324 	return count;
1325 }
1326 
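/* For a root whose destructor will run: remove the data nested under it
 * from the root buffer and return the number of removed entries, so the
 * caller can subtract them from the collected count. */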
1327 static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root)
1328 {
1329 	HashTable *ht = NULL;
1330 	Bucket *p, *end;
1331 	zval *zv;
1332 	int count = 0;
1333 
1334 tail_call:
1335 	do {
1336 		if (root) {
1337 			root = NULL;
1338 			count++;
1339 		} else if (GC_REF_ADDRESS(ref) != 0
1340 		 && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
1341 			GC_TRACE_REF(ref, "removing from buffer");
1342 			GC_REMOVE_FROM_BUFFER(ref);
1343 			count++;
1344 		} else if (GC_TYPE(ref) == IS_REFERENCE) {
1345 			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
1346 				ref = Z_COUNTED(((zend_reference*)ref)->val);
1347 				goto tail_call;
1348 			}
1349 			return count;
1350 		} else {
1351 			return count;
1352 		}
1353 
1354 		if (GC_TYPE(ref) == IS_OBJECT) {
1355 			zend_object *obj = (zend_object*)ref;
1356 
1357 			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
1358 				int n;
1359 				zval *zv, *end;
1360 				zval tmp;
1361 
1362 				ZVAL_OBJ(&tmp, obj);
1363 				ht = obj->handlers->get_gc(&tmp, &zv, &n);
1364 				end = zv + n;
1365 				if (EXPECTED(!ht)) {
1366 					if (!n) return count;
1367 					while (!Z_REFCOUNTED_P(--end)) {
1368 						if (zv == end) return count;
1369 					}
1370 				}
1371 				while (zv != end) {
1372 					if (Z_REFCOUNTED_P(zv)) {
1373 						ref = Z_COUNTED_P(zv);
1374 						count += gc_remove_nested_data_from_buffer(ref, NULL);
1375 					}
1376 					zv++;
1377 				}
1378 				if (EXPECTED(!ht)) {
1379 					ref = Z_COUNTED_P(zv);
1380 					goto tail_call;
1381 				}
1382 				if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
1383 					GC_TRACE_REF(ht, "removing from buffer");
1384 					GC_REMOVE_FROM_BUFFER(ht);
1385 				}
1386 			} else {
1387 				return count;
1388 			}
1389 		} else if (GC_TYPE(ref) == IS_ARRAY) {
1390 			ht = (zend_array*)ref;
1391 		} else {
1392 			return count;
1393 		}
1394 
1395 		if (!ht->nNumUsed) return count;
1396 		p = ht->arData;
1397 		end = p + ht->nNumUsed;
1398 		while (1) {
1399 			end--;
1400 			zv = &end->val;
1401 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1402 				zv = Z_INDIRECT_P(zv);
1403 			}
1404 			if (Z_REFCOUNTED_P(zv)) {
1405 				break;
1406 			}
1407 			if (p == end) return count;
1408 		}
1409 		while (p != end) {
1410 			zv = &p->val;
1411 			if (Z_TYPE_P(zv) == IS_INDIRECT) {
1412 				zv = Z_INDIRECT_P(zv);
1413 			}
1414 			if (Z_REFCOUNTED_P(zv)) {
1415 				ref = Z_COUNTED_P(zv);
1416 				count += gc_remove_nested_data_from_buffer(ref, NULL);
1417 			}
1418 			p++;
1419 		}
1420 		zv = &p->val;
1421 		if (Z_TYPE_P(zv) == IS_INDIRECT) {
1422 			zv = Z_INDIRECT_P(zv);
1423 		}
1424 		ref = Z_COUNTED_P(zv);
1425 		goto tail_call;
1426 	} while (0);
1427 }
1428 
1429 ZEND_API int zend_gc_collect_cycles(void)
1430 {
1431 	int count = 0;
1432 
1433 	if (GC_G(num_roots)) {
1434 		gc_root_buffer *current, *last;
1435 		zend_refcounted *p;
1436 		uint32_t gc_flags = 0;
1437 		uint32_t idx, end;
1438 		gc_stack stack;
1439 
1440 		stack.prev = NULL;
1441 		stack.next = NULL;
1442 
1443 		if (GC_G(gc_active)) {
1444 			return 0;
1445 		}
1446 
1447 		GC_TRACE("Collecting cycles");
1448 		GC_G(gc_runs)++;
1449 		GC_G(gc_active) = 1;
1450 
1451 		GC_TRACE("Marking roots");
1452 		gc_mark_roots(&stack);
1453 		GC_TRACE("Scanning roots");
1454 		gc_scan_roots(&stack);
1455 
1456 		GC_TRACE("Collecting roots");
1457 		count = gc_collect_roots(&gc_flags, &stack);
1458 
1459 		gc_stack_free(&stack);
1460 
1461 		if (!GC_G(num_roots)) {
1462 			/* nothing to free */
1463 			GC_TRACE("Nothing to free");
1464 			GC_G(gc_active) = 0;
1465 			return 0;
1466 		}
1467 
1468 		end = GC_G(first_unused);
1469 
1470 		if (gc_flags & GC_HAS_DESTRUCTORS) {
1471 			GC_TRACE("Calling destructors");
1472 
1473 			/* During a destructor call, new externally visible references to nested data may
1474 			 * be introduced. These references can be introduced in a way that does not
1475 			 * modify any refcounts, so we have no real way to detect this situation
1476 			 * short of rerunning full GC tracing. What we do instead is to only run
1477 			 * destructors at this point, and leave the actual freeing of the objects
1478 			 * until the next GC run. */
1479 
1480 			/* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
1481 			 * color them purple. This serves a double purpose: First, they should be
1482 			 * considered new potential roots for the next GC run. Second, it will prevent
1483 			 * their removal from the root buffer by nested data removal. */
1484 			idx = GC_FIRST_ROOT;
1485 			current = GC_IDX2PTR(GC_FIRST_ROOT);
1486 			while (idx != end) {
1487 				if (GC_IS_GARBAGE(current->ref)) {
1488 					p = GC_GET_PTR(current->ref);
1489 					if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1490 						zend_object *obj = (zend_object *) p;
1491 						if (obj->handlers->dtor_obj != zend_objects_destroy_object
1492 							|| obj->ce->destructor) {
1493 							current->ref = GC_MAKE_DTOR_GARBAGE(obj);
1494 							GC_REF_SET_COLOR(obj, GC_PURPLE);
1495 						} else {
1496 							GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
1497 						}
1498 					}
1499 				}
1500 				current++;
1501 				idx++;
1502 			}
1503 
1504 			/* Remove nested data for objects on which a destructor will be called.
1505 			 * This will not remove the objects themselves, as they have been colored
1506 			 * purple. */
1507 			idx = GC_FIRST_ROOT;
1508 			current = GC_IDX2PTR(GC_FIRST_ROOT);
1509 			while (idx != end) {
1510 				if (GC_IS_DTOR_GARBAGE(current->ref)) {
1511 					p = GC_GET_PTR(current->ref);
1512 					count -= gc_remove_nested_data_from_buffer(p, current);
1513 				}
1514 				current++;
1515 				idx++;
1516 			}
1517 
1518 			/* Actually call destructors.
1519 			 *
1520 			 * The root buffer might be reallocated during destructors calls,
1521 			 * make sure to reload pointers as necessary. */
1522 			idx = GC_FIRST_ROOT;
1523 			while (idx != end) {
1524 				current = GC_IDX2PTR(idx);
1525 				if (GC_IS_DTOR_GARBAGE(current->ref)) {
1526 					p = GC_GET_PTR(current->ref);
1527 					/* Mark this is as a normal root for the next GC run,
1528 					 * it's no longer garbage for this run. */
1529 					current->ref = p;
1530 					/* Double check that the destructor hasn't been called yet. It could have
1531 					 * already been invoked indirectly by some other destructor. */
1532 					if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
1533 						zend_object *obj = (zend_object*)p;
1534 						GC_TRACE_REF(obj, "calling destructor");
1535 						GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
1536 						GC_ADDREF(obj);
1537 						obj->handlers->dtor_obj(obj);
1538 						GC_DELREF(obj);
1539 					}
1540 				}
1541 				idx++;
1542 			}
1543 
1544 			if (GC_G(gc_protected)) {
1545 				/* something went wrong */
1546 				return 0;
1547 			}
1548 		}
1549 
1550 		/* Destroy zvals. The root buffer may be reallocated. */
1551 		GC_TRACE("Destroying zvals");
1552 		idx = GC_FIRST_ROOT;
1553 		while (idx != end) {
1554 			current = GC_IDX2PTR(idx);
1555 			if (GC_IS_GARBAGE(current->ref)) {
1556 				p = GC_GET_PTR(current->ref);
1557 				GC_TRACE_REF(p, "destroying");
1558 				if (GC_TYPE(p) == IS_OBJECT) {
1559 					zend_object *obj = (zend_object*)p;
1560 
1561 					EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
1562 					GC_TYPE_INFO(obj) = IS_NULL |
1563 						(GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
1564 					/* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
1565 					current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
1566 					if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
1567 						GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
1568 						GC_ADDREF(obj);
1569 						obj->handlers->free_obj(obj);
1570 						GC_DELREF(obj);
1571 					}
1572 
1573 					ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
1574 				} else if (GC_TYPE(p) == IS_ARRAY) {
1575 					zend_array *arr = (zend_array*)p;
1576 
1577 					GC_TYPE_INFO(arr) = IS_NULL |
1578 						(GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);
1579 
1580 					/* GC may destroy arrays with rc>1. This is valid and safe. */
1581 					HT_ALLOW_COW_VIOLATION(arr);
1582 
1583 					zend_hash_destroy(arr);
1584 				}
1585 			}
1586 			idx++;
1587 		}
1588 
1589 		/* Free objects */
1590 		current = GC_IDX2PTR(GC_FIRST_ROOT);
1591 		last = GC_IDX2PTR(end);
1592 		while (current != last) {
1593 			if (GC_IS_GARBAGE(current->ref)) {
1594 				p = GC_GET_PTR(current->ref);
1595 				GC_LINK_UNUSED(current);
1596 				GC_G(num_roots)--;
1597 				efree(p);
1598 			}
1599 			current++;
1600 		}
1601 
1602 		GC_TRACE("Collection finished");
1603 		GC_G(collected) += count;
1604 		GC_G(gc_active) = 0;
1605 	}
1606 
1607 	gc_compact();
1608 
1609 	return count;
1610 }
1611 
1612 ZEND_API void zend_gc_get_status(zend_gc_status *status)
1613 {
1614 	status->runs = GC_G(gc_runs);
1615 	status->collected = GC_G(collected);
1616 	status->threshold = GC_G(gc_threshold);
1617 	status->num_roots = GC_G(num_roots);
1618 }
1619 
1620 #ifdef ZTS
1621 size_t zend_gc_globals_size(void)
1622 {
1623 	return sizeof(zend_gc_globals);
1624 }
1625 #endif
1626