/*
   +----------------------------------------------------------------------+
   | Zend Engine                                                          |
   +----------------------------------------------------------------------+
   | Copyright (c) Zend Technologies Ltd. (http://www.zend.com)           |
   +----------------------------------------------------------------------+
   | This source file is subject to version 2.00 of the Zend license,    |
   | that is bundled with this package in the file LICENSE, and is       |
   | available through the world-wide-web at the following url:          |
   | http://www.zend.com/license/2_00.txt.                               |
   | If you did not receive a copy of the Zend license and are unable to |
   | obtain it through the world-wide-web, please send a note to         |
   | license@zend.com so we can mail you a copy immediately.             |
   +----------------------------------------------------------------------+
   | Authors: David Wang <planetbeing@gmail.com>                          |
   |          Dmitry Stogov <dmitry@php.net>                              |
   +----------------------------------------------------------------------+
*/

/**
 * zend_gc_collect_cycles
 * ======================
 *
 * Colors and their meaning
 * ------------------------
 *
 * BLACK  (GC_BLACK)  - In use or free.
 * GREY   (GC_GREY)   - Possible member of cycle.
 * WHITE  (GC_WHITE)  - Member of garbage cycle.
 * PURPLE (GC_PURPLE) - Possible root of cycle.
 *
 * Colors described in the paper but not used
 * ------------------------------------------
 *
 * GREEN  - Acyclic
 * RED    - Candidate cycle undergoing Σ-computation
 * ORANGE - Candidate cycle awaiting epoch boundary.
 *
 *
 * Flow
 * ====
 *
 * The garbage collection cycle starts from 'gc_mark_roots', which traverses
 * the possible roots and, for each root that is marked purple, performs a
 * depth-first traversal via gc_mark_grey.
 *
 * After all possible roots are traversed and marked, gc_scan_roots is
 * called, and gc_scan(root->ref) is invoked for each root.
 *
 * gc_scan checks the colors of possible members.
 *
 * If a node is marked grey and its refcount > 0, gc_scan_black is called on
 * that node to scan its subgraph; otherwise (refcount == 0) the node is
 * marked white.
 *
 * A node MAY be added to the possible roots when ZEND_UNSET_VAR happens or
 * when zend_assign_to_variable is called, but only when a possible garbage
 * node is produced.
 * gc_possible_root() is called to add such nodes to the possible roots.
 *
 *
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc')
 * to get the object properties to scan.
 *
 *
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
 */
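
/* Illustrative example (not part of the engine): the kind of PHP-level
 * cycle this collector exists to reclaim. Plain refcounting alone can
 * never free it, because the refcount never reaches zero:
 *
 *   $a = new stdClass;
 *   $a->self = $a;   // the object now references itself (refcount 2)
 *   unset($a);       // refcount drops to 1, not 0 -> leaked without GC
 *
 * unset() reports the object as a possible root via gc_possible_root(),
 * and a later gc_collect_cycles() run detects the cycle and frees it. */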
#include "zend.h"
#include "zend_API.h"
#include "zend_fibers.h"

#ifndef GC_BENCH
# define GC_BENCH 0
#endif

#ifndef ZEND_GC_DEBUG
# define ZEND_GC_DEBUG 0
#endif

/* GC_INFO layout */
#define GC_ADDRESS  0x0fffffu
#define GC_COLOR    0x300000u

#define GC_BLACK    0x000000u /* must be zero */
#define GC_WHITE    0x100000u
#define GC_GREY     0x200000u
#define GC_PURPLE   0x300000u
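
/* Descriptive note: GC_INFO packs two fields into the upper bits of a
 * refcounted value's type info. The low 20 bits (GC_ADDRESS) hold the
 * (possibly compressed) index of the value's slot in the root buffer;
 * the next two bits (GC_COLOR) hold one of the four colors above. */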

/* Debug tracing */
#if ZEND_GC_DEBUG > 1
# define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
# define GC_TRACE_REF(ref, format, ...) \
    do { \
        gc_trace_ref((zend_refcounted *) ref); \
        fprintf(stderr, format "\n", ##__VA_ARGS__); \
    } while (0)
# define GC_TRACE_SET_COLOR(ref, color) \
    GC_TRACE_REF(ref, "->%s", gc_color_name(color))
#else
# define GC_TRACE_REF(ref, format, ...)
# define GC_TRACE_SET_COLOR(ref, new_color)
# define GC_TRACE(str)
#endif

/* GC_INFO access */
#define GC_REF_ADDRESS(ref) \
    (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)

#define GC_REF_COLOR(ref) \
    (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)

#define GC_REF_CHECK_COLOR(ref, color) \
    ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))

#define GC_REF_SET_INFO(ref, info) do { \
        GC_TYPE_INFO(ref) = \
            (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
            ((info) << GC_INFO_SHIFT); \
    } while (0)

#define GC_REF_SET_COLOR(ref, c) do { \
        GC_TRACE_SET_COLOR(ref, c); \
        GC_TYPE_INFO(ref) = \
            (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
            ((c) << GC_INFO_SHIFT); \
    } while (0)

#define GC_REF_SET_BLACK(ref) do { \
        GC_TRACE_SET_COLOR(ref, GC_BLACK); \
        GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
    } while (0)

#define GC_REF_SET_PURPLE(ref) do { \
        GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
        GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
    } while (0)

/* bit stealing tags for gc_root_buffer.ref */
#define GC_BITS 0x3

#define GC_ROOT         0x0 /* possible root of circular garbage */
#define GC_UNUSED       0x1 /* part of linked list of unused buffers */
#define GC_GARBAGE      0x2 /* garbage to delete */
#define GC_DTOR_GARBAGE 0x3 /* garbage on which only the dtor should be invoked */

#define GC_GET_PTR(ptr) \
    ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))

#define GC_IS_ROOT(ptr) \
    ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
#define GC_IS_UNUSED(ptr) \
    ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
#define GC_IS_GARBAGE(ptr) \
    ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
#define GC_IS_DTOR_GARBAGE(ptr) \
    ((((uintptr_t)(ptr)) & GC_BITS) == GC_DTOR_GARBAGE)

#define GC_MAKE_GARBAGE(ptr) \
    ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
#define GC_MAKE_DTOR_GARBAGE(ptr) \
    ((void*)(((uintptr_t)(ptr)) | GC_DTOR_GARBAGE))
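
/* Descriptive note: the tagging above steals the two low bits of the
 * zend_refcounted pointer stored in gc_root_buffer.ref. This is safe
 * because heap pointers are at least 4-byte aligned, so those bits are
 * always zero in a real pointer and can carry the slot's state instead. */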

/* GC address conversion */
#define GC_IDX2PTR(idx)  (GC_G(buf) + (idx))
#define GC_PTR2IDX(ptr)  ((ptr) - GC_G(buf))

#define GC_IDX2LIST(idx)  ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
#define GC_LIST2IDX(list) (((uint32_t)(uintptr_t)(list)) / sizeof(void*))

/* GC buffers */
#define GC_INVALID    0
#define GC_FIRST_ROOT 1

#define GC_DEFAULT_BUF_SIZE (16 * 1024)
#define GC_BUF_GROW_STEP    (128 * 1024)

#define GC_MAX_UNCOMPRESSED (512 * 1024)
#define GC_MAX_BUF_SIZE     0x40000000

#define GC_THRESHOLD_DEFAULT (10000 + GC_FIRST_ROOT)
#define GC_THRESHOLD_STEP    10000
#define GC_THRESHOLD_MAX     1000000000
#define GC_THRESHOLD_TRIGGER 100

/* GC flags */
#define GC_HAS_DESTRUCTORS (1<<0)

/* unused buffers */
#define GC_HAS_UNUSED() \
    (GC_G(unused) != GC_INVALID)
#define GC_FETCH_UNUSED() \
    gc_fetch_unused()
#define GC_LINK_UNUSED(root) \
    gc_link_unused(root)

#define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
    (GC_G(first_unused) < GC_G(gc_threshold))
#define GC_HAS_NEXT_UNUSED() \
    (GC_G(first_unused) != GC_G(buf_size))
#define GC_FETCH_NEXT_UNUSED() \
    gc_fetch_next_unused()

ZEND_API int (*gc_collect_cycles)(void);

typedef struct _gc_root_buffer {
    zend_refcounted *ref;
} gc_root_buffer;

typedef struct _zend_gc_globals {
    gc_root_buffer *buf;    /* preallocated arrays of buffers */

    bool gc_enabled;
    bool gc_active;         /* GC currently running, forbid nested GC */
    bool gc_protected;      /* GC protected, forbid root additions */
    bool gc_full;

    uint32_t unused;        /* linked list of unused buffers */
    uint32_t first_unused;  /* first unused buffer */
    uint32_t gc_threshold;  /* GC collection threshold */
    uint32_t buf_size;      /* size of the GC buffer */
    uint32_t num_roots;     /* number of roots in GC buffer */

    uint32_t gc_runs;
    uint32_t collected;

#if GC_BENCH
    uint32_t root_buf_length;
    uint32_t root_buf_peak;
    uint32_t zval_possible_root;
    uint32_t zval_buffered;
    uint32_t zval_remove_from_buffer;
    uint32_t zval_marked_grey;
#endif
} zend_gc_globals;

#ifdef ZTS
static int gc_globals_id;
static size_t gc_globals_offset;
#define GC_G(v) ZEND_TSRMG_FAST(gc_globals_offset, zend_gc_globals *, v)
#else
#define GC_G(v) (gc_globals.v)
static zend_gc_globals gc_globals;
#endif

#if GC_BENCH
# define GC_BENCH_INC(counter)  GC_G(counter)++
# define GC_BENCH_DEC(counter)  GC_G(counter)--
# define GC_BENCH_PEAK(peak, counter) do { \
        if (GC_G(counter) > GC_G(peak)) { \
            GC_G(peak) = GC_G(counter); \
        } \
    } while (0)
#else
# define GC_BENCH_INC(counter)
# define GC_BENCH_DEC(counter)
# define GC_BENCH_PEAK(peak, counter)
#endif


#define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)

typedef struct _gc_stack gc_stack;

struct _gc_stack {
    gc_stack *prev;
    gc_stack *next;
    zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
};
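
/* Descriptive note: the traversal stack is a doubly linked list of
 * fixed-size segments. GC_STACK_SEGMENT_SIZE is chosen so that one
 * segment (the two link pointers plus the data array) fits in a
 * 4096-byte allocation after subtracting the allocator's overhead. */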

#define GC_STACK_DCL(init) \
    gc_stack *_stack = init; \
    size_t    _top = 0;

#define GC_STACK_PUSH(ref) \
    gc_stack_push(&_stack, &_top, ref);

#define GC_STACK_POP() \
    gc_stack_pop(&_stack, &_top)

static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
{
    if (UNEXPECTED(!stack->next)) {
        gc_stack *segment = emalloc(sizeof(gc_stack));
        segment->prev = stack;
        segment->next = NULL;
        stack->next = segment;
    }
    return stack->next;
}

static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
{
    if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
        (*stack) = gc_stack_next(*stack);
        (*top) = 0;
    }
    (*stack)->data[(*top)++] = ref;
}

static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
{
    if (UNEXPECTED((*top) == 0)) {
        if (!(*stack)->prev) {
            return NULL;
        } else {
            (*stack) = (*stack)->prev;
            (*top) = GC_STACK_SEGMENT_SIZE - 1;
            return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
        }
    } else {
        return (*stack)->data[--(*top)];
    }
}

static void gc_stack_free(gc_stack *stack)
{
    gc_stack *p = stack->next;

    while (p) {
        stack = p->next;
        efree(p);
        p = stack;
    }
}

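/* Descriptive note: GC_ADDRESS can only store 20 bits, but the root buffer
 * may grow past GC_MAX_UNCOMPRESSED slots. Larger indices are "compressed"
 * to (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED, which still fits;
 * gc_decompress() then recovers the original slot by checking every index
 * congruent to it modulo GC_MAX_UNCOMPRESSED until the stored pointer
 * matches the ref being looked up. */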
static zend_always_inline uint32_t gc_compress(uint32_t idx)
{
    if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
        return idx;
    }
    return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
}

static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
{
    gc_root_buffer *root = GC_IDX2PTR(idx);

    if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
        return root;
    }

    while (1) {
        idx += GC_MAX_UNCOMPRESSED;
        ZEND_ASSERT(idx < GC_G(first_unused));
        root = GC_IDX2PTR(idx);
        if (GC_GET_PTR(root->ref) == ref) {
            return root;
        }
    }
}

static zend_always_inline uint32_t gc_fetch_unused(void)
{
    uint32_t idx;
    gc_root_buffer *root;

    ZEND_ASSERT(GC_HAS_UNUSED());
    idx = GC_G(unused);
    root = GC_IDX2PTR(idx);
    ZEND_ASSERT(GC_IS_UNUSED(root->ref));
    GC_G(unused) = GC_LIST2IDX(root->ref);
    return idx;
}

static zend_always_inline void gc_link_unused(gc_root_buffer *root)
{
    root->ref = GC_IDX2LIST(GC_G(unused));
    GC_G(unused) = GC_PTR2IDX(root);
}

static zend_always_inline uint32_t gc_fetch_next_unused(void)
{
    uint32_t idx;

    ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
    idx = GC_G(first_unused);
    GC_G(first_unused) = GC_G(first_unused) + 1;
    return idx;
}

#if ZEND_GC_DEBUG > 1
static const char *gc_color_name(uint32_t color) {
    switch (color) {
        case GC_BLACK:  return "black";
        case GC_WHITE:  return "white";
        case GC_GREY:   return "grey";
        case GC_PURPLE: return "purple";
        default:        return "unknown";
    }
}
static void gc_trace_ref(zend_refcounted *ref) {
    if (GC_TYPE(ref) == IS_OBJECT) {
        zend_object *obj = (zend_object *) ref;
        fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
            ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
            gc_color_name(GC_REF_COLOR(ref)),
            obj->ce->name->val, obj->handle);
    } else if (GC_TYPE(ref) == IS_ARRAY) {
        zend_array *arr = (zend_array *) ref;
        fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
            ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
            gc_color_name(GC_REF_COLOR(ref)),
            zend_hash_num_elements(arr));
    } else {
        fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
            ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
            gc_color_name(GC_REF_COLOR(ref)),
            GC_TYPE(ref) == IS_REFERENCE
                ? "reference" : zend_get_type_by_const(GC_TYPE(ref)));
    }
}
#endif

static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
{
    GC_LINK_UNUSED(root);
    GC_G(num_roots)--;
    GC_BENCH_DEC(root_buf_length);
}

static void root_buffer_dtor(zend_gc_globals *gc_globals)
{
    if (gc_globals->buf) {
        free(gc_globals->buf);
        gc_globals->buf = NULL;
    }
}

static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
{
    gc_globals->gc_enabled = 0;
    gc_globals->gc_active = 0;
    gc_globals->gc_protected = 1;
    gc_globals->gc_full = 0;

    gc_globals->buf = NULL;
    gc_globals->unused = GC_INVALID;
    gc_globals->first_unused = GC_INVALID;
    gc_globals->gc_threshold = GC_INVALID;
    gc_globals->buf_size = GC_INVALID;
    gc_globals->num_roots = 0;

    gc_globals->gc_runs = 0;
    gc_globals->collected = 0;

#if GC_BENCH
    gc_globals->root_buf_length = 0;
    gc_globals->root_buf_peak = 0;
    gc_globals->zval_possible_root = 0;
    gc_globals->zval_buffered = 0;
    gc_globals->zval_remove_from_buffer = 0;
    gc_globals->zval_marked_grey = 0;
#endif
}

void gc_globals_ctor(void)
{
#ifdef ZTS
    ts_allocate_fast_id(&gc_globals_id, &gc_globals_offset, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
#else
    gc_globals_ctor_ex(&gc_globals);
#endif
}

void gc_globals_dtor(void)
{
#ifndef ZTS
    root_buffer_dtor(&gc_globals);
#endif
}

void gc_reset(void)
{
    if (GC_G(buf)) {
        GC_G(gc_active) = 0;
        GC_G(gc_protected) = 0;
        GC_G(gc_full) = 0;
        GC_G(unused) = GC_INVALID;
        GC_G(first_unused) = GC_FIRST_ROOT;
        GC_G(num_roots) = 0;

        GC_G(gc_runs) = 0;
        GC_G(collected) = 0;

#if GC_BENCH
        GC_G(root_buf_length) = 0;
        GC_G(root_buf_peak) = 0;
        GC_G(zval_possible_root) = 0;
        GC_G(zval_buffered) = 0;
        GC_G(zval_remove_from_buffer) = 0;
        GC_G(zval_marked_grey) = 0;
#endif
    }
}

ZEND_API bool gc_enable(bool enable)
{
    bool old_enabled = GC_G(gc_enabled);
    GC_G(gc_enabled) = enable;
    if (enable && !old_enabled && GC_G(buf) == NULL) {
        GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
        GC_G(buf)[0].ref = NULL;
        GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
        GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT;
        gc_reset();
    }
    return old_enabled;
}

ZEND_API bool gc_enabled(void)
{
    return GC_G(gc_enabled);
}

ZEND_API bool gc_protect(bool protect)
{
    bool old_protected = GC_G(gc_protected);
    GC_G(gc_protected) = protect;
    return old_protected;
}

ZEND_API bool gc_protected(void)
{
    return GC_G(gc_protected);
}

static void gc_grow_root_buffer(void)
{
    size_t new_size;

    if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
        if (!GC_G(gc_full)) {
            zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
            GC_G(gc_active) = 1;
            GC_G(gc_protected) = 1;
            GC_G(gc_full) = 1;
            return;
        }
    }
    if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
        new_size = GC_G(buf_size) * 2;
    } else {
        new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
    }
    if (new_size > GC_MAX_BUF_SIZE) {
        new_size = GC_MAX_BUF_SIZE;
    }
    GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
    GC_G(buf_size) = new_size;
}

static void gc_adjust_threshold(int count)
{
    uint32_t new_threshold;

    /* TODO Very simple heuristic for dynamic GC buffer resizing:
     * If there are "too few" collections, increase the collection threshold
     * by a fixed step */
    if (count < GC_THRESHOLD_TRIGGER || GC_G(num_roots) >= GC_G(gc_threshold)) {
        /* increase */
        if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
            new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
            if (new_threshold > GC_THRESHOLD_MAX) {
                new_threshold = GC_THRESHOLD_MAX;
            }
            if (new_threshold > GC_G(buf_size)) {
                gc_grow_root_buffer();
            }
            if (new_threshold <= GC_G(buf_size)) {
                GC_G(gc_threshold) = new_threshold;
            }
        }
    } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
        new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
        if (new_threshold < GC_THRESHOLD_DEFAULT) {
            new_threshold = GC_THRESHOLD_DEFAULT;
        }
        GC_G(gc_threshold) = new_threshold;
    }
}

static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
{
    uint32_t idx;
    gc_root_buffer *newRoot;

    ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
    ZEND_ASSERT(GC_INFO(ref) == 0);

    if (GC_G(gc_enabled) && !GC_G(gc_active)) {
        GC_ADDREF(ref);
        gc_adjust_threshold(gc_collect_cycles());
        if (UNEXPECTED(GC_DELREF(ref)) == 0) {
            rc_dtor_func(ref);
            return;
        } else if (UNEXPECTED(GC_INFO(ref))) {
            return;
        }
    }

    if (GC_HAS_UNUSED()) {
        idx = GC_FETCH_UNUSED();
    } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
        idx = GC_FETCH_NEXT_UNUSED();
    } else {
        gc_grow_root_buffer();
        if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
            return;
        }
        idx = GC_FETCH_NEXT_UNUSED();
    }

    newRoot = GC_IDX2PTR(idx);
    newRoot->ref = ref; /* GC_ROOT tag is 0 */
    GC_TRACE_SET_COLOR(ref, GC_PURPLE);

    idx = gc_compress(idx);
    GC_REF_SET_INFO(ref, idx | GC_PURPLE);
    GC_G(num_roots)++;

    GC_BENCH_INC(zval_buffered);
    GC_BENCH_INC(root_buf_length);
    GC_BENCH_PEAK(root_buf_peak, root_buf_length);
}

ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
{
    uint32_t idx;
    gc_root_buffer *newRoot;

    if (UNEXPECTED(GC_G(gc_protected))) {
        return;
    }

    GC_BENCH_INC(zval_possible_root);

    if (EXPECTED(GC_HAS_UNUSED())) {
        idx = GC_FETCH_UNUSED();
    } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
        idx = GC_FETCH_NEXT_UNUSED();
    } else {
        gc_possible_root_when_full(ref);
        return;
    }

    ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
    ZEND_ASSERT(GC_INFO(ref) == 0);

    newRoot = GC_IDX2PTR(idx);
    newRoot->ref = ref; /* GC_ROOT tag is 0 */
    GC_TRACE_SET_COLOR(ref, GC_PURPLE);

    idx = gc_compress(idx);
    GC_REF_SET_INFO(ref, idx | GC_PURPLE);
    GC_G(num_roots)++;

    GC_BENCH_INC(zval_buffered);
    GC_BENCH_INC(root_buf_length);
    GC_BENCH_PEAK(root_buf_peak, root_buf_length);
}

static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
{
    gc_root_buffer *root = gc_decompress(ref, idx);
    gc_remove_from_roots(root);
}

ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
{
    gc_root_buffer *root;
    uint32_t idx = GC_REF_ADDRESS(ref);

    GC_BENCH_INC(zval_remove_from_buffer);

    if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
        GC_TRACE_SET_COLOR(ref, GC_BLACK);
    }
    GC_REF_SET_INFO(ref, 0);

    /* Perform decompression only in case of large buffers */
    if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
        gc_remove_compressed(ref, idx);
        return;
    }

    ZEND_ASSERT(idx);
    root = GC_IDX2PTR(idx);
    gc_remove_from_roots(root);
}

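/* Descriptive note: gc_scan_black restores an externally referenced
 * subgraph to BLACK, re-incrementing the child refcounts that
 * gc_mark_grey speculatively decremented. The duplicated inner loops in
 * the traversal functions below are a manual tail-call optimization:
 * the first refcounted child found becomes the next 'ref' (goto
 * tail_call) while the remaining children are pushed onto the gc_stack. */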
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
{
    HashTable *ht;
    Bucket *p;
    zval *zv;
    uint32_t n;
    GC_STACK_DCL(stack);

tail_call:
    if (GC_TYPE(ref) == IS_OBJECT) {
        zend_object *obj = (zend_object*)ref;

        if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
            zval *table;
            int len;

            ht = obj->handlers->get_gc(obj, &table, &len);
            n = len;
            zv = table;
            if (UNEXPECTED(ht)) {
                GC_ADDREF(ht);
                if (!GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
                    GC_REF_SET_BLACK(ht);
                    for (; n != 0; n--) {
                        if (Z_REFCOUNTED_P(zv)) {
                            ref = Z_COUNTED_P(zv);
                            GC_ADDREF(ref);
                            if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
                                GC_REF_SET_BLACK(ref);
                                GC_STACK_PUSH(ref);
                            }
                        }
                        zv++;
                    }
                    goto handle_ht;
                }
            }

handle_zvals:
            for (; n != 0; n--) {
                if (Z_REFCOUNTED_P(zv)) {
                    ref = Z_COUNTED_P(zv);
                    GC_ADDREF(ref);
                    if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
                        GC_REF_SET_BLACK(ref);
                        zv++;
                        while (--n) {
                            if (Z_REFCOUNTED_P(zv)) {
                                zend_refcounted *ref = Z_COUNTED_P(zv);
                                GC_ADDREF(ref);
                                if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
                                    GC_REF_SET_BLACK(ref);
                                    GC_STACK_PUSH(ref);
                                }
                            }
                            zv++;
                        }
                        goto tail_call;
                    }
                }
                zv++;
            }
        }
    } else if (GC_TYPE(ref) == IS_ARRAY) {
        ZEND_ASSERT((zend_array*)ref != &EG(symbol_table));
        ht = (zend_array*)ref;
handle_ht:
        n = ht->nNumUsed;
        zv = ht->arPacked;
        if (HT_IS_PACKED(ht)) {
            goto handle_zvals;
        }

        p = (Bucket*)zv;
        for (; n != 0; n--) {
            zv = &p->val;
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
                zv = Z_INDIRECT_P(zv);
            }
            if (Z_REFCOUNTED_P(zv)) {
                ref = Z_COUNTED_P(zv);
                GC_ADDREF(ref);
                if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
                    GC_REF_SET_BLACK(ref);
                    p++;
                    while (--n) {
                        zv = &p->val;
                        if (Z_TYPE_P(zv) == IS_INDIRECT) {
                            zv = Z_INDIRECT_P(zv);
                        }
                        if (Z_REFCOUNTED_P(zv)) {
                            zend_refcounted *ref = Z_COUNTED_P(zv);
                            GC_ADDREF(ref);
                            if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
                                GC_REF_SET_BLACK(ref);
                                GC_STACK_PUSH(ref);
                            }
                        }
                        p++;
                    }
                    goto tail_call;
                }
            }
            p++;
        }
    } else if (GC_TYPE(ref) == IS_REFERENCE) {
        if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
            ref = Z_COUNTED(((zend_reference*)ref)->val);
            GC_ADDREF(ref);
            if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
                GC_REF_SET_BLACK(ref);
                goto tail_call;
            }
        }
    }

    ref = GC_STACK_POP();
    if (ref) {
        goto tail_call;
    }
}

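/* Descriptive note: gc_mark_grey performs the "trial deletion" of the
 * Bacon/Rajan algorithm. It greys a candidate subgraph and decrements the
 * refcount of every internal reference, so any refcount that remains
 * afterwards can only come from references outside the candidate set. */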
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
{
    HashTable *ht;
    Bucket *p;
    zval *zv;
    uint32_t n;
    GC_STACK_DCL(stack);

tail_call:
    GC_BENCH_INC(zval_marked_grey);

    if (GC_TYPE(ref) == IS_OBJECT) {
        zend_object *obj = (zend_object*)ref;

        if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
            zval *table;
            int len;

            ht = obj->handlers->get_gc(obj, &table, &len);
            n = len;
            zv = table;
            if (UNEXPECTED(ht)) {
                GC_DELREF(ht);
                if (!GC_REF_CHECK_COLOR(ht, GC_GREY)) {
                    GC_REF_SET_COLOR(ht, GC_GREY);
                    for (; n != 0; n--) {
                        if (Z_REFCOUNTED_P(zv)) {
                            ref = Z_COUNTED_P(zv);
                            GC_DELREF(ref);
                            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                                GC_REF_SET_COLOR(ref, GC_GREY);
                                GC_STACK_PUSH(ref);
                            }
                        }
                        zv++;
                    }
                    goto handle_ht;
                }
            }
handle_zvals:
            for (; n != 0; n--) {
                if (Z_REFCOUNTED_P(zv)) {
                    ref = Z_COUNTED_P(zv);
                    GC_DELREF(ref);
                    if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                        GC_REF_SET_COLOR(ref, GC_GREY);
                        zv++;
                        while (--n) {
                            if (Z_REFCOUNTED_P(zv)) {
                                zend_refcounted *ref = Z_COUNTED_P(zv);
                                GC_DELREF(ref);
                                if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                                    GC_REF_SET_COLOR(ref, GC_GREY);
                                    GC_STACK_PUSH(ref);
                                }
                            }
                            zv++;
                        }
                        goto tail_call;
                    }
                }
                zv++;
            }
        }
    } else if (GC_TYPE(ref) == IS_ARRAY) {
        ZEND_ASSERT(((zend_array*)ref) != &EG(symbol_table));
        ht = (zend_array*)ref;
handle_ht:
        n = ht->nNumUsed;
        if (HT_IS_PACKED(ht)) {
            zv = ht->arPacked;
            goto handle_zvals;
        }

        p = ht->arData;
        for (; n != 0; n--) {
            zv = &p->val;
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
                zv = Z_INDIRECT_P(zv);
            }
            if (Z_REFCOUNTED_P(zv)) {
                ref = Z_COUNTED_P(zv);
                GC_DELREF(ref);
                if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                    GC_REF_SET_COLOR(ref, GC_GREY);
                    p++;
                    while (--n) {
                        zv = &p->val;
                        if (Z_TYPE_P(zv) == IS_INDIRECT) {
                            zv = Z_INDIRECT_P(zv);
                        }
                        if (Z_REFCOUNTED_P(zv)) {
                            zend_refcounted *ref = Z_COUNTED_P(zv);
                            GC_DELREF(ref);
                            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                                GC_REF_SET_COLOR(ref, GC_GREY);
                                GC_STACK_PUSH(ref);
                            }
                        }
                        p++;
                    }
                    goto tail_call;
                }
            }
            p++;
        }
    } else if (GC_TYPE(ref) == IS_REFERENCE) {
        if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
            ref = Z_COUNTED(((zend_reference*)ref)->val);
            GC_DELREF(ref);
            if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                GC_REF_SET_COLOR(ref, GC_GREY);
                goto tail_call;
            }
        }
    }

    ref = GC_STACK_POP();
    if (ref) {
        goto tail_call;
    }
}

/* Two-Finger compaction algorithm */
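/* (Descriptive note: one finger advances from the start to the next unused
 * slot, the other retreats from the end to the last live entry; the live
 * entry is moved into the hole and its GC_ADDRESS info is updated, until
 * the fingers meet and all roots are densely packed at the front.) */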
static void gc_compact(void)
{
    if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
        if (GC_G(num_roots)) {
            gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
            gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
            gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
            uint32_t idx;
            zend_refcounted *p;

            while (free < scan) {
                while (!GC_IS_UNUSED(free->ref)) {
                    free++;
                }
                while (GC_IS_UNUSED(scan->ref)) {
                    scan--;
                }
                if (scan > free) {
                    p = scan->ref;
                    free->ref = p;
                    p = GC_GET_PTR(p);
                    idx = gc_compress(GC_PTR2IDX(free));
                    GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
                    free++;
                    scan--;
                    if (scan <= end) {
                        break;
                    }
                }
            }
        }
        GC_G(unused) = GC_INVALID;
        GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
    }
}

static void gc_mark_roots(gc_stack *stack)
{
    gc_root_buffer *current, *last;

    gc_compact();

    current = GC_IDX2PTR(GC_FIRST_ROOT);
    last = GC_IDX2PTR(GC_G(first_unused));
    while (current != last) {
        if (GC_IS_ROOT(current->ref)) {
            if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
                GC_REF_SET_COLOR(current->ref, GC_GREY);
                gc_mark_grey(current->ref, stack);
            }
        }
        current++;
    }
}

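/* Descriptive note: after trial deletion, gc_scan decides each candidate's
 * fate. A WHITE node whose refcount is still above zero must be referenced
 * from outside the candidate cycles, so gc_scan_black re-blackens its whole
 * subgraph (restoring the refcounts); otherwise the node stays WHITE, i.e.
 * garbage, and its GREY children are whitened and scanned in turn. */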
static void gc_scan(zend_refcounted *ref, gc_stack *stack)
{
    HashTable *ht;
    Bucket *p;
    zval *zv;
    uint32_t n;
    GC_STACK_DCL(stack);

tail_call:
    if (!GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
        goto next;
    }

    if (GC_REFCOUNT(ref) > 0) {
        if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
            GC_REF_SET_BLACK(ref);
            if (UNEXPECTED(!_stack->next)) {
                gc_stack_next(_stack);
            }
            /* Split stack and reuse the tail */
            _stack->next->prev = NULL;
            gc_scan_black(ref, _stack->next);
            _stack->next->prev = _stack;
        }
        goto next;
    }

    if (GC_TYPE(ref) == IS_OBJECT) {
        zend_object *obj = (zend_object*)ref;
        if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
            zval *table;
            int len;

            ht = obj->handlers->get_gc(obj, &table, &len);
            n = len;
            zv = table;
            if (UNEXPECTED(ht)) {
                if (GC_REF_CHECK_COLOR(ht, GC_GREY)) {
                    GC_REF_SET_COLOR(ht, GC_WHITE);
                    GC_STACK_PUSH((zend_refcounted *) ht);
                    for (; n != 0; n--) {
                        if (Z_REFCOUNTED_P(zv)) {
                            ref = Z_COUNTED_P(zv);
                            if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                                GC_REF_SET_COLOR(ref, GC_WHITE);
                                GC_STACK_PUSH(ref);
                            }
                        }
                        zv++;
                    }
                    goto handle_ht;
                }
            }

handle_zvals:
            for (; n != 0; n--) {
                if (Z_REFCOUNTED_P(zv)) {
                    ref = Z_COUNTED_P(zv);
                    if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                        GC_REF_SET_COLOR(ref, GC_WHITE);
                        zv++;
                        while (--n) {
                            if (Z_REFCOUNTED_P(zv)) {
                                zend_refcounted *ref = Z_COUNTED_P(zv);
                                if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                                    GC_REF_SET_COLOR(ref, GC_WHITE);
                                    GC_STACK_PUSH(ref);
                                }
                            }
                            zv++;
                        }
                        goto tail_call;
                    }
                }
                zv++;
            }
        }
    } else if (GC_TYPE(ref) == IS_ARRAY) {
        ht = (HashTable *)ref;
        ZEND_ASSERT(ht != &EG(symbol_table));

handle_ht:
        n = ht->nNumUsed;
        if (HT_IS_PACKED(ht)) {
            zv = ht->arPacked;
            goto handle_zvals;
        }

        p = ht->arData;
        for (; n != 0; n--) {
            zv = &p->val;
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
                zv = Z_INDIRECT_P(zv);
            }
            if (Z_REFCOUNTED_P(zv)) {
                ref = Z_COUNTED_P(zv);
                if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                    GC_REF_SET_COLOR(ref, GC_WHITE);
                    p++;
                    while (--n) {
                        zv = &p->val;
                        if (Z_TYPE_P(zv) == IS_INDIRECT) {
                            zv = Z_INDIRECT_P(zv);
                        }
                        if (Z_REFCOUNTED_P(zv)) {
                            zend_refcounted *ref = Z_COUNTED_P(zv);
                            if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                                GC_REF_SET_COLOR(ref, GC_WHITE);
                                GC_STACK_PUSH(ref);
                            }
                        }
                        p++;
                    }
                    goto tail_call;
                }
            }
            p++;
        }
    } else if (GC_TYPE(ref) == IS_REFERENCE) {
        if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
            ref = Z_COUNTED(((zend_reference*)ref)->val);
            if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
                GC_REF_SET_COLOR(ref, GC_WHITE);
                goto tail_call;
            }
        }
    }

next:
    ref = GC_STACK_POP();
    if (ref) {
        goto tail_call;
    }
}

static void gc_scan_roots(gc_stack *stack)
{
    gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
    gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));

    while (current != last) {
        if (GC_IS_ROOT(current->ref)) {
            if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
                GC_REF_SET_COLOR(current->ref, GC_WHITE);
                gc_scan(current->ref, stack);
            }
        }
        current++;
    }
}

static void gc_add_garbage(zend_refcounted *ref)
{
    uint32_t idx;
    gc_root_buffer *buf;

    if (GC_HAS_UNUSED()) {
        idx = GC_FETCH_UNUSED();
    } else if (GC_HAS_NEXT_UNUSED()) {
        idx = GC_FETCH_NEXT_UNUSED();
    } else {
        gc_grow_root_buffer();
        if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
            return;
        }
        idx = GC_FETCH_NEXT_UNUSED();
    }

    buf = GC_IDX2PTR(idx);
    buf->ref = GC_MAKE_GARBAGE(ref);

    idx = gc_compress(idx);
    GC_REF_SET_INFO(ref, idx | GC_BLACK);
    GC_G(num_roots)++;
}

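/* Descriptive note: gc_collect_white turns the surviving WHITE nodes into
 * recorded garbage. It re-increments the child refcounts (undoing the
 * trial deletion so destruction starts from a consistent state), colors
 * the nodes BLACK, appends nodes not already in the root buffer via
 * gc_add_garbage(), and reports via GC_HAS_DESTRUCTORS whether any
 * object still needs its destructor called. */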
static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
{
    int count = 0;
    HashTable *ht;
    Bucket *p;
    zval *zv;
    uint32_t n;
    GC_STACK_DCL(stack);

tail_call:
    /* don't count references for compatibility ??? */
    if (GC_TYPE(ref) != IS_REFERENCE) {
        count++;
    }

    if (GC_TYPE(ref) == IS_OBJECT) {
        zend_object *obj = (zend_object*)ref;

        if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
            int len;
            zval *table;

            /* optimization: color is GC_BLACK (0) */
            if (!GC_INFO(ref)) {
                gc_add_garbage(ref);
            }
            if (!(OBJ_FLAGS(obj) & IS_OBJ_DESTRUCTOR_CALLED)
             && (obj->handlers->dtor_obj != zend_objects_destroy_object
              || obj->ce->destructor != NULL)) {
                *flags |= GC_HAS_DESTRUCTORS;
            }
            ht = obj->handlers->get_gc(obj, &table, &len);
            n = len;
            zv = table;
            if (UNEXPECTED(ht)) {
                GC_ADDREF(ht);
                if (GC_REF_CHECK_COLOR(ht, GC_WHITE)) {
                    GC_REF_SET_BLACK(ht);
                    for (; n != 0; n--) {
                        if (Z_REFCOUNTED_P(zv)) {
                            ref = Z_COUNTED_P(zv);
                            GC_ADDREF(ref);
                            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
                                GC_REF_SET_BLACK(ref);
                                GC_STACK_PUSH(ref);
                            }
                        }
                        zv++;
                    }
                    goto handle_ht;
                }
            }

handle_zvals:
            for (; n != 0; n--) {
                if (Z_REFCOUNTED_P(zv)) {
                    ref = Z_COUNTED_P(zv);
                    GC_ADDREF(ref);
                    if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
                        GC_REF_SET_BLACK(ref);
                        zv++;
                        while (--n) {
                            if (Z_REFCOUNTED_P(zv)) {
                                zend_refcounted *ref = Z_COUNTED_P(zv);
                                GC_ADDREF(ref);
                                if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
                                    GC_REF_SET_BLACK(ref);
                                    GC_STACK_PUSH(ref);
                                }
                            }
                            zv++;
                        }
                        goto tail_call;
                    }
                }
                zv++;
            }
        }
    } else if (GC_TYPE(ref) == IS_ARRAY) {
        /* optimization: color is GC_BLACK (0) */
        if (!GC_INFO(ref)) {
            gc_add_garbage(ref);
        }
        ht = (zend_array*)ref;

handle_ht:
        n = ht->nNumUsed;
        if (HT_IS_PACKED(ht)) {
            zv = ht->arPacked;
            goto handle_zvals;
        }

        p = ht->arData;
        for (; n != 0; n--) {
            zv = &p->val;
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
                zv = Z_INDIRECT_P(zv);
            }
            if (Z_REFCOUNTED_P(zv)) {
                ref = Z_COUNTED_P(zv);
                GC_ADDREF(ref);
                if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
                    GC_REF_SET_BLACK(ref);
                    p++;
                    while (--n) {
                        zv = &p->val;
                        if (Z_TYPE_P(zv) == IS_INDIRECT) {
                            zv = Z_INDIRECT_P(zv);
                        }
                        if (Z_REFCOUNTED_P(zv)) {
                            zend_refcounted *ref = Z_COUNTED_P(zv);
                            GC_ADDREF(ref);
                            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
                                GC_REF_SET_BLACK(ref);
                                GC_STACK_PUSH(ref);
                            }
                        }
                        p++;
                    }
                    goto tail_call;
                }
            }
            p++;
        }
    } else if (GC_TYPE(ref) == IS_REFERENCE) {
        if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
            ref = Z_COUNTED(((zend_reference*)ref)->val);
            GC_ADDREF(ref);
            if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
                GC_REF_SET_BLACK(ref);
                goto tail_call;
            }
        }
    }

    ref = GC_STACK_POP();
    if (ref) {
        goto tail_call;
    }

    return count;
}

static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
{
    uint32_t idx, end;
    zend_refcounted *ref;
    int count = 0;
    gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
    gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));

    /* remove non-garbage from the list */
    while (current != last) {
        if (GC_IS_ROOT(current->ref)) {
            if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
                GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
                gc_remove_from_roots(current);
            }
        }
        current++;
    }

    gc_compact();

    /* Root buffer might be reallocated during gc_collect_white,
     * make sure to reload pointers. */
    idx = GC_FIRST_ROOT;
    end = GC_G(first_unused);
    while (idx != end) {
        current = GC_IDX2PTR(idx);
        ref = current->ref;
        ZEND_ASSERT(GC_IS_ROOT(ref));
        current->ref = GC_MAKE_GARBAGE(ref);
        if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
            GC_REF_SET_BLACK(ref);
            count += gc_collect_white(ref, flags, stack);
        }
        idx++;
    }

    return count;
}

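/* Descriptive note: before destructors run, everything reachable from a
 * dtor-pending object is removed from the garbage list, because the
 * destructor may resurrect that nested data and it must not be freed by
 * this run. The dtor-pending roots themselves are kept, protected by the
 * PURPLE color assigned in zend_gc_collect_cycles(). */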
static int gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root, gc_stack *stack)
{
    HashTable *ht;
    Bucket *p;
    zval *zv;
    uint32_t n;
    int count = 0;
    GC_STACK_DCL(stack);

tail_call:
    if (root) {
        root = NULL;
        count++;
    } else if (GC_REF_ADDRESS(ref) != 0
     && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
        GC_TRACE_REF(ref, "removing from buffer");
        GC_REMOVE_FROM_BUFFER(ref);
        count++;
    } else if (GC_TYPE(ref) == IS_REFERENCE) {
        if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
            ref = Z_COUNTED(((zend_reference*)ref)->val);
            goto tail_call;
        }
        goto next;
    } else {
        goto next;
    }

    if (GC_TYPE(ref) == IS_OBJECT) {
        zend_object *obj = (zend_object*)ref;

        if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED))) {
            int len;
            zval *table;

            ht = obj->handlers->get_gc(obj, &table, &len);
            n = len;
            zv = table;
            if (UNEXPECTED(ht)) {
                for (; n != 0; n--) {
                    if (Z_REFCOUNTED_P(zv)) {
                        ref = Z_COUNTED_P(zv);
                        GC_STACK_PUSH(ref);
                    }
                    zv++;
                }
                if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
                    GC_TRACE_REF(ht, "removing from buffer");
                    GC_REMOVE_FROM_BUFFER(ht);
                }
                goto handle_ht;
            }

handle_zvals:
            for (; n != 0; n--) {
                if (Z_REFCOUNTED_P(zv)) {
                    ref = Z_COUNTED_P(zv);
                    zv++;
                    while (--n) {
                        if (Z_REFCOUNTED_P(zv)) {
                            zend_refcounted *ref = Z_COUNTED_P(zv);
                            GC_STACK_PUSH(ref);
                        }
                        zv++;
                    }
                    goto tail_call;
                }
                zv++;
            }
        }
    } else if (GC_TYPE(ref) == IS_ARRAY) {
        ht = (zend_array*)ref;

handle_ht:
        n = ht->nNumUsed;
        if (HT_IS_PACKED(ht)) {
            zv = ht->arPacked;
            goto handle_zvals;
        }

        p = ht->arData;
        for (; n != 0; n--) {
            zv = &p->val;
            if (Z_TYPE_P(zv) == IS_INDIRECT) {
                zv = Z_INDIRECT_P(zv);
            }
            if (Z_REFCOUNTED_P(zv)) {
                ref = Z_COUNTED_P(zv);
                p++;
                while (--n) {
                    zv = &p->val;
                    if (Z_TYPE_P(zv) == IS_INDIRECT) {
                        zv = Z_INDIRECT_P(zv);
                    }
                    if (Z_REFCOUNTED_P(zv)) {
                        zend_refcounted *ref = Z_COUNTED_P(zv);
                        GC_STACK_PUSH(ref);
                    }
                    p++;
                }
                goto tail_call;
            }
            p++;
        }
    }

next:
    ref = GC_STACK_POP();
    if (ref) {
        goto tail_call;
    }

    return count;
}

static void zend_get_gc_buffer_release(void);
static void zend_gc_check_root_tmpvars(void);
static void zend_gc_remove_root_tmpvars(void);

ZEND_API int zend_gc_collect_cycles(void)
{
    int total_count = 0;
    bool should_rerun_gc = 0;
    bool did_rerun_gc = 0;

    if (GC_G(num_roots) && GC_G(gc_active)) {
        zend_gc_remove_root_tmpvars();
    }

rerun_gc:
    if (GC_G(num_roots)) {
        int count;
        gc_root_buffer *current, *last;
        zend_refcounted *p;
        uint32_t gc_flags = 0;
        uint32_t idx, end;
        gc_stack stack;

        stack.prev = NULL;
        stack.next = NULL;

        if (GC_G(gc_active)) {
            return 0;
        }

        GC_TRACE("Collecting cycles");
        GC_G(gc_runs)++;
        GC_G(gc_active) = 1;

        GC_TRACE("Marking roots");
        gc_mark_roots(&stack);
        GC_TRACE("Scanning roots");
        gc_scan_roots(&stack);

        GC_TRACE("Collecting roots");
        count = gc_collect_roots(&gc_flags, &stack);

        if (!GC_G(num_roots)) {
            /* nothing to free */
            GC_TRACE("Nothing to free");
            gc_stack_free(&stack);
            GC_G(gc_active) = 0;
            goto finish;
        }

        zend_fiber_switch_block();

        end = GC_G(first_unused);

        if (gc_flags & GC_HAS_DESTRUCTORS) {
            GC_TRACE("Calling destructors");

            /* During a destructor call, new externally visible references to nested data may
             * be introduced. These references can be introduced in a way that does not
             * modify any refcounts, so we have no real way to detect this situation
             * short of rerunning full GC tracing. What we do instead is to only run
             * destructors at this point and automatically re-run GC afterwards. */
            should_rerun_gc = 1;

            /* Mark all roots for which a dtor will be invoked as DTOR_GARBAGE. Additionally
             * color them purple. This serves a double purpose: First, they should be
             * considered new potential roots for the next GC run. Second, it will prevent
             * their removal from the root buffer by nested data removal. */
            idx = GC_FIRST_ROOT;
            current = GC_IDX2PTR(GC_FIRST_ROOT);
            while (idx != end) {
                if (GC_IS_GARBAGE(current->ref)) {
                    p = GC_GET_PTR(current->ref);
                    if (GC_TYPE(p) == IS_OBJECT && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
                        zend_object *obj = (zend_object *) p;
                        if (obj->handlers->dtor_obj != zend_objects_destroy_object
                         || obj->ce->destructor) {
                            current->ref = GC_MAKE_DTOR_GARBAGE(obj);
                            GC_REF_SET_COLOR(obj, GC_PURPLE);
                        } else {
                            GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
                        }
                    }
                }
                current++;
                idx++;
            }

            /* Remove nested data for objects on which a destructor will be called.
             * This will not remove the objects themselves, as they have been colored
             * purple. */
            idx = GC_FIRST_ROOT;
            current = GC_IDX2PTR(GC_FIRST_ROOT);
            while (idx != end) {
                if (GC_IS_DTOR_GARBAGE(current->ref)) {
                    p = GC_GET_PTR(current->ref);
                    count -= gc_remove_nested_data_from_buffer(p, current, &stack);
                }
                current++;
                idx++;
            }

            /* Actually call destructors.
             *
             * The root buffer might be reallocated during destructor calls,
             * make sure to reload pointers as necessary. */
            idx = GC_FIRST_ROOT;
            while (idx != end) {
                current = GC_IDX2PTR(idx);
                if (GC_IS_DTOR_GARBAGE(current->ref)) {
                    p = GC_GET_PTR(current->ref);
                    /* Mark this as a normal root for the next GC run,
                     * it's no longer garbage for this run. */
                    current->ref = p;
                    /* Double check that the destructor hasn't been called yet. It could have
                     * already been invoked indirectly by some other destructor. */
                    if (!(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
                        zend_object *obj = (zend_object*)p;
                        GC_TRACE_REF(obj, "calling destructor");
                        GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
                        GC_ADDREF(obj);
                        obj->handlers->dtor_obj(obj);
                        GC_DELREF(obj);
                    }
                }
                idx++;
            }

            if (GC_G(gc_protected)) {
                /* something went wrong */
                zend_get_gc_buffer_release();
                zend_fiber_switch_unblock();
                return 0;
            }
        }

        gc_stack_free(&stack);

        /* Destroy zvals. The root buffer may be reallocated. */
        GC_TRACE("Destroying zvals");
        idx = GC_FIRST_ROOT;
        while (idx != end) {
            current = GC_IDX2PTR(idx);
            if (GC_IS_GARBAGE(current->ref)) {
                p = GC_GET_PTR(current->ref);
                GC_TRACE_REF(p, "destroying");
                if (GC_TYPE(p) == IS_OBJECT) {
                    zend_object *obj = (zend_object*)p;

                    EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
                    GC_TYPE_INFO(obj) = GC_NULL |
                        (GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
                    /* Modify current before calling free_obj (bug #78811: free_obj() can cause the root buffer (with current) to be reallocated.) */
                    current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
                    if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
                        GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
                        GC_ADDREF(obj);
                        obj->handlers->free_obj(obj);
                        GC_DELREF(obj);
                    }

                    ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
                } else if (GC_TYPE(p) == IS_ARRAY) {
                    zend_array *arr = (zend_array*)p;

                    GC_TYPE_INFO(arr) = GC_NULL |
                        (GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);

                    /* GC may destroy arrays with rc>1. This is valid and safe. */
                    HT_ALLOW_COW_VIOLATION(arr);

                    zend_hash_destroy(arr);
                }
            }
            idx++;
        }

        /* Free objects */
        current = GC_IDX2PTR(GC_FIRST_ROOT);
        last = GC_IDX2PTR(end);
        while (current != last) {
            if (GC_IS_GARBAGE(current->ref)) {
                p = GC_GET_PTR(current->ref);
                GC_LINK_UNUSED(current);
                GC_G(num_roots)--;
                efree(p);
            }
            current++;
        }

        zend_fiber_switch_unblock();

        GC_TRACE("Collection finished");
        GC_G(collected) += count;
        total_count += count;
        GC_G(gc_active) = 0;
    }

    gc_compact();

    /* Objects with destructors were removed from this GC run. Rerun GC right away to clean them
     * up. We do this only once: If we encounter more destructors on the second run, we'll not
     * run GC another time. */
    if (should_rerun_gc && !did_rerun_gc) {
        did_rerun_gc = 1;
        goto rerun_gc;
    }

finish:
    zend_get_gc_buffer_release();

    /* Prevent GC from running during zend_gc_check_root_tmpvars, before
     * gc_threshold is adjusted, as this may result in unbounded recursion */
    GC_G(gc_active) = 1;
    zend_gc_check_root_tmpvars();
    GC_G(gc_active) = 0;

    return total_count;
}

ZEND_API void zend_gc_get_status(zend_gc_status *status)
{
    status->runs = GC_G(gc_runs);
    status->collected = GC_G(collected);
    status->threshold = GC_G(gc_threshold);
    status->num_roots = GC_G(num_roots);
}

ZEND_API zend_get_gc_buffer *zend_get_gc_buffer_create(void) {
    /* There can only be one get_gc() call active at a time,
     * so there only needs to be one buffer. */
    zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
    gc_buffer->cur = gc_buffer->start;
    return gc_buffer;
}

ZEND_API void zend_get_gc_buffer_grow(zend_get_gc_buffer *gc_buffer) {
    size_t old_capacity = gc_buffer->end - gc_buffer->start;
    size_t new_capacity = old_capacity == 0 ? 64 : old_capacity * 2;
    gc_buffer->start = erealloc(gc_buffer->start, new_capacity * sizeof(zval));
    gc_buffer->end = gc_buffer->start + new_capacity;
    gc_buffer->cur = gc_buffer->start + old_capacity;
}

static void zend_get_gc_buffer_release(void) {
    zend_get_gc_buffer *gc_buffer = &EG(get_gc_buffer);
    efree(gc_buffer->start);
    gc_buffer->start = gc_buffer->end = gc_buffer->cur = NULL;
}

/* TMPVAR operands are destroyed using zval_ptr_dtor_nogc(), because they usually cannot contain
 * cycles. However, there are some rare exceptions where this is possible, in which case we rely
 * on the producing code to root the value. If a GC run occurs between the rooting and consumption
 * of the value, we would end up leaking it. To avoid this, root all live TMPVAR values here. */
static void zend_gc_check_root_tmpvars(void) {
    zend_execute_data *ex = EG(current_execute_data);
    for (; ex; ex = ex->prev_execute_data) {
        zend_function *func = ex->func;
        if (!func || !ZEND_USER_CODE(func->type)) {
            continue;
        }

        uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
        for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
            const zend_live_range *range = &func->op_array.live_range[i];
            if (range->start > op_num) {
                break;
            }
            if (range->end <= op_num) {
                continue;
            }

            uint32_t kind = range->var & ZEND_LIVE_MASK;
            if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
                uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
                zval *var = ZEND_CALL_VAR(ex, var_num);
                if (Z_REFCOUNTED_P(var)) {
                    gc_check_possible_root(Z_COUNTED_P(var));
                }
            }
        }
    }
}

static void zend_gc_remove_root_tmpvars(void) {
    zend_execute_data *ex = EG(current_execute_data);
    for (; ex; ex = ex->prev_execute_data) {
        zend_function *func = ex->func;
        if (!func || !ZEND_USER_CODE(func->type)) {
            continue;
        }

        uint32_t op_num = ex->opline - ex->func->op_array.opcodes;
        for (uint32_t i = 0; i < func->op_array.last_live_range; i++) {
            const zend_live_range *range = &func->op_array.live_range[i];
            if (range->start > op_num) {
                break;
            }
            if (range->end <= op_num) {
                continue;
            }

            uint32_t kind = range->var & ZEND_LIVE_MASK;
            if (kind == ZEND_LIVE_TMPVAR || kind == ZEND_LIVE_LOOP) {
                uint32_t var_num = range->var & ~ZEND_LIVE_MASK;
                zval *var = ZEND_CALL_VAR(ex, var_num);
                if (Z_REFCOUNTED_P(var)) {
                    GC_REMOVE_FROM_BUFFER(Z_COUNTED_P(var));
                }
            }
        }
    }
}

#ifdef ZTS
size_t zend_gc_globals_size(void)
{
    return sizeof(zend_gc_globals);
}
#endif