1 /*
2 +----------------------------------------------------------------------+
3 | Zend Engine |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 1998-2018 Zend Technologies Ltd. (http://www.zend.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 2.00 of the Zend license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.zend.com/license/2_00.txt. |
11 | If you did not receive a copy of the Zend license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@zend.com so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
15 | Authors: David Wang <planetbeing@gmail.com> |
16 | Dmitry Stogov <dmitry@php.net> |
17 +----------------------------------------------------------------------+
18 */
19
20 /**
21 * zend_gc_collect_cycles
22 * ======================
23 *
24 * Colors and its meaning
25 * ----------------------
26 *
27 * BLACK (GC_BLACK) - In use or free.
28 * GREY (GC_GREY) - Possible member of cycle.
29 * WHITE (GC_WHITE) - Member of garbage cycle.
30 * PURPLE (GC_PURPLE) - Possible root of cycle.
31 *
32 * Colors described in the paper but not used
33 * ------------------------------------------
34 *
35 * GREEN - Acyclic
 * RED     - Candidate cycle undergoing computation (sigma-computation in the paper).
37 * ORANGE - Candidate cycle awaiting epoch boundary.
38 *
39 *
40 * Flow
41 * =====
42 *
 * The garbage collection cycle starts from 'gc_mark_roots', which traverses
 * the possible roots and, for each root that is marked purple, calls
 * gc_mark_grey to perform a depth-first traversal of its subgraph.
46 *
47 * After all possible roots are traversed and marked,
48 * gc_scan_roots will be called, and each root will be called with
49 * gc_scan(root->ref)
50 *
51 * gc_scan checks the colors of possible members.
52 *
53 * If the node is marked as grey and the refcount > 0
54 * gc_scan_black will be called on that node to scan it's subgraph.
55 * otherwise (refcount == 0), it marks the node white.
56 *
 * A node MAY be added to the possible roots when ZEND_UNSET_VAR happens or
 * when zend_assign_to_variable is called, but only when a possible garbage
 * node is produced.
60 * gc_possible_root() will be called to add the nodes to possible roots.
61 *
62 *
63 * For objects, we call their get_gc handler (by default 'zend_std_get_gc') to
64 * get the object properties to scan.
65 *
66 *
67 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
68 */
69 #include "zend.h"
70 #include "zend_API.h"
71
72 #ifndef GC_BENCH
73 # define GC_BENCH 0
74 #endif
75
76 #ifndef ZEND_GC_DEBUG
77 # define ZEND_GC_DEBUG 0
78 #endif
79
80 /* GC_INFO layout */
81 #define GC_ADDRESS 0x0fffff
82 #define GC_COLOR 0x300000
83
84 #define GC_BLACK 0x000000 /* must be zero */
85 #define GC_WHITE 0x100000
86 #define GC_GREY 0x200000
87 #define GC_PURPLE 0x300000
88
89 /* Debug tracing */
90 #if ZEND_GC_DEBUG > 1
91 # define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
92 # define GC_TRACE_REF(ref, format, ...) \
93 do { \
94 gc_trace_ref((zend_refcounted *) ref); \
95 fprintf(stderr, format "\n", ##__VA_ARGS__); \
96 } while (0)
97 # define GC_TRACE_SET_COLOR(ref, color) \
98 GC_TRACE_REF(ref, "->%s", gc_color_name(color))
99 #else
100 # define GC_TRACE_REF(ref, format, ...)
101 # define GC_TRACE_SET_COLOR(ref, new_color)
102 # define GC_TRACE(str)
103 #endif
104
105 /* GC_INFO access */
106 #define GC_REF_ADDRESS(ref) \
107 (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
108
109 #define GC_REF_COLOR(ref) \
110 (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
111
112 #define GC_REF_CHECK_COLOR(ref, color) \
113 ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
114
115 #define GC_REF_SET_INFO(ref, info) do { \
116 GC_TYPE_INFO(ref) = \
117 (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
118 ((info) << GC_INFO_SHIFT); \
119 } while (0)
120
121 #define GC_REF_SET_COLOR(ref, c) do { \
122 GC_TRACE_SET_COLOR(ref, c); \
123 GC_TYPE_INFO(ref) = \
124 (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
125 ((c) << GC_INFO_SHIFT); \
126 } while (0)
127
128 #define GC_REF_SET_BLACK(ref) do { \
129 GC_TRACE_SET_COLOR(ref, GC_BLACK); \
130 GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
131 } while (0)
132
133 #define GC_REF_SET_PURPLE(ref) do { \
134 GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
135 GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
136 } while (0)
137
138 /* bit stealing tags for gc_root_buffer.ref */
139 #define GC_BITS 0x3
140
141 #define GC_ROOT 0x0 /* possible root of circular garbage */
142 #define GC_UNUSED 0x1 /* part of linked list of unused buffers */
143 #define GC_GARBAGE 0x2 /* garbage to delete */
144
145 #define GC_GET_PTR(ptr) \
146 ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
147
148 #define GC_IS_ROOT(ptr) \
149 ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
150 #define GC_IS_UNUSED(ptr) \
151 ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
152 #define GC_IS_GARBAGE(ptr) \
153 ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
154
155 #define GC_MAKE_GARBAGE(ptr) \
156 ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
157
158 /* GC address conversion */
159 #define GC_IDX2PTR(idx) (GC_G(buf) + (idx))
160 #define GC_PTR2IDX(ptr) ((ptr) - GC_G(buf))
161
162 #define GC_IDX2LIST(idx) ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
163 #define GC_LIST2IDX(list) (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
164
165 /* GC buffers */
166 #define GC_INVALID 0
167 #define GC_FIRST_ROOT 1
168
169 #define GC_DEFAULT_BUF_SIZE (16 * 1024)
170 #define GC_BUF_GROW_STEP (128 * 1024)
171
172 #define GC_MAX_UNCOMPRESSED (512 * 1024)
173 #define GC_MAX_BUF_SIZE 0x40000000
174
175 #define GC_THRESHOLD_DEFAULT 10000
176 #define GC_THRESHOLD_STEP 10000
177 #define GC_THRESHOLD_MAX 1000000000
178 #define GC_THRESHOLD_TRIGGER 100
179
180 /* GC flags */
181 #define GC_HAS_DESTRUCTORS (1<<0)
182
183 /* unused buffers */
184 #define GC_HAS_UNUSED() \
185 (GC_G(unused) != GC_INVALID)
186 #define GC_FETCH_UNUSED() \
187 gc_fetch_unused()
188 #define GC_LINK_UNUSED(root) \
189 gc_link_unused(root)
190
191 #define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
192 (GC_G(first_unused) < GC_G(gc_threshold))
193 #define GC_HAS_NEXT_UNUSED() \
194 (GC_G(first_unused) != GC_G(buf_size))
195 #define GC_FETCH_NEXT_UNUSED() \
196 gc_fetch_next_unused()
197
198 ZEND_API int (*gc_collect_cycles)(void);
199
/* One slot of the root buffer.  The two low bits of "ref" are tag bits
 * (GC_ROOT / GC_UNUSED / GC_GARBAGE, see GC_BITS above); for GC_UNUSED
 * slots the remaining bits encode the next free index (GC_IDX2LIST). */
typedef struct _gc_root_buffer {
	zend_refcounted *ref;
} gc_root_buffer;
203
/* Per-process (per-thread under ZTS) state of the cycle collector. */
typedef struct _zend_gc_globals {
	zend_bool gc_enabled;   /* collector enabled via gc_enable() */
	zend_bool gc_active;    /* GC currently running, forbid nested GC */
	zend_bool gc_protected; /* GC protected, forbid root additions */
	zend_bool gc_full;      /* buffer hit GC_MAX_BUF_SIZE; GC disabled (see gc_grow_root_buffer) */

	gc_root_buffer *buf;    /* preallocated arrays of buffers   */
	uint32_t unused;        /* linked list of unused buffers    */
	uint32_t first_unused;  /* first unused buffer              */
	uint32_t gc_threshold;  /* GC collection threshold          */
	uint32_t buf_size;      /* size of the GC buffer            */
	uint32_t num_roots;     /* number of roots in GC buffer     */

	uint32_t gc_runs;       /* reset here; presumably counts collections — incremented in gc_collect_cycles (not in this chunk) */
	uint32_t collected;     /* reset here; presumably total nodes collected — updated in gc_collect_cycles (not in this chunk) */

#if GC_BENCH
	/* Benchmark counters, maintained via GC_BENCH_INC/DEC/PEAK. */
	uint32_t root_buf_length;
	uint32_t root_buf_peak;
	uint32_t zval_possible_root;
	uint32_t zval_buffered;
	uint32_t zval_remove_from_buffer;
	uint32_t zval_marked_grey;
#endif
} zend_gc_globals;
229
230 #ifdef ZTS
231 static int gc_globals_id;
232 #define GC_G(v) ZEND_TSRMG(gc_globals_id, zend_gc_globals *, v)
233 #else
234 #define GC_G(v) (gc_globals.v)
235 static zend_gc_globals gc_globals;
236 #endif
237
238 #if GC_BENCH
239 # define GC_BENCH_INC(counter) GC_G(counter)++
240 # define GC_BENCH_DEC(counter) GC_G(counter)--
241 # define GC_BENCH_PEAK(peak, counter) do { \
242 if (GC_G(counter) > GC_G(peak)) { \
243 GC_G(peak) = GC_G(counter); \
244 } \
245 } while (0)
246 #else
247 # define GC_BENCH_INC(counter)
248 # define GC_BENCH_DEC(counter)
249 # define GC_BENCH_PEAK(peak, counter)
250 #endif
251
252
253 #define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
254
/* Segment of the auxiliary traversal stack used by the marking passes
 * (gc_scan_black / gc_mark_grey / gc_scan / gc_collect_white).
 * Segments are doubly linked and sized (GC_STACK_SEGMENT_SIZE) so that
 * one segment fits a 4096-byte allocation minus ZEND_MM_OVERHEAD. */
typedef struct _gc_stack gc_stack;

struct _gc_stack {
	gc_stack *prev;
	gc_stack *next;
	zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
};
262
263 #define GC_STACK_DCL(init) \
264 gc_stack *_stack = init; \
265 size_t _top = 0;
266
267 #define GC_STACK_PUSH(ref) \
268 gc_stack_push(&_stack, &_top, ref);
269
270 #define GC_STACK_POP() \
271 gc_stack_pop(&_stack, &_top)
272
gc_stack_next(gc_stack * stack)273 static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
274 {
275 if (UNEXPECTED(!stack->next)) {
276 gc_stack *segment = emalloc(sizeof(gc_stack));
277 segment->prev = stack;
278 segment->next = NULL;
279 stack->next = segment;
280 }
281 return stack->next;
282 }
283
gc_stack_push(gc_stack ** stack,size_t * top,zend_refcounted * ref)284 static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
285 {
286 if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
287 (*stack) = gc_stack_next(*stack);
288 (*top) = 0;
289 }
290 (*stack)->data[(*top)++] = ref;
291 }
292
gc_stack_pop(gc_stack ** stack,size_t * top)293 static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
294 {
295 if (UNEXPECTED((*top) == 0)) {
296 if (!(*stack)->prev) {
297 return NULL;
298 } else {
299 (*stack) = (*stack)->prev;
300 (*top) = GC_STACK_SEGMENT_SIZE - 1;
301 return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
302 }
303 } else {
304 return (*stack)->data[--(*top)];
305 }
306 }
307
/* Release every segment chained after the head segment.  The head itself
 * is owned by the caller (typically stack-allocated) and is not freed. */
static void gc_stack_free(gc_stack *stack)
{
	gc_stack *seg, *next;

	for (seg = stack->next; seg != NULL; seg = next) {
		next = seg->next;
		efree(seg);
	}
}
318
gc_compress(uint32_t idx)319 static zend_always_inline uint32_t gc_compress(uint32_t idx)
320 {
321 if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
322 return idx;
323 }
324 return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
325 }
326
gc_decompress(zend_refcounted * ref,uint32_t idx)327 static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
328 {
329 gc_root_buffer *root = GC_IDX2PTR(idx);
330
331 if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
332 return root;
333 }
334
335 while (1) {
336 idx += GC_MAX_UNCOMPRESSED;
337 ZEND_ASSERT(idx < GC_G(first_unused));
338 root = GC_IDX2PTR(idx);
339 if (GC_GET_PTR(root->ref) == ref) {
340 return root;
341 }
342 }
343 }
344
gc_fetch_unused(void)345 static zend_always_inline uint32_t gc_fetch_unused(void)
346 {
347 uint32_t idx;
348 gc_root_buffer *root;
349
350 ZEND_ASSERT(GC_HAS_UNUSED());
351 idx = GC_G(unused);
352 root = GC_IDX2PTR(idx);
353 ZEND_ASSERT(GC_IS_UNUSED(root->ref));
354 GC_G(unused) = GC_LIST2IDX(root->ref);
355 return idx;
356 }
357
/* Push a released root buffer slot onto the head of the unused (free)
 * list.  The previous head index is stored inside the slot itself,
 * tagged GC_UNUSED (see GC_IDX2LIST). */
static zend_always_inline void gc_link_unused(gc_root_buffer *root)
{
	root->ref = GC_IDX2LIST(GC_G(unused));
	GC_G(unused) = GC_PTR2IDX(root);
}
363
gc_fetch_next_unused(void)364 static zend_always_inline uint32_t gc_fetch_next_unused(void)
365 {
366 uint32_t idx;
367
368 ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
369 idx = GC_G(first_unused);
370 GC_G(first_unused) = GC_G(first_unused) + 1;
371 return idx;
372 }
373
374 #if ZEND_GC_DEBUG > 1
/* Debug helper: human-readable name for a GC color constant. */
static const char *gc_color_name(uint32_t color) {
	if (color == GC_BLACK) {
		return "black";
	} else if (color == GC_WHITE) {
		return "white";
	} else if (color == GC_GREY) {
		return "grey";
	} else if (color == GC_PURPLE) {
		return "purple";
	}
	return "unknown";
}
/* Debug helper: print one refcounted node (address, refcount, compressed
 * buffer address, color, and a type-specific description) to stderr.
 * No trailing newline - callers (GC_TRACE_REF) append their own text. */
static void gc_trace_ref(zend_refcounted *ref) {
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object *obj = (zend_object *) ref;
		fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			obj->ce->name->val, obj->handle);
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		zend_array *arr = (zend_array *) ref;
		fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			zend_hash_num_elements(arr));
	} else {
		fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
			ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
			gc_color_name(GC_REF_COLOR(ref)),
			zend_get_type_by_const(GC_TYPE(ref)));
	}
}
404 #endif
405
/* Detach "root" from the set of possible roots: return its slot to the
 * unused list and decrement the live-root count. */
static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
{
	GC_LINK_UNUSED(root);
	GC_G(num_roots)--;
	GC_BENCH_DEC(root_buf_length);
}
412
/* Release the persistently allocated root buffer (pemalloc'd in
 * gc_enable()) and clear the pointer so a later gc_enable() can
 * reallocate it.  free(NULL) is a no-op, so no guard is needed. */
static void root_buffer_dtor(zend_gc_globals *gc_globals)
{
	free(gc_globals->buf);
	gc_globals->buf = NULL;
}
420
/* Initialize a zend_gc_globals structure to its pristine state: GC
 * disabled and protected (no root additions), no buffer allocated.
 * The buffer itself is allocated lazily by gc_enable(). */
static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
{
	gc_globals->gc_enabled = 0;
	gc_globals->gc_active = 0;
	gc_globals->gc_protected = 1;
	gc_globals->gc_full = 0;

	/* Buffer bookkeeping: GC_INVALID (0) everywhere until gc_enable(). */
	gc_globals->buf = NULL;
	gc_globals->unused = GC_INVALID;
	gc_globals->first_unused = GC_INVALID;
	gc_globals->gc_threshold = GC_INVALID;
	gc_globals->buf_size = GC_INVALID;
	gc_globals->num_roots = 0;

	gc_globals->gc_runs = 0;
	gc_globals->collected = 0;

#if GC_BENCH
	gc_globals->root_buf_length = 0;
	gc_globals->root_buf_peak = 0;
	gc_globals->zval_possible_root = 0;
	gc_globals->zval_buffered = 0;
	gc_globals->zval_remove_from_buffer = 0;
	gc_globals->zval_marked_grey = 0;
#endif
}
447
/* Engine startup hook: under ZTS register per-thread GC globals with
 * TSRM (ctor initializes each thread's copy, dtor frees its buffer);
 * otherwise initialize the single static instance directly. */
void gc_globals_ctor(void)
{
#ifdef ZTS
	ts_allocate_id(&gc_globals_id, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
#else
	gc_globals_ctor_ex(&gc_globals);
#endif
}
456
/* Engine shutdown hook: free the root buffer.  Under ZTS this is done
 * per thread by the TSRM dtor registered in gc_globals_ctor(), so only
 * the non-ZTS build frees it here. */
void gc_globals_dtor(void)
{
#ifndef ZTS
	root_buffer_dtor(&gc_globals);
#endif
}
463
/* Reset per-request GC state (flags, free lists, counters) without
 * releasing the root buffer itself.  No-op until gc_enable() has
 * allocated the buffer. */
void gc_reset(void)
{
	if (GC_G(buf)) {
		GC_G(gc_active) = 0;
		GC_G(gc_protected) = 0;
		GC_G(gc_full) = 0;
		GC_G(unused) = GC_INVALID;
		/* Slot 0 is reserved (index 0 == GC_INVALID); usable roots
		 * start at GC_FIRST_ROOT. */
		GC_G(first_unused) = GC_FIRST_ROOT;
		GC_G(num_roots) = 0;

		GC_G(gc_runs) = 0;
		GC_G(collected) = 0;

#if GC_BENCH
		GC_G(root_buf_length) = 0;
		GC_G(root_buf_peak) = 0;
		GC_G(zval_possible_root) = 0;
		GC_G(zval_buffered) = 0;
		GC_G(zval_remove_from_buffer) = 0;
		GC_G(zval_marked_grey) = 0;
#endif
	}
}
487
/* Enable or disable the cycle collector; returns the previous setting.
 * On the first transition to enabled, lazily allocates the persistent
 * root buffer (slot 0 stays NULL - index 0 means GC_INVALID), sets the
 * initial activation threshold, and resets all GC state. */
ZEND_API zend_bool gc_enable(zend_bool enable)
{
	zend_bool old_enabled = GC_G(gc_enabled);
	GC_G(gc_enabled) = enable;
	if (enable && !old_enabled && GC_G(buf) == NULL) {
		/* Persistent allocation (pemalloc flag 1): survives requests,
		 * freed only by root_buffer_dtor(). */
		GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
		GC_G(buf)[0].ref = NULL;
		GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
		GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT + GC_FIRST_ROOT;
		gc_reset();
	}
	return old_enabled;
}
501
/* Report whether automatic cycle collection is currently enabled. */
ZEND_API zend_bool gc_enabled(void)
{
	return GC_G(gc_enabled);
}
506
/* Set the "protected" flag, which forbids buffering new possible roots
 * (gc_possible_root() returns immediately while set).  Returns the
 * previous value so callers can restore it. */
ZEND_API zend_bool gc_protect(zend_bool protect)
{
	zend_bool old_protected = GC_G(gc_protected);
	GC_G(gc_protected) = protect;
	return old_protected;
}
513
/* Report whether root buffering is currently forbidden. */
ZEND_API zend_bool gc_protected(void)
{
	return GC_G(gc_protected);
}
518
/* Enlarge the root buffer: double while smaller than GC_BUF_GROW_STEP,
 * then grow linearly, capped at GC_MAX_BUF_SIZE.  The first time the
 * hard cap is hit, the GC is switched off (gc_full/gc_protected/
 * gc_active set) with a warning and nothing is grown.  NOTE(review):
 * if called again while already full, control falls through to a
 * same-size perealloc (new_size clamps back to GC_MAX_BUF_SIZE) -
 * wasteful but harmless. */
static void gc_grow_root_buffer(void)
{
	size_t new_size;

	if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
		if (!GC_G(gc_full)) {
			zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
			GC_G(gc_active) = 1;
			GC_G(gc_protected) = 1;
			GC_G(gc_full) = 1;
			return;
		}
	}
	if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
		/* Exponential phase for small buffers. */
		new_size = GC_G(buf_size) * 2;
	} else {
		/* Linear phase for large buffers. */
		new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
	}
	if (new_size > GC_MAX_BUF_SIZE) {
		new_size = GC_MAX_BUF_SIZE;
	}
	GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
	GC_G(buf_size) = new_size;
}
543
/* Adapt the GC activation threshold based on how productive the last
 * collection was ("count" = number of nodes it collected).  Unproductive
 * runs raise the threshold (growing the buffer first if needed) so
 * collections happen less often; productive runs lower it back toward
 * GC_THRESHOLD_DEFAULT. */
static void gc_adjust_threshold(int count)
{
	uint32_t new_threshold;

	/* TODO Very simple heuristic for dynamic GC buffer resizing:
	 * If there are "too few" collections, increase the collection threshold
	 * by a fixed step */
	if (count < GC_THRESHOLD_TRIGGER) {
		/* increase */
		if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
			new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
			if (new_threshold > GC_THRESHOLD_MAX) {
				new_threshold = GC_THRESHOLD_MAX;
			}
			/* The threshold must not exceed the buffer capacity; try to
			 * grow the buffer first and only then raise the threshold
			 * (gc_grow_root_buffer may refuse, e.g. at the hard cap). */
			if (new_threshold > GC_G(buf_size)) {
				gc_grow_root_buffer();
			}
			if (new_threshold <= GC_G(buf_size)) {
				GC_G(gc_threshold) = new_threshold;
			}
		}
	} else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
		/* Productive collection: step the threshold back down, never
		 * below the default. */
		new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
		if (new_threshold < GC_THRESHOLD_DEFAULT) {
			new_threshold = GC_THRESHOLD_DEFAULT;
		}
		GC_G(gc_threshold) = new_threshold;
	}
}
573
gc_possible_root_when_full(zend_refcounted * ref)574 static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
575 {
576 uint32_t idx;
577 gc_root_buffer *newRoot;
578
579 ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
580 ZEND_ASSERT(GC_INFO(ref) == 0);
581
582 if (GC_G(gc_enabled) && !GC_G(gc_active)) {
583 GC_ADDREF(ref);
584 gc_adjust_threshold(gc_collect_cycles());
585 if (UNEXPECTED(GC_DELREF(ref)) == 0) {
586 rc_dtor_func(ref);
587 return;
588 } else if (UNEXPECTED(GC_INFO(ref))) {
589 return;
590 }
591 }
592
593 if (GC_HAS_UNUSED()) {
594 idx = GC_FETCH_UNUSED();
595 } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
596 idx = GC_FETCH_NEXT_UNUSED();
597 } else {
598 gc_grow_root_buffer();
599 if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
600 return;
601 }
602 idx = GC_FETCH_NEXT_UNUSED();
603 }
604
605 newRoot = GC_IDX2PTR(idx);
606 newRoot->ref = ref; /* GC_ROOT tag is 0 */
607 GC_TRACE_SET_COLOR(ref, GC_PURPLE);
608
609 idx = gc_compress(idx);
610 GC_REF_SET_INFO(ref, idx | GC_PURPLE);
611 GC_G(num_roots)++;
612
613 GC_BENCH_INC(zval_buffered);
614 GC_BENCH_INC(root_buf_length);
615 GC_BENCH_PEAK(root_buf_peak, root_buf_length);
616 }
617
/* Buffer "ref" (an array or object that may be a root of a garbage
 * cycle) and color it purple.  Fast path: take a slot from the free list
 * or from below the activation threshold; once the threshold is reached,
 * defer to gc_possible_root_when_full(), which may trigger a collection.
 * Does nothing while the GC is protected. */
ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
{
	uint32_t idx;
	gc_root_buffer *newRoot;

	if (UNEXPECTED(GC_G(gc_protected))) {
		return;
	}

	GC_BENCH_INC(zval_possible_root);

	if (EXPECTED(GC_HAS_UNUSED())) {
		idx = GC_FETCH_UNUSED();
	} else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
		idx = GC_FETCH_NEXT_UNUSED();
	} else {
		/* Threshold reached: slow path may run a collection first. */
		gc_possible_root_when_full(ref);
		return;
	}

	/* Only arrays and objects can participate in cycles, and the node
	 * must not already be in the buffer (GC_INFO == 0). */
	ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
	ZEND_ASSERT(GC_INFO(ref) == 0);

	newRoot = GC_IDX2PTR(idx);
	newRoot->ref = ref; /* GC_ROOT tag is 0 */
	GC_TRACE_SET_COLOR(ref, GC_PURPLE);

	/* Store the (compressed) slot address and purple color in GC_INFO. */
	idx = gc_compress(idx);
	GC_REF_SET_INFO(ref, idx | GC_PURPLE);
	GC_G(num_roots)++;

	GC_BENCH_INC(zval_buffered);
	GC_BENCH_INC(root_buf_length);
	GC_BENCH_PEAK(root_buf_peak, root_buf_length);
}
653
/* Slow-path removal for compressed addresses: locate the true buffer
 * slot for "ref" (see gc_decompress) and return it to the free list. */
static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
{
	gc_root_buffer *root = gc_decompress(ref, idx);
	gc_remove_from_roots(root);
}
659
/* Remove "ref" from the root buffer (called e.g. when the node is being
 * destroyed): reset its color to black, clear its GC_INFO, and free its
 * buffer slot.  Slot addresses are only ambiguous once the buffer has
 * grown past GC_MAX_UNCOMPRESSED entries; only then is the
 * decompression probe needed. */
ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
{
	gc_root_buffer *root;
	/* Capture the (possibly compressed) slot address before clearing it. */
	uint32_t idx = GC_REF_ADDRESS(ref);

	GC_BENCH_INC(zval_remove_from_buffer);

	if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
		GC_TRACE_SET_COLOR(ref, GC_BLACK);
	}
	GC_REF_SET_INFO(ref, 0);

	/* Perform decompression only in case of large buffers */
	if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
		gc_remove_compressed(ref, idx);
		return;
	}

	ZEND_ASSERT(idx);
	root = GC_IDX2PTR(idx);
	gc_remove_from_roots(root);
}
682
/* Re-color the subgraph reachable from "ref" black and restore (via
 * GC_ADDREF) the reference counts that gc_mark_grey() speculatively
 * decremented.  Called when a grey node is found to be externally
 * referenced (refcount > 0) and is therefore not garbage.
 *
 * Iterative depth-first traversal: children are pushed on the explicit
 * gc_stack, except the last refcounted child of each node, which is
 * handled in place via "goto tail_call" (manual tail-call). */
static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht = NULL;
	Bucket *p, *end;
	zval *zv;
	GC_STACK_DCL(stack);

tail_call:
	if (GC_TYPE(ref) == IS_OBJECT) {
		zend_object_get_gc_t get_gc;
		zend_object *obj = (zend_object*)ref;

		/* Skip objects whose free handler already ran and objects
		 * without a get_gc handler - nothing to traverse. */
		if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
		             (get_gc = obj->handlers->get_gc) != NULL)) {
			int n;
			zval *zv, *end;
			zval tmp;

			ZVAL_OBJ(&tmp, obj);
			/* get_gc yields a zval vector (zv..zv+n) and optionally an
			 * extra HashTable of further children. */
			ht = get_gc(&tmp, &zv, &n);
			end = zv + n;
			if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_BLACK))) {
				/* No (or already-black) extra table: trim the
				 * non-refcounted tail so the last element can be
				 * tail-called below. */
				ht = NULL;
				if (!n) goto next;
				while (!Z_REFCOUNTED_P(--end)) {
					if (zv == end) goto next;
				}
			} else {
				GC_REF_SET_BLACK(ht);
			}
			/* Push all refcounted children but the last, restoring
			 * each one's refcount. */
			while (zv != end) {
				if (Z_REFCOUNTED_P(zv)) {
					ref = Z_COUNTED_P(zv);
					GC_ADDREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
						GC_REF_SET_BLACK(ref);
						GC_STACK_PUSH(ref);
					}
				}
				zv++;
			}
			if (EXPECTED(!ht)) {
				/* Last child: continue with it directly (tail call). */
				ref = Z_COUNTED_P(zv);
				GC_ADDREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
					GC_REF_SET_BLACK(ref);
					goto tail_call;
				}
				goto next;
			}
			/* ht != NULL: fall through to the bucket traversal below. */
		} else {
			goto next;
		}
	} else if (GC_TYPE(ref) == IS_ARRAY) {
		/* The global symbol table is never treated as garbage. */
		if ((zend_array*)ref != &EG(symbol_table)) {
			ht = (zend_array*)ref;
		} else {
			goto next;
		}
	} else if (GC_TYPE(ref) == IS_REFERENCE) {
		/* A reference has exactly one child: tail-call into it. */
		if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
			ref = Z_COUNTED(((zend_reference*)ref)->val);
			GC_ADDREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
				GC_REF_SET_BLACK(ref);
				goto tail_call;
			}
		}
		goto next;
	} else {
		goto next;
	}

	/* Bucket traversal of "ht": find the last refcounted element first
	 * (possibly behind IS_INDIRECT) so it can be tail-called. */
	if (!ht->nNumUsed) goto next;
	p = ht->arData;
	end = p + ht->nNumUsed;
	while (1) {
		end--;
		zv = &end->val;
		if (Z_TYPE_P(zv) == IS_INDIRECT) {
			zv = Z_INDIRECT_P(zv);
		}
		if (Z_REFCOUNTED_P(zv)) {
			break;
		}
		if (p == end) goto next;
	}
	/* Push all refcounted elements before the last one. */
	while (p != end) {
		zv = &p->val;
		if (Z_TYPE_P(zv) == IS_INDIRECT) {
			zv = Z_INDIRECT_P(zv);
		}
		if (Z_REFCOUNTED_P(zv)) {
			ref = Z_COUNTED_P(zv);
			GC_ADDREF(ref);
			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
				GC_REF_SET_BLACK(ref);
				GC_STACK_PUSH(ref);
			}
		}
		p++;
	}
	/* p == end: the last refcounted element located above. */
	zv = &p->val;
	if (Z_TYPE_P(zv) == IS_INDIRECT) {
		zv = Z_INDIRECT_P(zv);
	}
	ref = Z_COUNTED_P(zv);
	GC_ADDREF(ref);
	if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
		GC_REF_SET_BLACK(ref);
		goto tail_call;
	}

next:
	/* Resume with the next pending node, if any. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
802
/* Mark the subgraph reachable from "ref" grey, speculatively
 * decrementing (GC_DELREF) the refcount of every edge traversed.  After
 * this pass, a node whose refcount is still > 0 is externally referenced
 * (gc_scan will re-blacken it via gc_scan_black); a node at 0 is only
 * reachable through the candidate cycle.
 *
 * Iterative depth-first traversal over an explicit gc_stack; "continue"
 * re-enters the loop with the freshly assigned "ref" (manual tail call),
 * the last refcounted child of each container being handled that way. */
static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht = NULL;
	Bucket *p, *end;
	zval *zv;
	GC_STACK_DCL(stack);

	do {
		GC_BENCH_INC(zval_marked_grey);

		if (GC_TYPE(ref) == IS_OBJECT) {
			zend_object_get_gc_t get_gc;
			zend_object *obj = (zend_object*)ref;

			/* Skip freed objects and objects without a get_gc handler. */
			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
			             (get_gc = obj->handlers->get_gc) != NULL)) {
				int n;
				zval *zv, *end;
				zval tmp;

				ZVAL_OBJ(&tmp, obj);
				/* get_gc yields a zval vector and optionally an extra
				 * HashTable of further children. */
				ht = get_gc(&tmp, &zv, &n);
				end = zv + n;
				if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_GREY))) {
					/* No (or already-grey) extra table: trim the
					 * non-refcounted tail so the last element can be
					 * tail-called. */
					ht = NULL;
					if (!n) goto next;
					while (!Z_REFCOUNTED_P(--end)) {
						if (zv == end) goto next;
					}
				} else {
					GC_REF_SET_COLOR(ht, GC_GREY);
				}
				/* Decrement and push all children but the last. */
				while (zv != end) {
					if (Z_REFCOUNTED_P(zv)) {
						ref = Z_COUNTED_P(zv);
						ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
						GC_DELREF(ref);
						if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_GREY);
							GC_STACK_PUSH(ref);
						}
					}
					zv++;
				}
				if (EXPECTED(!ht)) {
					/* Last child: continue the loop with it directly. */
					ref = Z_COUNTED_P(zv);
					ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
					GC_DELREF(ref);
					if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
						GC_REF_SET_COLOR(ref, GC_GREY);
						continue;
					}
					goto next;
				}
				/* ht != NULL: fall through to the bucket traversal. */
			} else {
				goto next;
			}
		} else if (GC_TYPE(ref) == IS_ARRAY) {
			/* The global symbol table is never garbage: pin it black. */
			if (((zend_array*)ref) == &EG(symbol_table)) {
				GC_REF_SET_BLACK(ref);
				goto next;
			} else {
				ht = (zend_array*)ref;
			}
		} else if (GC_TYPE(ref) == IS_REFERENCE) {
			/* A reference has exactly one child. */
			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
				ref = Z_COUNTED(((zend_reference*)ref)->val);
				ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
				GC_DELREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
					GC_REF_SET_COLOR(ref, GC_GREY);
					continue;
				}
			}
			goto next;
		} else {
			goto next;
		}

		/* Bucket traversal of "ht": locate the last refcounted element
		 * (possibly behind IS_INDIRECT) so it can be tail-called. */
		if (!ht->nNumUsed) goto next;
		p = ht->arData;
		end = p + ht->nNumUsed;
		while (1) {
			end--;
			zv = &end->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_REFCOUNTED_P(zv)) {
				break;
			}
			if (p == end) goto next;
		}
		/* Decrement and push all elements before the last one. */
		while (p != end) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_REFCOUNTED_P(zv)) {
				ref = Z_COUNTED_P(zv);
				ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
				GC_DELREF(ref);
				if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
					GC_REF_SET_COLOR(ref, GC_GREY);
					GC_STACK_PUSH(ref);
				}
			}
			p++;
		}
		/* p == end: the last refcounted element located above. */
		zv = &p->val;
		if (Z_TYPE_P(zv) == IS_INDIRECT) {
			zv = Z_INDIRECT_P(zv);
		}
		ref = Z_COUNTED_P(zv);
		ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
		GC_DELREF(ref);
		if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
			GC_REF_SET_COLOR(ref, GC_GREY);
			continue;
		}

next:
		/* Resume with the next pending node; loop ends when the stack
		 * is exhausted (GC_STACK_POP returns NULL). */
		ref = GC_STACK_POP();
	} while (ref);
}
928
/* Two-Finger compaction algorithm */
/* Compact the root buffer so all num_roots live entries are contiguous
 * from GC_FIRST_ROOT: "free" walks forward to the next unused hole,
 * "scan" walks backward to the next live entry, and the live entry is
 * moved into the hole (its compressed address in GC_INFO is updated to
 * the new slot).  Afterwards the free list is emptied and the watermark
 * dropped to just past the live entries. */
static void gc_compact(void)
{
	if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
		if (GC_G(num_roots)) {
			gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
			gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
			gc_root_buffer *end  = GC_IDX2PTR(GC_G(num_roots));
			uint32_t idx;
			zend_refcounted *p;

			while (free < scan) {
				while (!GC_IS_UNUSED(free->ref)) {
					free++;
				}
				while (GC_IS_UNUSED(scan->ref)) {
					scan--;
				}
				if (scan > free) {
					/* Move the live entry (tag bits included) into the
					 * hole and record its new compressed address. */
					p = scan->ref;
					free->ref = p;
					p = GC_GET_PTR(p);
					idx = gc_compress(GC_PTR2IDX(free));
					GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
					free++;
					scan--;
					if (scan <= end) {
						/* All live entries now sit below "end": done. */
						break;
					}
				}
			}
		}
		GC_G(unused) = GC_INVALID;
		GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
	}
}
965
/* First collection phase: compact the root buffer, then run
 * gc_mark_grey() on every buffered entry that is still a possible root
 * (tag GC_ROOT) and still purple, turning it grey first. */
static void gc_mark_roots(gc_stack *stack)
{
	gc_root_buffer *current, *last;

	gc_compact();

	last = GC_IDX2PTR(GC_G(first_unused));
	for (current = GC_IDX2PTR(GC_FIRST_ROOT); current != last; current++) {
		if (GC_IS_ROOT(current->ref)
		 && GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
			GC_REF_SET_COLOR(current->ref, GC_GREY);
			gc_mark_grey(current->ref, stack);
		}
	}
}
984
/* Second collection phase for one root: decide the fate of every grey
 * node reachable from "ref".  A white node with refcount > 0 is
 * externally referenced - re-blacken its subgraph via gc_scan_black()
 * (which also restores the refcounts gc_mark_grey decremented).  A node
 * at refcount 0 stays white (candidate garbage) and its still-grey
 * children are turned white and traversed.
 *
 * Iterative depth-first traversal; "goto tail_call" handles the last
 * child of each container in place. */
static void gc_scan(zend_refcounted *ref, gc_stack *stack)
{
	HashTable *ht = NULL;
	Bucket *p, *end;
	zval *zv;
	GC_STACK_DCL(stack);

tail_call:
	if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
		if (GC_REFCOUNT(ref) > 0) {
			if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
				GC_REF_SET_BLACK(ref);
				/* Ensure a detached tail segment exists for the nested
				 * traversal (gc_stack_next links it as a side effect). */
				if (UNEXPECTED(!_stack->next)) {
					gc_stack_next(_stack);
				}
				/* Split stack and reuse the tail */
				_stack->next->prev = NULL;
				gc_scan_black(ref, _stack->next);
				_stack->next->prev = _stack;
			}
		} else {
			/* refcount == 0: keep white, traverse children. */
			if (GC_TYPE(ref) == IS_OBJECT) {
				zend_object_get_gc_t get_gc;
				zend_object *obj = (zend_object*)ref;

				/* Skip freed objects and objects without get_gc. */
				if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
				             (get_gc = obj->handlers->get_gc) != NULL)) {
					int n;
					zval *zv, *end;
					zval tmp;

					ZVAL_OBJ(&tmp, obj);
					ht = get_gc(&tmp, &zv, &n);
					end = zv + n;
					/* Only a still-grey extra table needs scanning;
					 * otherwise traverse just the zval vector, trimming
					 * the non-refcounted tail for the tail call. */
					if (EXPECTED(!ht) || UNEXPECTED(!GC_REF_CHECK_COLOR(ht, GC_GREY))) {
						ht = NULL;
						if (!n) goto next;
						while (!Z_REFCOUNTED_P(--end)) {
							if (zv == end) goto next;
						}
					} else {
						GC_REF_SET_COLOR(ht, GC_WHITE);
					}
					/* Turn grey children white and push all but the last. */
					while (zv != end) {
						if (Z_REFCOUNTED_P(zv)) {
							ref = Z_COUNTED_P(zv);
							if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
								GC_REF_SET_COLOR(ref, GC_WHITE);
								GC_STACK_PUSH(ref);
							}
						}
						zv++;
					}
					if (EXPECTED(!ht)) {
						/* Last child: continue with it directly. */
						ref = Z_COUNTED_P(zv);
						if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
							GC_REF_SET_COLOR(ref, GC_WHITE);
							goto tail_call;
						}
						goto next;
					}
					/* ht != NULL: fall through to the bucket traversal. */
				} else {
					goto next;
				}
			} else if (GC_TYPE(ref) == IS_ARRAY) {
				/* The global symbol table is never garbage: pin black. */
				if ((zend_array*)ref == &EG(symbol_table)) {
					GC_REF_SET_BLACK(ref);
					goto next;
				} else {
					ht = (zend_array*)ref;
				}
			} else if (GC_TYPE(ref) == IS_REFERENCE) {
				/* A reference has exactly one child. */
				if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
					ref = Z_COUNTED(((zend_reference*)ref)->val);
					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
						GC_REF_SET_COLOR(ref, GC_WHITE);
						goto tail_call;
					}
				}
				goto next;
			} else {
				goto next;
			}

			/* Bucket traversal of "ht": locate the last refcounted
			 * element (possibly behind IS_INDIRECT) for the tail call. */
			if (!ht->nNumUsed) goto next;
			p = ht->arData;
			end = p + ht->nNumUsed;
			while (1) {
				end--;
				zv = &end->val;
				if (Z_TYPE_P(zv) == IS_INDIRECT) {
					zv = Z_INDIRECT_P(zv);
				}
				if (Z_REFCOUNTED_P(zv)) {
					break;
				}
				if (p == end) goto next;
			}
			/* Turn grey elements white and push all but the last. */
			while (p != end) {
				zv = &p->val;
				if (Z_TYPE_P(zv) == IS_INDIRECT) {
					zv = Z_INDIRECT_P(zv);
				}
				if (Z_REFCOUNTED_P(zv)) {
					ref = Z_COUNTED_P(zv);
					if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
						GC_REF_SET_COLOR(ref, GC_WHITE);
						GC_STACK_PUSH(ref);
					}
				}
				p++;
			}
			/* p == end: the last refcounted element located above. */
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			ref = Z_COUNTED_P(zv);
			if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
				GC_REF_SET_COLOR(ref, GC_WHITE);
				goto tail_call;
			}
		}
	}

next:
	/* Resume with the next pending node, if any. */
	ref = GC_STACK_POP();
	if (ref) {
		goto tail_call;
	}
}
1115
/* Second collection phase: for every buffered entry that is still a
 * possible root (tag GC_ROOT) and still grey, turn it white and let
 * gc_scan() decide whether its subgraph is live or garbage. */
static void gc_scan_roots(gc_stack *stack)
{
	gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
	gc_root_buffer *current;

	for (current = GC_IDX2PTR(GC_FIRST_ROOT); current != last; current++) {
		if (GC_IS_ROOT(current->ref)
		 && GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
			GC_REF_SET_COLOR(current->ref, GC_WHITE);
			gc_scan(current->ref, stack);
		}
	}
}
1131
/* Record "ref" as confirmed garbage: store it in a buffer slot tagged
 * GC_GARBAGE so the collector can destroy it later, and remember the
 * (compressed) slot address in the node's GC_INFO with color black.
 * If the buffer cannot grow (GC disabled on overflow), the node is
 * silently dropped from the garbage list. */
static void gc_add_garbage(zend_refcounted *ref)
{
	uint32_t idx;
	gc_root_buffer *buf;

	if (GC_HAS_UNUSED()) {
		idx = GC_FETCH_UNUSED();
	} else if (GC_HAS_NEXT_UNUSED()) {
		idx = GC_FETCH_NEXT_UNUSED();
	} else {
		gc_grow_root_buffer();
		if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
			return;
		}
		idx = GC_FETCH_NEXT_UNUSED();
	}

	buf = GC_IDX2PTR(idx);
	buf->ref = GC_MAKE_GARBAGE(ref);

	idx = gc_compress(idx);
	GC_REF_SET_INFO(ref, idx | GC_BLACK);
	GC_G(num_roots)++;
}
1156
/* Third phase helper: confirm a WHITE (garbage) subgraph rooted at 'ref'.
 *
 * Iteratively (via 'stack') walks everything reachable from 'ref',
 * turns each WHITE node BLACK again, restores one reference count per
 * visited edge (GC_ADDREF — presumably paired with the decrement done
 * in the mark phase; confirm against gc_mark_grey), and registers nodes
 * not yet present in the root buffer through gc_add_garbage() so the
 * destroy phase can reach the whole cycle.
 *
 * Sets GC_HAS_DESTRUCTORS in *flags when an object with a potential
 * user-level destructor is part of the garbage.
 *
 * Returns the number of values counted for collection statistics
 * (IS_REFERENCE nodes excluded; non-refcounted members are included
 * "for compatibility").
 */
static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
{
	int count = 0;
	HashTable *ht = NULL;
	Bucket *p, *end;
	zval *zv;
	GC_STACK_DCL(stack);

	do {
		/* don't count references for compatibility ??? */
		if (GC_TYPE(ref) != IS_REFERENCE) {
			count++;
		}

		if (GC_TYPE(ref) == IS_OBJECT) {
			zend_object_get_gc_t get_gc;
			zend_object *obj = (zend_object*)ref;

			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
			             (get_gc = obj->handlers->get_gc) != NULL)) {
				int n;
				zval *zv, *end;
				zval tmp;

				/* optimization: color is GC_BLACK (0) */
				if (!GC_INFO(ref)) {
					/* GC_INFO == 0 means "not in the root buffer";
					 * add it so the destroy phase can free it */
					gc_add_garbage(ref);
				}
				/* objects with a real destructor force the extra
				 * destructor pass in zend_gc_collect_cycles() */
				if (obj->handlers->dtor_obj &&
				    ((obj->handlers->dtor_obj != zend_objects_destroy_object) ||
				     (obj->ce->destructor != NULL))) {
					*flags |= GC_HAS_DESTRUCTORS;
				}
				ZVAL_OBJ(&tmp, obj);
				ht = get_gc(&tmp, &zv, &n);
				end = zv + n;
				if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_BLACK))) {
					ht = NULL;
					if (!n) goto next;
					/* trim non-refcounted values off the tail so the
					 * final element can be followed as a tail call */
					while (!Z_REFCOUNTED_P(--end)) {
						/* count non-refcounted for compatibility ??? */
						/* NOTE(review): this tests zv (the first element)
						 * although end is what is being decremented —
						 * looks like it should be Z_TYPE_P(end).  Only the
						 * returned statistics count is affected; confirm
						 * before changing. */
						if (Z_TYPE_P(zv) != IS_UNDEF) {
							count++;
						}
						if (zv == end) goto next;
					}
				} else {
					GC_REF_SET_BLACK(ht);
				}
				while (zv != end) {
					if (Z_REFCOUNTED_P(zv)) {
						ref = Z_COUNTED_P(zv);
						GC_ADDREF(ref);
						if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
							GC_REF_SET_BLACK(ref);
							GC_STACK_PUSH(ref);
						}
					/* count non-refcounted for compatibility ??? */
					} else if (Z_TYPE_P(zv) != IS_UNDEF) {
						count++;
					}
					zv++;
				}
				if (EXPECTED(!ht)) {
					/* follow the last child directly instead of pushing
					 * it (manual tail-call elimination) */
					ref = Z_COUNTED_P(zv);
					GC_ADDREF(ref);
					if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
						GC_REF_SET_BLACK(ref);
						continue;
					}
					goto next;
				}
			} else {
				goto next;
			}
		} else if (GC_TYPE(ref) == IS_ARRAY) {
			/* optimization: color is GC_BLACK (0) */
			if (!GC_INFO(ref)) {
				gc_add_garbage(ref);
			}
			ht = (zend_array*)ref;
		} else if (GC_TYPE(ref) == IS_REFERENCE) {
			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
				/* a reference has a single child: follow it directly */
				ref = Z_COUNTED(((zend_reference*)ref)->val);
				GC_ADDREF(ref);
				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
					GC_REF_SET_BLACK(ref);
					continue;
				}
			}
			goto next;
		} else {
			goto next;
		}

		/* common bucket walk for arrays and object property tables */
		if (!ht->nNumUsed) goto next;
		p = ht->arData;
		end = p + ht->nNumUsed;
		/* locate the last refcounted bucket (tail-call target),
		 * counting skipped non-refcounted tail values */
		while (1) {
			end--;
			zv = &end->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_REFCOUNTED_P(zv)) {
				break;
			}
			/* count non-refcounted for compatibility ??? */
			if (Z_TYPE_P(zv) != IS_UNDEF) {
				count++;
			}
			if (p == end) goto next;
		}
		while (p != end) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_REFCOUNTED_P(zv)) {
				ref = Z_COUNTED_P(zv);
				GC_ADDREF(ref);
				if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
					GC_REF_SET_BLACK(ref);
					GC_STACK_PUSH(ref);
				}
			/* count non-refcounted for compatibility ??? */
			} else if (Z_TYPE_P(zv) != IS_UNDEF) {
				count++;
			}
			p++;
		}
		/* last refcounted bucket: follow as a tail call */
		zv = &p->val;
		if (Z_TYPE_P(zv) == IS_INDIRECT) {
			zv = Z_INDIRECT_P(zv);
		}
		ref = Z_COUNTED_P(zv);
		GC_ADDREF(ref);
		if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
			GC_REF_SET_BLACK(ref);
			continue;
		}

next:
		ref = GC_STACK_POP();
	} while (ref);

	return count;
}
1305
/* Third phase of collection: drop live (BLACK) entries from the root
 * buffer, compact it, and run gc_collect_white() on every remaining
 * WHITE root.  Returns the number of values counted as collected. */
static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
{
	int count = 0;
	zend_refcounted *ref;
	gc_root_buffer *buf = GC_IDX2PTR(GC_FIRST_ROOT);
	gc_root_buffer *buf_end = GC_IDX2PTR(GC_G(first_unused));
	uint32_t idx, end;

	/* remove non-garbage from the list */
	for (; buf != buf_end; buf++) {
		if (GC_IS_ROOT(buf->ref)
		 && GC_REF_CHECK_COLOR(buf->ref, GC_BLACK)) {
			GC_REF_SET_INFO(buf->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
			gc_remove_from_roots(buf);
		}
	}

	gc_compact();

	/* Root buffer might be reallocated during gc_collect_white,
	 * so walk by index and translate on every iteration. */
	for (idx = GC_FIRST_ROOT, end = GC_G(first_unused); idx != end; idx++) {
		gc_root_buffer *slot = GC_IDX2PTR(idx);

		ref = slot->ref;
		ZEND_ASSERT(GC_IS_ROOT(ref));
		slot->ref = GC_MAKE_GARBAGE(ref);
		if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
			GC_REF_SET_BLACK(ref);
			count += gc_collect_white(ref, flags, stack);
		}
	}

	return count;
}
1345
/* Remove 'ref' and everything reachable from it from the root buffer.
 *
 * Called after the destructor pass in zend_gc_collect_cycles() for
 * values whose refcount grew while destructors ran (i.e. data that was
 * "resurrected" by a destructor) so they are not freed by the destroy
 * phase.
 *
 * 'root' is the already-located buffer entry for the top-level call;
 * nested calls pass NULL and locate entries via GC_REF_ADDRESS().
 * Siblings are handled by plain recursion; the last child is followed
 * through the 'tail_call' goto to bound stack depth.
 */
static void gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root)
{
	HashTable *ht = NULL;
	Bucket *p, *end;
	zval *zv;

tail_call:
	do {
		if (root) {
			/* top-level call: the caller already found the entry */
			GC_TRACE_REF(ref, "removing from buffer");
			gc_remove_from_roots(root);
			GC_REF_SET_INFO(ref, 0);
			root = NULL;
		} else if (GC_REF_ADDRESS(ref) != 0
		 && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
			/* nested node that is still recorded in the buffer */
			GC_TRACE_REF(ref, "removing from buffer");
			GC_REMOVE_FROM_BUFFER(ref);
		} else if (GC_TYPE(ref) == IS_REFERENCE) {
			if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
				/* references have a single child: follow it */
				ref = Z_COUNTED(((zend_reference*)ref)->val);
				goto tail_call;
			}
			return;
		} else {
			/* not buffered and not a reference: nothing to do here */
			return;
		}

		if (GC_TYPE(ref) == IS_OBJECT) {
			zend_object_get_gc_t get_gc;
			zend_object *obj = (zend_object*)ref;

			if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
			             (get_gc = obj->handlers->get_gc) != NULL)) {
				int n;
				zval *zv, *end;
				zval tmp;

				ZVAL_OBJ(&tmp, obj);
				ht = get_gc(&tmp, &zv, &n);
				end = zv + n;
				if (EXPECTED(!ht)) {
					if (!n) return;
					/* trim non-refcounted tail so the last element can
					 * be followed as a tail call */
					while (!Z_REFCOUNTED_P(--end)) {
						if (zv == end) return;
					}
				}
				while (zv != end) {
					if (Z_REFCOUNTED_P(zv)) {
						ref = Z_COUNTED_P(zv);
						gc_remove_nested_data_from_buffer(ref, NULL);
					}
					zv++;
				}
				if (EXPECTED(!ht)) {
					ref = Z_COUNTED_P(zv);
					goto tail_call;
				}
				/* auxiliary table returned by get_gc() may itself be
				 * buffered — remove it, then walk its buckets below */
				if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
					GC_TRACE_REF(ht, "removing from buffer");
					GC_REMOVE_FROM_BUFFER(ht);
				}
			} else {
				return;
			}
		} else if (GC_TYPE(ref) == IS_ARRAY) {
			ht = (zend_array*)ref;
		} else {
			return;
		}

		/* common bucket walk for arrays and object property tables */
		if (!ht->nNumUsed) return;
		p = ht->arData;
		end = p + ht->nNumUsed;
		/* locate the last refcounted bucket (tail-call target) */
		while (1) {
			end--;
			zv = &end->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_REFCOUNTED_P(zv)) {
				break;
			}
			if (p == end) return;
		}
		while (p != end) {
			zv = &p->val;
			if (Z_TYPE_P(zv) == IS_INDIRECT) {
				zv = Z_INDIRECT_P(zv);
			}
			if (Z_REFCOUNTED_P(zv)) {
				ref = Z_COUNTED_P(zv);
				gc_remove_nested_data_from_buffer(ref, NULL);
			}
			p++;
		}
		/* last refcounted bucket: follow as a tail call */
		zv = &p->val;
		if (Z_TYPE_P(zv) == IS_INDIRECT) {
			zv = Z_INDIRECT_P(zv);
		}
		ref = Z_COUNTED_P(zv);
		goto tail_call;
	} while (0);
}
1449
/* Run a full cycle-collection pass over the root buffer.
 *
 * Phases: mark roots (purple -> grey), scan (grey -> black/white),
 * collect (white -> confirmed garbage), optional destructor pass,
 * destroy zvals, free memory, compact the buffer.
 *
 * Returns the number of collected values (0 if a collection is already
 * active or there was nothing to do).
 */
ZEND_API int zend_gc_collect_cycles(void)
{
	int count = 0;

	if (GC_G(num_roots)) {
		gc_root_buffer *current, *last;
		zend_refcounted *p;
		uint32_t gc_flags = 0;
		uint32_t idx, end;
		gc_stack stack;

		stack.prev = NULL;
		stack.next = NULL;

		/* re-entrancy guard: refuse to collect while collecting */
		if (GC_G(gc_active)) {
			return 0;
		}

		GC_TRACE("Collecting cycles");
		GC_G(gc_runs)++;
		GC_G(gc_active) = 1;

		GC_TRACE("Marking roots");
		gc_mark_roots(&stack);
		GC_TRACE("Scanning roots");
		gc_scan_roots(&stack);

		GC_TRACE("Collecting roots");
		count = gc_collect_roots(&gc_flags, &stack);

		gc_stack_free(&stack);

		if (!GC_G(num_roots)) {
			/* nothing to free */
			GC_TRACE("Nothing to free");
			GC_G(gc_active) = 0;
			return 0;
		}

		end = GC_G(first_unused);

		/* Extra pass, only when some garbage object has a destructor
		 * (flag set by gc_collect_white()). */
		if (gc_flags & GC_HAS_DESTRUCTORS) {
			uint32_t *refcounts;

			GC_TRACE("Calling destructors");

			// TODO: may be use emalloc() ???
			refcounts = pemalloc(sizeof(uint32_t) * end, 1);

			/* Remember reference counters before calling destructors */
			idx = GC_FIRST_ROOT;
			current = GC_IDX2PTR(GC_FIRST_ROOT);
			while (idx != end) {
				if (GC_IS_GARBAGE(current->ref)) {
					p = GC_GET_PTR(current->ref);
					refcounts[idx] = GC_REFCOUNT(p);
				}
				current++;
				idx++;
			}

			/* Call destructors
			 *
			 * The root buffer might be reallocated during destructors calls,
			 * make sure to reload pointers as necessary. */
			idx = GC_FIRST_ROOT;
			while (idx != end) {
				current = GC_IDX2PTR(idx);
				if (GC_IS_GARBAGE(current->ref)) {
					p = GC_GET_PTR(current->ref);
					if (GC_TYPE(p) == IS_OBJECT
					 && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
						zend_object *obj = (zend_object*)p;

						GC_TRACE_REF(obj, "calling destructor");
						GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
						if (obj->handlers->dtor_obj
						 && (obj->handlers->dtor_obj != zend_objects_destroy_object
						  || obj->ce->destructor)) {
							/* keep the object alive across its own dtor */
							GC_ADDREF(obj);
							obj->handlers->dtor_obj(obj);
							GC_DELREF(obj);
						}
					}
				}
				idx++;
			}

			/* Remove values captured in destructors: a refcount that
			 * grew means a destructor stored a new reference to the
			 * value, so it must not be freed below. */
			idx = GC_FIRST_ROOT;
			current = GC_IDX2PTR(GC_FIRST_ROOT);
			while (idx != end) {
				if (GC_IS_GARBAGE(current->ref)) {
					p = GC_GET_PTR(current->ref);
					if (GC_REFCOUNT(p) > refcounts[idx]) {
						gc_remove_nested_data_from_buffer(p, current);
					}
				}
				current++;
				idx++;
			}

			pefree(refcounts, 1);

			if (GC_G(gc_protected)) {
				/* something went wrong */
				/* NOTE(review): returns with gc_active still set —
				 * appears intentional (collector is wedged), confirm */
				return 0;
			}
		}

		/* Destroy zvals: release the contents of each garbage value
		 * while the memory itself is freed in the next loop. */
		GC_TRACE("Destroying zvals");
		GC_G(gc_protected) = 1;
		current = GC_IDX2PTR(GC_FIRST_ROOT);
		last = GC_IDX2PTR(GC_G(first_unused));
		while (current != last) {
			if (GC_IS_GARBAGE(current->ref)) {
				p = GC_GET_PTR(current->ref);
				GC_TRACE_REF(p, "destroying");
				if (GC_TYPE(p) == IS_OBJECT) {
					zend_object *obj = (zend_object*)p;

					/* detach from the object store and neuter the type
					 * so other garbage pointing here sees IS_NULL */
					EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
					GC_TYPE_INFO(obj) = IS_NULL |
						(GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
					if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
						GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
						if (obj->handlers->free_obj) {
							GC_ADDREF(obj);
							obj->handlers->free_obj(obj);
							GC_DELREF(obj);
						}
					}

					ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
					/* record the real allocation start (handlers->offset
					 * accounts for a custom header) for the free loop */
					current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
				} else if (GC_TYPE(p) == IS_ARRAY) {
					zend_array *arr = (zend_array*)p;

					GC_TYPE_INFO(arr) = IS_NULL |
						(GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);

					/* GC may destroy arrays with rc>1. This is valid and safe. */
					HT_ALLOW_COW_VIOLATION(arr);

					zend_hash_destroy(arr);
				}
			}
			current++;
		}

		/* Free objects */
		current = GC_IDX2PTR(GC_FIRST_ROOT);
		while (current != last) {
			if (GC_IS_GARBAGE(current->ref)) {
				p = GC_GET_PTR(current->ref);
				GC_LINK_UNUSED(current);
				GC_G(num_roots)--;
				efree(p);
			}
			current++;
		}

		GC_TRACE("Collection finished");
		GC_G(collected) += count;
		GC_G(gc_protected) = 0;
		GC_G(gc_active) = 0;
	}

	gc_compact();

	return count;
}
1623
zend_gc_get_status(zend_gc_status * status)1624 ZEND_API void zend_gc_get_status(zend_gc_status *status)
1625 {
1626 status->runs = GC_G(gc_runs);
1627 status->collected = GC_G(collected);
1628 status->threshold = GC_G(gc_threshold);
1629 status->num_roots = GC_G(num_roots);
1630 }
1631
1632 /*
1633 * Local variables:
1634 * tab-width: 4
1635 * c-basic-offset: 4
1636 * indent-tabs-mode: t
1637 * End:
1638 * vim600: sw=4 ts=4 fdm=marker
1639 * vim<600: sw=4 ts=4
1640 *
1641 * vim:noexpandtab:
1642 */
1643