1 /*
2 +----------------------------------------------------------------------+
3 | Zend Engine |
4 +----------------------------------------------------------------------+
5 | Copyright (c) Zend Technologies Ltd. (http://www.zend.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 2.00 of the Zend license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.zend.com/license/2_00.txt. |
11 | If you did not receive a copy of the Zend license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@zend.com so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
15 | Authors: Andi Gutmans <andi@php.net> |
16 | Zeev Suraski <zeev@php.net> |
17 | Dmitry Stogov <dmitry@php.net> |
18 | Xinchen Hui <laruence@php.net> |
19 +----------------------------------------------------------------------+
20 */
21
22 #ifndef ZEND_TYPES_H
23 #define ZEND_TYPES_H
24
25 #include "zend_portability.h"
26 #include "zend_long.h"
27 #include <stdbool.h>
28 #include <stdint.h>
29
30 #ifdef __SSE2__
31 # include <mmintrin.h>
32 # include <emmintrin.h>
33 #endif
34 #if defined(__AVX2__)
35 # include <immintrin.h>
36 #endif
37 #if defined(__aarch64__) || defined(_M_ARM64)
38 # include <arm_neon.h>
39 #endif
40
41 #ifdef WORDS_BIGENDIAN
42 # define ZEND_ENDIAN_LOHI(lo, hi) hi; lo;
43 # define ZEND_ENDIAN_LOHI_3(lo, mi, hi) hi; mi; lo;
44 # define ZEND_ENDIAN_LOHI_4(a, b, c, d) d; c; b; a;
45 # define ZEND_ENDIAN_LOHI_C(lo, hi) hi, lo
46 # define ZEND_ENDIAN_LOHI_C_3(lo, mi, hi) hi, mi, lo,
47 # define ZEND_ENDIAN_LOHI_C_4(a, b, c, d) d, c, b, a
48 #else
49 # define ZEND_ENDIAN_LOHI(lo, hi) lo; hi;
50 # define ZEND_ENDIAN_LOHI_3(lo, mi, hi) lo; mi; hi;
51 # define ZEND_ENDIAN_LOHI_4(a, b, c, d) a; b; c; d;
52 # define ZEND_ENDIAN_LOHI_C(lo, hi) lo, hi
53 # define ZEND_ENDIAN_LOHI_C_3(lo, mi, hi) lo, mi, hi,
54 # define ZEND_ENDIAN_LOHI_C_4(a, b, c, d) a, b, c, d
55 #endif
56
57 typedef unsigned char zend_uchar;
58
/* Engine-wide operation result code: SUCCESS (0) or FAILURE (-1).
 * Prefer the zend_result alias below in new code. */
typedef enum {
	SUCCESS =  0,
	FAILURE = -1,		/* this MUST stay a negative number, or it may affect functions! */
} ZEND_RESULT_CODE;

typedef ZEND_RESULT_CODE zend_result;
65
66 #ifdef ZEND_ENABLE_ZVAL_LONG64
67 # ifdef ZEND_WIN32
68 # define ZEND_SIZE_MAX _UI64_MAX
69 # else
70 # define ZEND_SIZE_MAX SIZE_MAX
71 # endif
72 #else
73 # if defined(ZEND_WIN32)
74 # define ZEND_SIZE_MAX _UI32_MAX
75 # else
76 # define ZEND_SIZE_MAX SIZE_MAX
77 # endif
78 #endif
79
80 #ifdef ZTS
81 #define ZEND_TLS static TSRM_TLS
82 #define ZEND_EXT_TLS TSRM_TLS
83 #else
84 #define ZEND_TLS static
85 #define ZEND_EXT_TLS
86 #endif
87
88 typedef struct _zend_object_handlers zend_object_handlers;
89 typedef struct _zend_class_entry zend_class_entry;
90 typedef union _zend_function zend_function;
91 typedef struct _zend_execute_data zend_execute_data;
92
93 typedef struct _zval_struct zval;
94
95 typedef struct _zend_refcounted zend_refcounted;
96 typedef struct _zend_string zend_string;
97 typedef struct _zend_array zend_array;
98 typedef struct _zend_object zend_object;
99 typedef struct _zend_resource zend_resource;
100 typedef struct _zend_reference zend_reference;
101 typedef struct _zend_ast_ref zend_ast_ref;
102 typedef struct _zend_ast zend_ast;
103
104 typedef int (*compare_func_t)(const void *, const void *);
105 typedef void (*swap_func_t)(void *, void *);
106 typedef void (*sort_func_t)(void *, size_t, size_t, compare_func_t, swap_func_t);
107 typedef void (*dtor_func_t)(zval *pDest);
108 typedef void (*copy_ctor_func_t)(zval *pElement);
109
110 /*
111 * zend_type - is an abstraction layer to represent information about type hint.
112 * It shouldn't be used directly. Only through ZEND_TYPE_* macros.
113 *
114 * ZEND_TYPE_IS_SET() - checks if there is a type-hint
115 * ZEND_TYPE_IS_ONLY_MASK() - checks if type-hint refer to standard type only
116 * ZEND_TYPE_IS_COMPLEX() - checks if type is a type_list, or contains a class either as a CE or as a name
117 * ZEND_TYPE_HAS_NAME() - checks if type-hint contains some class as zend_string *
118 * ZEND_TYPE_HAS_LITERAL_NAME() - checks if type-hint contains some class as const char *
119 * ZEND_TYPE_IS_INTERSECTION() - checks if the type_list represents an intersection type list
120 * ZEND_TYPE_IS_UNION() - checks if the type_list represents a union type list
121 *
122 * ZEND_TYPE_NAME() - returns referenced class name
123 * ZEND_TYPE_PURE_MASK() - returns MAY_BE_* type mask
124 * ZEND_TYPE_FULL_MASK() - returns MAY_BE_* type mask together with other flags
125 *
126 * ZEND_TYPE_ALLOW_NULL() - checks if NULL is allowed
127 *
128 * ZEND_TYPE_INIT_*() should be used for construction.
129 */
130
/* Compact representation of a declared type (argument/property/return).
 * Never access the fields directly — use the ZEND_TYPE_* macros below;
 * the meaning of `ptr` depends on the kind bits set in `type_mask`. */
typedef struct {
	/* Not using a union here, because there's no good way to initialize them
	 * in a way that is supported in both C and C++ (designated initializers
	 * are only supported since C++20). */
	void *ptr;          /* zend_string*, const char* or zend_type_list*,
	                     * discriminated by the _ZEND_TYPE_*_BIT kind bits */
	uint32_t type_mask; /* MAY_BE_* bits plus kind/flag bits (see _ZEND_TYPE_* below) */
	/* TODO: We could use the extra 32-bit of padding on 64-bit systems. */
} zend_type;

/* Variable-length list of types, used for union/intersection types.
 * Allocate with ZEND_TYPE_LIST_SIZE(num_types); `types` is a pre-C99
 * trailing array that actually holds `num_types` entries. */
typedef struct {
	uint32_t num_types;
	zend_type types[1];
} zend_type_list;
144
/* Bit layout of zend_type.type_mask:
 *   bits  0..17 - MAY_BE_* standard type mask (_ZEND_TYPE_MAY_BE_MASK)
 *   bits 18..24 - kind and list flags defined below
 *   bits 25..31 - extra flags (shifted by _ZEND_TYPE_EXTRA_FLAGS_SHIFT) */
#define _ZEND_TYPE_EXTRA_FLAGS_SHIFT 25
#define _ZEND_TYPE_MASK ((1u << 25) - 1)
/* Only one of these bits may be set. */
#define _ZEND_TYPE_NAME_BIT (1u << 24)
// Used to signify that type.ptr is not a `zend_string*` but a `const char*`,
#define _ZEND_TYPE_LITERAL_NAME_BIT (1u << 23)
#define _ZEND_TYPE_LIST_BIT (1u << 22)
#define _ZEND_TYPE_KIND_MASK (_ZEND_TYPE_LIST_BIT|_ZEND_TYPE_NAME_BIT|_ZEND_TYPE_LITERAL_NAME_BIT)
/* For BC behaviour with iterable type */
#define _ZEND_TYPE_ITERABLE_BIT (1u << 21)
/* Whether the type list is arena allocated */
#define _ZEND_TYPE_ARENA_BIT (1u << 20)
/* Whether the type list is an intersection type */
#define _ZEND_TYPE_INTERSECTION_BIT (1u << 19)
/* Whether the type is a union type */
#define _ZEND_TYPE_UNION_BIT (1u << 18)
/* Type mask excluding the flags above. */
#define _ZEND_TYPE_MAY_BE_MASK ((1u << 18) - 1)
/* Must have same value as MAY_BE_NULL */
#define _ZEND_TYPE_NULLABLE_BIT 0x2u
165
166 #define ZEND_TYPE_IS_SET(t) \
167 (((t).type_mask & _ZEND_TYPE_MASK) != 0)
168
169 /* If a type is complex it means it's either a list with a union or intersection,
170 * or the void pointer is a class name */
171 #define ZEND_TYPE_IS_COMPLEX(t) \
172 ((((t).type_mask) & _ZEND_TYPE_KIND_MASK) != 0)
173
174 #define ZEND_TYPE_HAS_NAME(t) \
175 ((((t).type_mask) & _ZEND_TYPE_NAME_BIT) != 0)
176
177 #define ZEND_TYPE_HAS_LITERAL_NAME(t) \
178 ((((t).type_mask) & _ZEND_TYPE_LITERAL_NAME_BIT) != 0)
179
180 #define ZEND_TYPE_HAS_LIST(t) \
181 ((((t).type_mask) & _ZEND_TYPE_LIST_BIT) != 0)
182
183 #define ZEND_TYPE_IS_ITERABLE_FALLBACK(t) \
184 ((((t).type_mask) & _ZEND_TYPE_ITERABLE_BIT) != 0)
185
186 #define ZEND_TYPE_IS_INTERSECTION(t) \
187 ((((t).type_mask) & _ZEND_TYPE_INTERSECTION_BIT) != 0)
188
189 #define ZEND_TYPE_IS_UNION(t) \
190 ((((t).type_mask) & _ZEND_TYPE_UNION_BIT) != 0)
191
192 #define ZEND_TYPE_USES_ARENA(t) \
193 ((((t).type_mask) & _ZEND_TYPE_ARENA_BIT) != 0)
194
195 #define ZEND_TYPE_IS_ONLY_MASK(t) \
196 (ZEND_TYPE_IS_SET(t) && (t).ptr == NULL)
197
198 #define ZEND_TYPE_NAME(t) \
199 ((zend_string *) (t).ptr)
200
201 #define ZEND_TYPE_LITERAL_NAME(t) \
202 ((const char *) (t).ptr)
203
204 #define ZEND_TYPE_LIST(t) \
205 ((zend_type_list *) (t).ptr)
206
207 #define ZEND_TYPE_LIST_SIZE(num_types) \
208 (sizeof(zend_type_list) + ((num_types) - 1) * sizeof(zend_type))
209
210 /* This iterates over a zend_type_list. */
211 #define ZEND_TYPE_LIST_FOREACH(list, type_ptr) do { \
212 zend_type *_list = (list)->types; \
213 zend_type *_end = _list + (list)->num_types; \
214 for (; _list < _end; _list++) { \
215 type_ptr = _list;
216
217 #define ZEND_TYPE_LIST_FOREACH_END() \
218 } \
219 } while (0)
220
221 /* This iterates over any zend_type. If it's a type list, all list elements will
222 * be visited. If it's a single type, only the single type is visited. */
223 #define ZEND_TYPE_FOREACH(type, type_ptr) do { \
224 zend_type *_cur, *_end; \
225 if (ZEND_TYPE_HAS_LIST(type)) { \
226 zend_type_list *_list = ZEND_TYPE_LIST(type); \
227 _cur = _list->types; \
228 _end = _cur + _list->num_types; \
229 } else { \
230 _cur = &(type); \
231 _end = _cur + 1; \
232 } \
233 do { \
234 type_ptr = _cur;
235
236 #define ZEND_TYPE_FOREACH_END() \
237 } while (++_cur < _end); \
238 } while (0)
239
240 #define ZEND_TYPE_SET_PTR(t, _ptr) \
241 ((t).ptr = (_ptr))
242
243 #define ZEND_TYPE_SET_PTR_AND_KIND(t, _ptr, kind_bit) do { \
244 (t).ptr = (_ptr); \
245 (t).type_mask &= ~_ZEND_TYPE_KIND_MASK; \
246 (t).type_mask |= (kind_bit); \
247 } while (0)
248
249 #define ZEND_TYPE_SET_LIST(t, list) \
250 ZEND_TYPE_SET_PTR_AND_KIND(t, list, _ZEND_TYPE_LIST_BIT)
251
252 /* FULL_MASK() includes the MAY_BE_* type mask, as well as additional metadata bits.
253 * The PURE_MASK() only includes the MAY_BE_* type mask. */
254 #define ZEND_TYPE_FULL_MASK(t) \
255 ((t).type_mask)
256
257 #define ZEND_TYPE_PURE_MASK(t) \
258 ((t).type_mask & _ZEND_TYPE_MAY_BE_MASK)
259
260 #define ZEND_TYPE_FULL_MASK_WITHOUT_NULL(t) \
261 ((t).type_mask & ~_ZEND_TYPE_NULLABLE_BIT)
262
263 #define ZEND_TYPE_PURE_MASK_WITHOUT_NULL(t) \
264 ((t).type_mask & _ZEND_TYPE_MAY_BE_MASK & ~_ZEND_TYPE_NULLABLE_BIT)
265
266 #define ZEND_TYPE_CONTAINS_CODE(t, code) \
267 (((t).type_mask & (1u << (code))) != 0)
268
269 #define ZEND_TYPE_ALLOW_NULL(t) \
270 (((t).type_mask & _ZEND_TYPE_NULLABLE_BIT) != 0)
271
272 #if defined(__cplusplus) && defined(_MSC_VER)
273 # define _ZEND_TYPE_PREFIX zend_type
274 #else
275 /* FIXME: We could add (zend_type) here at some point but this breaks in MSVC because
276 * (zend_type)(zend_type){} is no longer considered constant. */
277 # define _ZEND_TYPE_PREFIX
278 #endif
279
280 #define ZEND_TYPE_INIT_NONE(extra_flags) \
281 _ZEND_TYPE_PREFIX { NULL, (extra_flags) }
282
283 #define ZEND_TYPE_INIT_MASK(_type_mask) \
284 _ZEND_TYPE_PREFIX { NULL, (_type_mask) }
285
286 #define ZEND_TYPE_INIT_CODE(code, allow_null, extra_flags) \
287 ZEND_TYPE_INIT_MASK(((code) == _IS_BOOL ? MAY_BE_BOOL : ( (code) == IS_ITERABLE ? _ZEND_TYPE_ITERABLE_BIT : ((code) == IS_MIXED ? MAY_BE_ANY : (1 << (code))))) \
288 | ((allow_null) ? _ZEND_TYPE_NULLABLE_BIT : 0) | (extra_flags))
289
290 #define ZEND_TYPE_INIT_PTR(ptr, type_kind, allow_null, extra_flags) \
291 _ZEND_TYPE_PREFIX { (void *) (ptr), \
292 (type_kind) | ((allow_null) ? _ZEND_TYPE_NULLABLE_BIT : 0) | (extra_flags) }
293
294 #define ZEND_TYPE_INIT_PTR_MASK(ptr, type_mask) \
295 _ZEND_TYPE_PREFIX { (void *) (ptr), (type_mask) }
296
297 #define ZEND_TYPE_INIT_UNION(ptr, extra_flags) \
298 _ZEND_TYPE_PREFIX { (void *) (ptr), (_ZEND_TYPE_LIST_BIT|_ZEND_TYPE_UNION_BIT) | (extra_flags) }
299
300 #define ZEND_TYPE_INIT_INTERSECTION(ptr, extra_flags) \
301 _ZEND_TYPE_PREFIX { (void *) (ptr), (_ZEND_TYPE_LIST_BIT|_ZEND_TYPE_INTERSECTION_BIT) | (extra_flags) }
302
303 #define ZEND_TYPE_INIT_CLASS(class_name, allow_null, extra_flags) \
304 ZEND_TYPE_INIT_PTR(class_name, _ZEND_TYPE_NAME_BIT, allow_null, extra_flags)
305
306 #define ZEND_TYPE_INIT_CLASS_MASK(class_name, type_mask) \
307 ZEND_TYPE_INIT_PTR_MASK(class_name, _ZEND_TYPE_NAME_BIT | (type_mask))
308
309 #define ZEND_TYPE_INIT_CLASS_CONST(class_name, allow_null, extra_flags) \
310 ZEND_TYPE_INIT_PTR(class_name, _ZEND_TYPE_LITERAL_NAME_BIT, allow_null, extra_flags)
311
312 #define ZEND_TYPE_INIT_CLASS_CONST_MASK(class_name, type_mask) \
313 ZEND_TYPE_INIT_PTR_MASK(class_name, (_ZEND_TYPE_LITERAL_NAME_BIT | (type_mask)))
314
/* Payload of a zval. Only one member is valid at a time; which one is
 * determined by the owning zval's u1.v.type tag (IS_* constants below). */
typedef union _zend_value {
	zend_long         lval;      /* long value */
	double            dval;      /* double value */
	zend_refcounted  *counted;   /* generic view of any refcounted payload */
	zend_string      *str;
	zend_array       *arr;
	zend_object      *obj;
	zend_resource    *res;
	zend_reference   *ref;
	zend_ast_ref     *ast;
	zval             *zv;        /* used with IS_INDIRECT */
	void             *ptr;       /* used with IS_PTR */
	zend_class_entry *ce;
	zend_function    *func;
	struct {
		uint32_t w1;             /* raw access to the two 32-bit halves */
		uint32_t w2;
	} ww;
} zend_value;
334
/* The PHP value container: the payload plus two 32-bit metadata words.
 * u1 holds the type tag and type flags; u2 is scratch space whose meaning
 * depends on where the zval is embedded (see member comments). */
struct _zval_struct {
	zend_value        value;			/* value */
	union {
		uint32_t type_info;             /* type + type_flags + extra as one word */
		struct {
			ZEND_ENDIAN_LOHI_3(
				uint8_t    type,			/* active type */
				uint8_t    type_flags,      /* IS_TYPE_REFCOUNTED/IS_TYPE_COLLECTABLE */
				union {
					uint16_t  extra;        /* not further specified */
				} u)
		} v;
	} u1;
	union {
		uint32_t     next;                 /* hash collision chain */
		uint32_t     cache_slot;           /* cache slot (for RECV_INIT) */
		uint32_t     opline_num;           /* opline number (for FAST_CALL) */
		uint32_t     lineno;               /* line number (for ast nodes) */
		uint32_t     num_args;             /* arguments number for EX(This) */
		uint32_t     fe_pos;               /* foreach position */
		uint32_t     fe_iter_idx;          /* foreach iterator index */
		uint32_t     guard;                /* recursion and single property guard */
		uint32_t     constant_flags;       /* constant flags */
		uint32_t     extra;                /* not further specified */
	} u2;
};
361
/* Common header embedded at the start of every refcounted payload
 * (strings, arrays, objects, resources, references, ASTs).
 * type_info packs GC type, flags and GC info — see the GC_*_MASK
 * constants and zval_gc_*() accessors below. */
typedef struct _zend_refcounted_h {
	uint32_t         refcount;			/* reference counter 32-bit */
	union {
		uint32_t type_info;
	} u;
} zend_refcounted_h;

/* Generic view of any refcounted payload: just the shared header. */
struct _zend_refcounted {
	zend_refcounted_h gc;
};
372
/* Refcounted string: cached hash, byte length and the character data
 * stored inline. `val` is a pre-C99 trailing array — the structure is
 * over-allocated so the actual buffer extends past [1]. */
struct _zend_string {
	zend_refcounted_h gc;
	zend_ulong        h;                /* hash value */
	size_t            len;              /* length in bytes */
	char              val[1];           /* inline character data */
};
379
/* One slot of a non-packed hashtable: the value plus its key.
 * For integer keys `key` is NULL and `h` holds the numeric index;
 * for string keys `h` caches the key's hash. */
typedef struct _Bucket {
	zval              val;
	zend_ulong        h;                /* hash value (or numeric index)   */
	zend_string      *key;              /* string key or NULL for numerics */
} Bucket;
385
typedef struct _zend_array HashTable;

/* PHP array / hashtable. The hash slots live *before* arData/arPacked
 * in memory and are addressed with negative indexes via nTableMask —
 * see the "HashTable Data Layout" diagram below. */
struct _zend_array {
	zend_refcounted_h gc;
	union {
		struct {
			ZEND_ENDIAN_LOHI_4(
				uint8_t    flags,           /* HASH_FLAG_* (packed/static-keys/...) */
				uint8_t    _unused,
				uint8_t    nIteratorsCount, /* active external iterators */
				uint8_t    _unused2)
		} v;
		uint32_t flags;
	} u;
	uint32_t          nTableMask;       /* negated hash-slot count (see HT_SIZE_TO_MASK) */
	union {
		uint32_t     *arHash;   /* hash table (allocated above this pointer) */
		Bucket       *arData;   /* array of hash buckets */
		zval         *arPacked; /* packed array of zvals */
	};
	uint32_t          nNumUsed;         /* used slots in arData/arPacked (incl. holes) */
	uint32_t          nNumOfElements;   /* live elements */
	uint32_t          nTableSize;       /* allocated data slots (power of two) */
	uint32_t          nInternalPointer; /* position for reset/current/next */
	zend_long         nNextFreeElement; /* next auto-index for $a[] = ... */
	dtor_func_t       pDestructor;      /* per-element destructor, may be NULL */
};
413
414 /*
415 * HashTable Data Layout
416 * =====================
417 *
418 * +=============================+
419 * | HT_HASH(ht, ht->nTableMask) | +=============================+
420 * | ... | | HT_INVALID_IDX |
421 * | HT_HASH(ht, -1) | | HT_INVALID_IDX |
422 * +-----------------------------+ +-----------------------------+
423 * ht->arData ---> | Bucket[0] | ht->arPacked ---> | ZVAL[0] |
424 * | ... | | ... |
425 * | Bucket[ht->nTableSize-1] | | ZVAL[ht->nTableSize-1] |
426 * +=============================+ +=============================+
427 */
428
429 #define HT_INVALID_IDX ((uint32_t) -1)
430
431 #define HT_MIN_MASK ((uint32_t) -2)
432 #define HT_MIN_SIZE 8
433
434 /* HT_MAX_SIZE is chosen to satisfy the following constraints:
435 * - HT_SIZE_TO_MASK(HT_MAX_SIZE) != 0
436 * - HT_SIZE_EX(HT_MAX_SIZE, HT_SIZE_TO_MASK(HT_MAX_SIZE)) does not overflow or
437 * wrapparound, and is <= the addressable space size
438 * - HT_MAX_SIZE must be a power of two:
439 * (nTableSize<HT_MAX_SIZE ? nTableSize+nTableSize : nTableSize) <= HT_MAX_SIZE
440 */
441 #if SIZEOF_SIZE_T == 4
442 # define HT_MAX_SIZE 0x02000000
443 # define HT_HASH_TO_BUCKET_EX(data, idx) \
444 ((Bucket*)((char*)(data) + (idx)))
445 # define HT_IDX_TO_HASH(idx) \
446 ((idx) * sizeof(Bucket))
447 # define HT_HASH_TO_IDX(idx) \
448 ((idx) / sizeof(Bucket))
449 #elif SIZEOF_SIZE_T == 8
450 # define HT_MAX_SIZE 0x40000000
451 # define HT_HASH_TO_BUCKET_EX(data, idx) \
452 ((data) + (idx))
453 # define HT_IDX_TO_HASH(idx) \
454 (idx)
455 # define HT_HASH_TO_IDX(idx) \
456 (idx)
457 #else
458 # error "Unknown SIZEOF_SIZE_T"
459 #endif
460
461 #define HT_HASH_EX(data, idx) \
462 ((uint32_t*)(data))[(int32_t)(idx)]
463 #define HT_HASH(ht, idx) \
464 HT_HASH_EX((ht)->arHash, idx)
465
466 #define HT_SIZE_TO_MASK(nTableSize) \
467 ((uint32_t)(-((nTableSize) + (nTableSize))))
468 #define HT_HASH_SIZE(nTableMask) \
469 (((size_t)-(uint32_t)(nTableMask)) * sizeof(uint32_t))
470 #define HT_DATA_SIZE(nTableSize) \
471 ((size_t)(nTableSize) * sizeof(Bucket))
472 #define HT_SIZE_EX(nTableSize, nTableMask) \
473 (HT_DATA_SIZE((nTableSize)) + HT_HASH_SIZE((nTableMask)))
474 #define HT_SIZE(ht) \
475 HT_SIZE_EX((ht)->nTableSize, (ht)->nTableMask)
476 #define HT_USED_SIZE(ht) \
477 (HT_HASH_SIZE((ht)->nTableMask) + ((size_t)(ht)->nNumUsed * sizeof(Bucket)))
478 #define HT_PACKED_DATA_SIZE(nTableSize) \
479 ((size_t)(nTableSize) * sizeof(zval))
480 #define HT_PACKED_SIZE_EX(nTableSize, nTableMask) \
481 (HT_PACKED_DATA_SIZE((nTableSize)) + HT_HASH_SIZE((nTableMask)))
482 #define HT_PACKED_SIZE(ht) \
483 HT_PACKED_SIZE_EX((ht)->nTableSize, (ht)->nTableMask)
484 #define HT_PACKED_USED_SIZE(ht) \
485 (HT_HASH_SIZE((ht)->nTableMask) + ((size_t)(ht)->nNumUsed * sizeof(zval)))
486 #if defined(__AVX2__)
487 # define HT_HASH_RESET(ht) do { \
488 char *p = (char*)&HT_HASH(ht, (ht)->nTableMask); \
489 size_t size = HT_HASH_SIZE((ht)->nTableMask); \
490 __m256i ymm0 = _mm256_setzero_si256(); \
491 ymm0 = _mm256_cmpeq_epi64(ymm0, ymm0); \
492 ZEND_ASSERT(size >= 64 && ((size & 0x3f) == 0)); \
493 do { \
494 _mm256_storeu_si256((__m256i*)p, ymm0); \
495 _mm256_storeu_si256((__m256i*)(p+32), ymm0); \
496 p += 64; \
497 size -= 64; \
498 } while (size != 0); \
499 } while (0)
500 #elif defined(__SSE2__)
501 # define HT_HASH_RESET(ht) do { \
502 char *p = (char*)&HT_HASH(ht, (ht)->nTableMask); \
503 size_t size = HT_HASH_SIZE((ht)->nTableMask); \
504 __m128i xmm0 = _mm_setzero_si128(); \
505 xmm0 = _mm_cmpeq_epi8(xmm0, xmm0); \
506 ZEND_ASSERT(size >= 64 && ((size & 0x3f) == 0)); \
507 do { \
508 _mm_storeu_si128((__m128i*)p, xmm0); \
509 _mm_storeu_si128((__m128i*)(p+16), xmm0); \
510 _mm_storeu_si128((__m128i*)(p+32), xmm0); \
511 _mm_storeu_si128((__m128i*)(p+48), xmm0); \
512 p += 64; \
513 size -= 64; \
514 } while (size != 0); \
515 } while (0)
516 #elif defined(__aarch64__) || defined(_M_ARM64)
517 # define HT_HASH_RESET(ht) do { \
518 char *p = (char*)&HT_HASH(ht, (ht)->nTableMask); \
519 size_t size = HT_HASH_SIZE((ht)->nTableMask); \
520 int32x4_t t = vdupq_n_s32(-1); \
521 ZEND_ASSERT(size >= 64 && ((size & 0x3f) == 0)); \
522 do { \
523 vst1q_s32((int32_t*)p, t); \
524 vst1q_s32((int32_t*)(p+16), t); \
525 vst1q_s32((int32_t*)(p+32), t); \
526 vst1q_s32((int32_t*)(p+48), t); \
527 p += 64; \
528 size -= 64; \
529 } while (size != 0); \
530 } while (0)
531 #else
532 # define HT_HASH_RESET(ht) \
533 memset(&HT_HASH(ht, (ht)->nTableMask), HT_INVALID_IDX, HT_HASH_SIZE((ht)->nTableMask))
534 #endif
535 #define HT_HASH_RESET_PACKED(ht) do { \
536 HT_HASH(ht, -2) = HT_INVALID_IDX; \
537 HT_HASH(ht, -1) = HT_INVALID_IDX; \
538 } while (0)
539 #define HT_HASH_TO_BUCKET(ht, idx) \
540 HT_HASH_TO_BUCKET_EX((ht)->arData, idx)
541
542 #define HT_SET_DATA_ADDR(ht, ptr) do { \
543 (ht)->arData = (Bucket*)(((char*)(ptr)) + HT_HASH_SIZE((ht)->nTableMask)); \
544 } while (0)
545 #define HT_GET_DATA_ADDR(ht) \
546 ((char*)((ht)->arData) - HT_HASH_SIZE((ht)->nTableMask))
547
/* Position within a hashtable (index into arData/arPacked). */
typedef uint32_t HashPosition;

/* External (position-stable) iterator over a HashTable; instances are
 * stored in the executor's EG(ht_iterators) table. */
typedef struct _HashTableIterator {
	HashTable    *ht;
	HashPosition  pos;
	uint32_t     next_copy; // circular linked list via index into EG(ht_iterators)
} HashTableIterator;
555
/* Object payload. properties_table is a pre-C99 trailing array of the
 * declared properties; the structure is over-allocated per class.
 * `properties` is the (lazily materialized) dynamic property table. */
struct _zend_object {
	zend_refcounted_h gc;
	uint32_t          handle; // TODO: may be removed ???
	zend_class_entry *ce;
	const zend_object_handlers *handlers;
	HashTable        *properties;
	zval              properties_table[1];
};
564
/* Resource payload: an opaque pointer tagged with a registered
 * resource type id. */
struct _zend_resource {
	zend_refcounted_h gc;
	zend_long         handle; // TODO: may be removed ???
	int               type;   /* registered resource type id */
	void             *ptr;    /* type-specific payload */
};
571
/* Growable list of property infos; `ptr` is a pre-C99 trailing array. */
typedef struct {
	size_t num;
	size_t num_allocated;
	struct _zend_property_info *ptr[1];
} zend_property_info_list;

/* Tagged pointer: either a single property info (`ptr`, tag bit clear)
 * or a zend_property_info_list (`list`, low bit 0x1 set) — use the
 * ZEND_PROPERTY_INFO_SOURCE_* macros below to encode/decode. */
typedef union {
	struct _zend_property_info *ptr;
	uintptr_t list;
} zend_property_info_source_list;

#define ZEND_PROPERTY_INFO_SOURCE_FROM_LIST(list) (0x1 | (uintptr_t) (list))
#define ZEND_PROPERTY_INFO_SOURCE_TO_LIST(list) ((zend_property_info_list *) ((list) & ~0x1))
#define ZEND_PROPERTY_INFO_SOURCE_IS_LIST(list) ((list) & 0x1)
586
/* Reference payload (PHP &$var): a refcounted wrapper around a zval,
 * plus the typed-property sources that constrain assignments to it. */
struct _zend_reference {
	zend_refcounted_h              gc;
	zval                           val;
	zend_property_info_source_list sources;
};

/* Refcounted AST wrapper (used by IS_CONSTANT_AST zvals). */
struct _zend_ast_ref {
	zend_refcounted_h gc;
	/*zend_ast ast; zend_ast follows the zend_ast_ref structure */
};
597
598 /* Regular data types: Must be in sync with zend_variables.c. */
599 #define IS_UNDEF 0
600 #define IS_NULL 1
601 #define IS_FALSE 2
602 #define IS_TRUE 3
603 #define IS_LONG 4
604 #define IS_DOUBLE 5
605 #define IS_STRING 6
606 #define IS_ARRAY 7
607 #define IS_OBJECT 8
608 #define IS_RESOURCE 9
609 #define IS_REFERENCE 10
610 #define IS_CONSTANT_AST 11 /* Constant expressions */
611
612 /* Fake types used only for type hinting.
613 * These are allowed to overlap with the types below. */
614 #define IS_CALLABLE 12
615 #define IS_ITERABLE 13
616 #define IS_VOID 14
617 #define IS_STATIC 15
618 #define IS_MIXED 16
619 #define IS_NEVER 17
620
621 /* internal types */
622 #define IS_INDIRECT 12
623 #define IS_PTR 13
624 #define IS_ALIAS_PTR 14
625 #define _IS_ERROR 15
626
627 /* used for casts */
628 #define _IS_BOOL 18
629 #define _IS_NUMBER 19
630
631 /* guard flags */
632 #define ZEND_GUARD_PROPERTY_GET (1<<0)
633 #define ZEND_GUARD_PROPERTY_SET (1<<1)
634 #define ZEND_GUARD_PROPERTY_UNSET (1<<2)
635 #define ZEND_GUARD_PROPERTY_ISSET (1<<3)
636 #define ZEND_GUARD_PROPERTY_MASK 15
637 #define ZEND_GUARD_RECURSION_DEBUG (1<<4)
638 #define ZEND_GUARD_RECURSION_EXPORT (1<<5)
639 #define ZEND_GUARD_RECURSION_JSON (1<<6)
640
641 #define ZEND_GUARD_RECURSION_TYPE(t) ZEND_GUARD_RECURSION_ ## t
642
643 #define ZEND_GUARD_IS_RECURSIVE(pg, t) ((*pg & ZEND_GUARD_RECURSION_TYPE(t)) != 0)
644 #define ZEND_GUARD_PROTECT_RECURSION(pg, t) *pg |= ZEND_GUARD_RECURSION_TYPE(t)
645 #define ZEND_GUARD_UNPROTECT_RECURSION(pg, t) *pg &= ~ZEND_GUARD_RECURSION_TYPE(t)
646
/* Read the active type tag (IS_* constant) of a zval. */
static zend_always_inline uint8_t zval_get_type(const zval* pz) {
	return pz->u1.v.type;
}
650
651 #define ZEND_SAME_FAKE_TYPE(faketype, realtype) ( \
652 (faketype) == (realtype) \
653 || ((faketype) == _IS_BOOL && ((realtype) == IS_TRUE || (realtype) == IS_FALSE)) \
654 )
655
656 /* we should never set just Z_TYPE, we should set Z_TYPE_INFO */
657 #define Z_TYPE(zval) zval_get_type(&(zval))
658 #define Z_TYPE_P(zval_p) Z_TYPE(*(zval_p))
659
660 #define Z_TYPE_FLAGS(zval) (zval).u1.v.type_flags
661 #define Z_TYPE_FLAGS_P(zval_p) Z_TYPE_FLAGS(*(zval_p))
662
663 #define Z_TYPE_EXTRA(zval) (zval).u1.v.u.extra
664 #define Z_TYPE_EXTRA_P(zval_p) Z_TYPE_EXTRA(*(zval_p))
665
666 #define Z_TYPE_INFO(zval) (zval).u1.type_info
667 #define Z_TYPE_INFO_P(zval_p) Z_TYPE_INFO(*(zval_p))
668
669 #define Z_NEXT(zval) (zval).u2.next
670 #define Z_NEXT_P(zval_p) Z_NEXT(*(zval_p))
671
672 #define Z_CACHE_SLOT(zval) (zval).u2.cache_slot
673 #define Z_CACHE_SLOT_P(zval_p) Z_CACHE_SLOT(*(zval_p))
674
675 #define Z_LINENO(zval) (zval).u2.lineno
676 #define Z_LINENO_P(zval_p) Z_LINENO(*(zval_p))
677
678 #define Z_OPLINE_NUM(zval) (zval).u2.opline_num
679 #define Z_OPLINE_NUM_P(zval_p) Z_OPLINE_NUM(*(zval_p))
680
681 #define Z_FE_POS(zval) (zval).u2.fe_pos
682 #define Z_FE_POS_P(zval_p) Z_FE_POS(*(zval_p))
683
684 #define Z_FE_ITER(zval) (zval).u2.fe_iter_idx
685 #define Z_FE_ITER_P(zval_p) Z_FE_ITER(*(zval_p))
686
687 #define Z_GUARD(zval) (zval).u2.guard
688 #define Z_GUARD_P(zval_p) Z_GUARD(*(zval_p))
689
690 #define Z_CONSTANT_FLAGS(zval) (zval).u2.constant_flags
691 #define Z_CONSTANT_FLAGS_P(zval_p) Z_CONSTANT_FLAGS(*(zval_p))
692
693 #define Z_EXTRA(zval) (zval).u2.extra
694 #define Z_EXTRA_P(zval_p) Z_EXTRA(*(zval_p))
695
696 #define Z_COUNTED(zval) (zval).value.counted
697 #define Z_COUNTED_P(zval_p) Z_COUNTED(*(zval_p))
698
699 #define Z_TYPE_MASK 0xff
700 #define Z_TYPE_FLAGS_MASK 0xff00
701
702 #define Z_TYPE_FLAGS_SHIFT 8
703 #define Z_TYPE_INFO_EXTRA_SHIFT 16
704
705 #define GC_REFCOUNT(p) zend_gc_refcount(&(p)->gc)
706 #define GC_SET_REFCOUNT(p, rc) zend_gc_set_refcount(&(p)->gc, rc)
707 #define GC_ADDREF(p) zend_gc_addref(&(p)->gc)
708 #define GC_DELREF(p) zend_gc_delref(&(p)->gc)
709 #define GC_ADDREF_EX(p, rc) zend_gc_addref_ex(&(p)->gc, rc)
710 #define GC_DELREF_EX(p, rc) zend_gc_delref_ex(&(p)->gc, rc)
711 #define GC_TRY_ADDREF(p) zend_gc_try_addref(&(p)->gc)
712 #define GC_TRY_DELREF(p) zend_gc_try_delref(&(p)->gc)
713
714 #define GC_DTOR(p) \
715 do { \
716 zend_refcounted_h *_p = &(p)->gc; \
717 if (zend_gc_delref(_p) == 0) { \
718 rc_dtor_func((zend_refcounted *)_p); \
719 } else { \
720 gc_check_possible_root((zend_refcounted *)_p); \
721 } \
722 } while (0)
723
724 #define GC_DTOR_NO_REF(p) \
725 do { \
726 zend_refcounted_h *_p = &(p)->gc; \
727 if (zend_gc_delref(_p) == 0) { \
728 rc_dtor_func((zend_refcounted *)_p); \
729 } else { \
730 gc_check_possible_root_no_ref((zend_refcounted *)_p); \
731 } \
732 } while (0)
733
734 #define GC_TYPE_MASK 0x0000000f
735 #define GC_FLAGS_MASK 0x000003f0
736 #define GC_INFO_MASK 0xfffffc00
737 #define GC_FLAGS_SHIFT 0
738 #define GC_INFO_SHIFT 10
739
/* Extract the GC type (low 4 bits, GC_TYPE_MASK) from a refcounted
 * header's type_info word. */
static zend_always_inline uint8_t zval_gc_type(uint32_t gc_type_info) {
	return (gc_type_info & GC_TYPE_MASK);
}
743
/* Extract the GC flags (bits 4..9, GC_FLAGS_MASK) from a refcounted
 * header's type_info word, returned unshifted relative to GC_FLAGS_SHIFT. */
static zend_always_inline uint32_t zval_gc_flags(uint32_t gc_type_info) {
	return (gc_type_info >> GC_FLAGS_SHIFT) & (GC_FLAGS_MASK >> GC_FLAGS_SHIFT);
}
747
/* Extract the GC info field (bits 10..31, GC_INFO_MASK >> GC_INFO_SHIFT)
 * from a refcounted header's type_info word. */
static zend_always_inline uint32_t zval_gc_info(uint32_t gc_type_info) {
	return (gc_type_info >> GC_INFO_SHIFT);
}
751
752 #define GC_TYPE_INFO(p) (p)->gc.u.type_info
753 #define GC_TYPE(p) zval_gc_type(GC_TYPE_INFO(p))
754 #define GC_FLAGS(p) zval_gc_flags(GC_TYPE_INFO(p))
755 #define GC_INFO(p) zval_gc_info(GC_TYPE_INFO(p))
756
757 #define GC_ADD_FLAGS(p, flags) do { \
758 GC_TYPE_INFO(p) |= (flags) << GC_FLAGS_SHIFT; \
759 } while (0)
760 #define GC_DEL_FLAGS(p, flags) do { \
761 GC_TYPE_INFO(p) &= ~((flags) << GC_FLAGS_SHIFT); \
762 } while (0)
763
764 #define Z_GC_TYPE(zval) GC_TYPE(Z_COUNTED(zval))
765 #define Z_GC_TYPE_P(zval_p) Z_GC_TYPE(*(zval_p))
766
767 #define Z_GC_FLAGS(zval) GC_FLAGS(Z_COUNTED(zval))
768 #define Z_GC_FLAGS_P(zval_p) Z_GC_FLAGS(*(zval_p))
769
770 #define Z_GC_INFO(zval) GC_INFO(Z_COUNTED(zval))
771 #define Z_GC_INFO_P(zval_p) Z_GC_INFO(*(zval_p))
772 #define Z_GC_TYPE_INFO(zval) GC_TYPE_INFO(Z_COUNTED(zval))
773 #define Z_GC_TYPE_INFO_P(zval_p) Z_GC_TYPE_INFO(*(zval_p))
774
775 /* zval_gc_flags(zval.value->gc.u.type_info) (common flags) */
776 #define GC_NOT_COLLECTABLE (1<<4)
777 #define GC_PROTECTED (1<<5) /* used for recursion detection */
778 #define GC_IMMUTABLE (1<<6) /* can't be changed in place */
779 #define GC_PERSISTENT (1<<7) /* allocated using malloc */
780 #define GC_PERSISTENT_LOCAL (1<<8) /* persistent, but thread-local */
781
782 #define GC_NULL (IS_NULL | (GC_NOT_COLLECTABLE << GC_FLAGS_SHIFT))
783 #define GC_STRING (IS_STRING | (GC_NOT_COLLECTABLE << GC_FLAGS_SHIFT))
784 #define GC_ARRAY IS_ARRAY
785 #define GC_OBJECT IS_OBJECT
786 #define GC_RESOURCE (IS_RESOURCE | (GC_NOT_COLLECTABLE << GC_FLAGS_SHIFT))
787 #define GC_REFERENCE (IS_REFERENCE | (GC_NOT_COLLECTABLE << GC_FLAGS_SHIFT))
788 #define GC_CONSTANT_AST (IS_CONSTANT_AST | (GC_NOT_COLLECTABLE << GC_FLAGS_SHIFT))
789
790 /* zval.u1.v.type_flags */
791 #define IS_TYPE_REFCOUNTED (1<<0)
792 #define IS_TYPE_COLLECTABLE (1<<1)
793 /* Used for static variables to check if they have been initialized. We can't use IS_UNDEF because
794 * we can't store IS_UNDEF zvals in the static_variables HashTable. This needs to live in type_info
795 * so that the ZEND_ASSIGN overrides it but is moved to extra to avoid breaking the Z_REFCOUNTED()
796 * optimization that only checks for Z_TYPE_FLAGS() without `& (IS_TYPE_COLLECTABLE|IS_TYPE_REFCOUNTED)`. */
797 #define IS_STATIC_VAR_UNINITIALIZED (1<<0)
798
799 #if 1
800 /* This optimized version assumes that we have a single "type_flag" */
801 /* IS_TYPE_COLLECTABLE may be used only with IS_TYPE_REFCOUNTED */
802 # define Z_TYPE_INFO_REFCOUNTED(t) (((t) & Z_TYPE_FLAGS_MASK) != 0)
803 #else
804 # define Z_TYPE_INFO_REFCOUNTED(t) (((t) & (IS_TYPE_REFCOUNTED << Z_TYPE_FLAGS_SHIFT)) != 0)
805 #endif
806
807 /* extended types */
808 #define IS_INTERNED_STRING_EX IS_STRING
809
810 #define IS_STRING_EX (IS_STRING | (IS_TYPE_REFCOUNTED << Z_TYPE_FLAGS_SHIFT))
811 #define IS_ARRAY_EX (IS_ARRAY | (IS_TYPE_REFCOUNTED << Z_TYPE_FLAGS_SHIFT) | (IS_TYPE_COLLECTABLE << Z_TYPE_FLAGS_SHIFT))
812 #define IS_OBJECT_EX (IS_OBJECT | (IS_TYPE_REFCOUNTED << Z_TYPE_FLAGS_SHIFT) | (IS_TYPE_COLLECTABLE << Z_TYPE_FLAGS_SHIFT))
813 #define IS_RESOURCE_EX (IS_RESOURCE | (IS_TYPE_REFCOUNTED << Z_TYPE_FLAGS_SHIFT))
814 #define IS_REFERENCE_EX (IS_REFERENCE | (IS_TYPE_REFCOUNTED << Z_TYPE_FLAGS_SHIFT))
815
816 #define IS_CONSTANT_AST_EX (IS_CONSTANT_AST | (IS_TYPE_REFCOUNTED << Z_TYPE_FLAGS_SHIFT))
817
818 /* string flags (zval.value->gc.u.flags) */
819 #define IS_STR_CLASS_NAME_MAP_PTR GC_PROTECTED /* refcount is a map_ptr offset of class_entry */
820 #define IS_STR_INTERNED GC_IMMUTABLE /* interned string */
821 #define IS_STR_PERSISTENT GC_PERSISTENT /* allocated using malloc */
822 #define IS_STR_PERMANENT (1<<8) /* relives request boundary */
823 #define IS_STR_VALID_UTF8 (1<<9) /* valid UTF-8 according to PCRE */
824
825 /* array flags */
826 #define IS_ARRAY_IMMUTABLE GC_IMMUTABLE
827 #define IS_ARRAY_PERSISTENT GC_PERSISTENT
828
829 /* object flags (zval.value->gc.u.flags) */
830 #define IS_OBJ_WEAKLY_REFERENCED GC_PERSISTENT
831 #define IS_OBJ_DESTRUCTOR_CALLED (1<<8)
832 #define IS_OBJ_FREE_CALLED (1<<9)
833
834 #define OBJ_FLAGS(obj) GC_FLAGS(obj)
835
836 /* Fast class cache */
837 #define ZSTR_HAS_CE_CACHE(s) (GC_FLAGS(s) & IS_STR_CLASS_NAME_MAP_PTR)
838 #define ZSTR_GET_CE_CACHE(s) ZSTR_GET_CE_CACHE_EX(s, 1)
839 #define ZSTR_SET_CE_CACHE(s, ce) ZSTR_SET_CE_CACHE_EX(s, ce, 1)
840
841 #define ZSTR_VALID_CE_CACHE(s) EXPECTED((GC_REFCOUNT(s)-1)/sizeof(void *) < CG(map_ptr_last))
842
843 #define ZSTR_GET_CE_CACHE_EX(s, validate) \
844 ((!(validate) || ZSTR_VALID_CE_CACHE(s)) ? GET_CE_CACHE(GC_REFCOUNT(s)) : NULL)
845
846 #define ZSTR_SET_CE_CACHE_EX(s, ce, validate) do { \
847 if (!(validate) || ZSTR_VALID_CE_CACHE(s)) { \
848 ZEND_ASSERT((validate) || ZSTR_VALID_CE_CACHE(s)); \
849 SET_CE_CACHE(GC_REFCOUNT(s), ce); \
850 } \
851 } while (0)
852
853 #define GET_CE_CACHE(ce_cache) \
854 (*(zend_class_entry **)ZEND_MAP_PTR_OFFSET2PTR(ce_cache))
855
856 #define SET_CE_CACHE(ce_cache, ce) do { \
857 *((zend_class_entry **)ZEND_MAP_PTR_OFFSET2PTR(ce_cache)) = ce; \
858 } while (0)
859
/* Recursion protection macros must be used only for arrays and objects.
 * GC_PROTECTED marks a structure currently being traversed so that a
 * traversal can detect re-entry into the same value. */
#define GC_IS_RECURSIVE(p) \
	(GC_FLAGS(p) & GC_PROTECTED)

#define GC_PROTECT_RECURSION(p) do { \
		GC_ADD_FLAGS(p, GC_PROTECTED); \
	} while (0)

#define GC_UNPROTECT_RECURSION(p) do { \
		GC_DEL_FLAGS(p, GC_PROTECTED); \
	} while (0)

/* The TRY variants are no-ops for immutable structures, whose GC info
 * must not be modified. */
#define GC_TRY_PROTECT_RECURSION(p) do { \
		if (!(GC_FLAGS(p) & GC_IMMUTABLE)) GC_PROTECT_RECURSION(p); \
	} while (0)

#define GC_TRY_UNPROTECT_RECURSION(p) do { \
		if (!(GC_FLAGS(p) & GC_IMMUTABLE)) GC_UNPROTECT_RECURSION(p); \
	} while (0)

/* zval-level wrappers over the GC-header macros above. */
#define Z_IS_RECURSIVE(zval)		GC_IS_RECURSIVE(Z_COUNTED(zval))
#define Z_PROTECT_RECURSION(zval)	GC_PROTECT_RECURSION(Z_COUNTED(zval))
#define Z_UNPROTECT_RECURSION(zval)	GC_UNPROTECT_RECURSION(Z_COUNTED(zval))
#define Z_IS_RECURSIVE_P(zv)		Z_IS_RECURSIVE(*(zv))
#define Z_PROTECT_RECURSION_P(zv)	Z_PROTECT_RECURSION(*(zv))
#define Z_UNPROTECT_RECURSION_P(zv)	Z_UNPROTECT_RECURSION(*(zv))

/* Use a guard (pg) when one is available, otherwise fall back to the
 * GC_PROTECTED flag on the object itself. */
#define ZEND_GUARD_OR_GC_IS_RECURSIVE(pg, t, zobj) \
	(pg ? ZEND_GUARD_IS_RECURSIVE(pg, t) : GC_IS_RECURSIVE(zobj))

#define ZEND_GUARD_OR_GC_PROTECT_RECURSION(pg, t, zobj) do { \
		if (pg) { \
			ZEND_GUARD_PROTECT_RECURSION(pg, t); \
		} else { \
			GC_PROTECT_RECURSION(zobj); \
		} \
	} while(0)

#define ZEND_GUARD_OR_GC_UNPROTECT_RECURSION(pg, t, zobj) do { \
		if (pg) { \
			ZEND_GUARD_UNPROTECT_RECURSION(pg, t); \
		} else { \
			GC_UNPROTECT_RECURSION(zobj); \
		} \
	} while(0)
905
/* All data types < IS_STRING have their constructor/destructors skipped */
#define Z_CONSTANT(zval)			(Z_TYPE(zval) == IS_CONSTANT_AST)
#define Z_CONSTANT_P(zval_p)		Z_CONSTANT(*(zval_p))

#if 1
/* This optimized version assumes that we have a single "type_flag" */
/* IS_TYPE_COLLECTABLE may be used only with IS_TYPE_REFCOUNTED */
#define Z_REFCOUNTED(zval)			(Z_TYPE_FLAGS(zval) != 0)
#else
#define Z_REFCOUNTED(zval)			((Z_TYPE_FLAGS(zval) & IS_TYPE_REFCOUNTED) != 0)
#endif
#define Z_REFCOUNTED_P(zval_p)		Z_REFCOUNTED(*(zval_p))

/* Collectable values may participate in reference cycles tracked by the GC. */
#define Z_COLLECTABLE(zval)			((Z_TYPE_FLAGS(zval) & IS_TYPE_COLLECTABLE) != 0)
#define Z_COLLECTABLE_P(zval_p)		Z_COLLECTABLE(*(zval_p))

/* deprecated: (COPYABLE is the same as IS_ARRAY) */
#define Z_COPYABLE(zval)			(Z_TYPE(zval) == IS_ARRAY)
#define Z_COPYABLE_P(zval_p)		Z_COPYABLE(*(zval_p))
925
/* deprecated: (IMMUTABLE is the same as IS_ARRAY && !REFCOUNTED) */
#define Z_IMMUTABLE(zval)			(Z_TYPE_INFO(zval) == IS_ARRAY)
#define Z_IMMUTABLE_P(zval_p)		Z_IMMUTABLE(*(zval_p))
/* Fixed: previously expanded to Z_IMMUTABLE(zval_p), referencing an
 * undeclared identifier, so any use of Z_OPT_IMMUTABLE() failed to
 * compile (latent because the macro is deprecated and unused). */
#define Z_OPT_IMMUTABLE(zval)		Z_IMMUTABLE(zval)
#define Z_OPT_IMMUTABLE_P(zval_p)	Z_IMMUTABLE(*(zval_p))
931
/* the following Z_OPT_* macros make better code when Z_TYPE_INFO accessed before */
#define Z_OPT_TYPE(zval)			(Z_TYPE_INFO(zval) & Z_TYPE_MASK)
#define Z_OPT_TYPE_P(zval_p)		Z_OPT_TYPE(*(zval_p))

#define Z_OPT_CONSTANT(zval)		(Z_OPT_TYPE(zval) == IS_CONSTANT_AST)
#define Z_OPT_CONSTANT_P(zval_p)	Z_OPT_CONSTANT(*(zval_p))

#define Z_OPT_REFCOUNTED(zval)		Z_TYPE_INFO_REFCOUNTED(Z_TYPE_INFO(zval))
#define Z_OPT_REFCOUNTED_P(zval_p)	Z_OPT_REFCOUNTED(*(zval_p))

/* deprecated: (COPYABLE is the same as IS_ARRAY) */
#define Z_OPT_COPYABLE(zval)		(Z_OPT_TYPE(zval) == IS_ARRAY)
#define Z_OPT_COPYABLE_P(zval_p)	Z_OPT_COPYABLE(*(zval_p))

#define Z_OPT_ISREF(zval)			(Z_OPT_TYPE(zval) == IS_REFERENCE)
#define Z_OPT_ISREF_P(zval_p)		Z_OPT_ISREF(*(zval_p))

/* Plain (non-OPT) type predicates. */
#define Z_ISREF(zval)				(Z_TYPE(zval) == IS_REFERENCE)
#define Z_ISREF_P(zval_p)			Z_ISREF(*(zval_p))

#define Z_ISUNDEF(zval)				(Z_TYPE(zval) == IS_UNDEF)
#define Z_ISUNDEF_P(zval_p)			Z_ISUNDEF(*(zval_p))

#define Z_ISNULL(zval)				(Z_TYPE(zval) == IS_NULL)
#define Z_ISNULL_P(zval_p)			Z_ISNULL(*(zval_p))

/* _IS_ERROR is an internal pseudo-type (note the leading underscore). */
#define Z_ISERROR(zval)				(Z_TYPE(zval) == _IS_ERROR)
#define Z_ISERROR_P(zval_p)			Z_ISERROR(*(zval_p))
960
/* Raw accessors for the zval value union.  The *_P variants take a zval
 * pointer; the plain variants take a zval lvalue.  None of these check
 * the zval's type — callers must do so. */
#define Z_LVAL(zval)				(zval).value.lval
#define Z_LVAL_P(zval_p)			Z_LVAL(*(zval_p))

#define Z_DVAL(zval)				(zval).value.dval
#define Z_DVAL_P(zval_p)			Z_DVAL(*(zval_p))

#define Z_STR(zval)					(zval).value.str
#define Z_STR_P(zval_p)				Z_STR(*(zval_p))

#define Z_STRVAL(zval)				ZSTR_VAL(Z_STR(zval))
#define Z_STRVAL_P(zval_p)			Z_STRVAL(*(zval_p))

#define Z_STRLEN(zval)				ZSTR_LEN(Z_STR(zval))
#define Z_STRLEN_P(zval_p)			Z_STRLEN(*(zval_p))

#define Z_STRHASH(zval)				ZSTR_HASH(Z_STR(zval))
#define Z_STRHASH_P(zval_p)			Z_STRHASH(*(zval_p))

#define Z_ARR(zval)					(zval).value.arr
#define Z_ARR_P(zval_p)				Z_ARR(*(zval_p))

#define Z_ARRVAL(zval)				Z_ARR(zval)
#define Z_ARRVAL_P(zval_p)			Z_ARRVAL(*(zval_p))

#define Z_OBJ(zval)					(zval).value.obj
#define Z_OBJ_P(zval_p)				Z_OBJ(*(zval_p))

#define Z_OBJ_HT(zval)				Z_OBJ(zval)->handlers
#define Z_OBJ_HT_P(zval_p)			Z_OBJ_HT(*(zval_p))

#define Z_OBJ_HANDLER(zval, hf)		Z_OBJ_HT((zval))->hf
#define Z_OBJ_HANDLER_P(zv_p, hf)	Z_OBJ_HANDLER(*(zv_p), hf)

#define Z_OBJ_HANDLE(zval)			(Z_OBJ((zval)))->handle
#define Z_OBJ_HANDLE_P(zval_p)		Z_OBJ_HANDLE(*(zval_p))

#define Z_OBJCE(zval)				(Z_OBJ(zval)->ce)
#define Z_OBJCE_P(zval_p)			Z_OBJCE(*(zval_p))

/* Calls the object's get_properties handler; may build the table lazily. */
#define Z_OBJPROP(zval)				Z_OBJ_HT((zval))->get_properties(Z_OBJ(zval))
#define Z_OBJPROP_P(zval_p)			Z_OBJPROP(*(zval_p))
1002
/* Resource accessors.  Fixed: the *_P variants previously expanded
 * `*zval_p` without parentheses, unlike every sibling accessor
 * (Z_STR_P, Z_ARR_P, ...); an argument such as `base + i` would
 * mis-expand to `*base + i`.  Parenthesized for macro hygiene;
 * expansion is identical for all simple (well-formed) arguments. */
#define Z_RES(zval)					(zval).value.res
#define Z_RES_P(zval_p)				Z_RES(*(zval_p))

#define Z_RES_HANDLE(zval)			Z_RES(zval)->handle
#define Z_RES_HANDLE_P(zval_p)		Z_RES_HANDLE(*(zval_p))

#define Z_RES_TYPE(zval)			Z_RES(zval)->type
#define Z_RES_TYPE_P(zval_p)		Z_RES_TYPE(*(zval_p))

#define Z_RES_VAL(zval)				Z_RES(zval)->ptr
#define Z_RES_VAL_P(zval_p)			Z_RES_VAL(*(zval_p))
1014
/* Reference accessors: Z_REFVAL yields a pointer to the wrapped zval. */
#define Z_REF(zval)					(zval).value.ref
#define Z_REF_P(zval_p)				Z_REF(*(zval_p))

#define Z_REFVAL(zval)				&Z_REF(zval)->val
#define Z_REFVAL_P(zval_p)			Z_REFVAL(*(zval_p))

#define Z_AST(zval)					(zval).value.ast
#define Z_AST_P(zval_p)				Z_AST(*(zval_p))

/* The zend_ast payload is stored immediately after the zend_ast_ref header. */
#define GC_AST(p)					((zend_ast*)(((char*)p) + sizeof(zend_ast_ref)))

#define Z_ASTVAL(zval)				GC_AST(Z_AST(zval))
#define Z_ASTVAL_P(zval_p)			Z_ASTVAL(*(zval_p))

/* Internal-only payloads (IS_INDIRECT / IS_PTR zvals). */
#define Z_INDIRECT(zval)			(zval).value.zv
#define Z_INDIRECT_P(zval_p)		Z_INDIRECT(*(zval_p))

#define Z_CE(zval)					(zval).value.ce
#define Z_CE_P(zval_p)				Z_CE(*(zval_p))

#define Z_FUNC(zval)				(zval).value.func
#define Z_FUNC_P(zval_p)			Z_FUNC(*(zval_p))

#define Z_PTR(zval)					(zval).value.ptr
#define Z_PTR_P(zval_p)				Z_PTR(*(zval_p))
1040
/* ZVAL initialization macros.  They overwrite the target zval without
 * releasing any previous contents — the caller owns that. */
#define ZVAL_UNDEF(z) do {				\
		Z_TYPE_INFO_P(z) = IS_UNDEF;	\
	} while (0)

#define ZVAL_NULL(z) do {				\
		Z_TYPE_INFO_P(z) = IS_NULL;		\
	} while (0)

#define ZVAL_FALSE(z) do {				\
		Z_TYPE_INFO_P(z) = IS_FALSE;	\
	} while (0)

#define ZVAL_TRUE(z) do {				\
		Z_TYPE_INFO_P(z) = IS_TRUE;		\
	} while (0)

#define ZVAL_BOOL(z, b) do {			\
		Z_TYPE_INFO_P(z) =				\
			(b) ? IS_TRUE : IS_FALSE;	\
	} while (0)

#define ZVAL_LONG(z, l) do {			\
		zval *__z = (z);				\
		Z_LVAL_P(__z) = l;				\
		Z_TYPE_INFO_P(__z) = IS_LONG;	\
	} while (0)

#define ZVAL_DOUBLE(z, d) do {			\
		zval *__z = (z);				\
		Z_DVAL_P(__z) = d;				\
		Z_TYPE_INFO_P(__z) = IS_DOUBLE;	\
	} while (0)

/* ZVAL_STR takes ownership of the string without addref, and picks the
 * non-refcounted type info for interned strings. */
#define ZVAL_STR(z, s) do {						\
		zval *__z = (z);						\
		zend_string *__s = (s);					\
		Z_STR_P(__z) = __s;						\
		/* interned strings support */			\
		Z_TYPE_INFO_P(__z) = ZSTR_IS_INTERNED(__s) ? \
			IS_INTERNED_STRING_EX : 			\
			IS_STRING_EX;						\
	} while (0)

/* Caller guarantees the string is interned (no refcounting needed). */
#define ZVAL_INTERNED_STR(z, s) do {				\
		zval *__z = (z);							\
		zend_string *__s = (s);						\
		Z_STR_P(__z) = __s;							\
		Z_TYPE_INFO_P(__z) = IS_INTERNED_STRING_EX;	\
	} while (0)

/* Caller guarantees the string is NOT interned. */
#define ZVAL_NEW_STR(z, s) do {					\
		zval *__z = (z);						\
		zend_string *__s = (s);					\
		Z_STR_P(__z) = __s;						\
		Z_TYPE_INFO_P(__z) = IS_STRING_EX;		\
	} while (0)

/* Like ZVAL_STR but adds a reference for non-interned strings. */
#define ZVAL_STR_COPY(z, s) do {						\
		zval *__z = (z);								\
		zend_string *__s = (s);							\
		Z_STR_P(__z) = __s;								\
		/* interned strings support */					\
		if (ZSTR_IS_INTERNED(__s)) {					\
			Z_TYPE_INFO_P(__z) = IS_INTERNED_STRING_EX;	\
		} else {										\
			GC_ADDREF(__s);								\
			Z_TYPE_INFO_P(__z) = IS_STRING_EX;			\
		}												\
	} while (0)
1110
/* Takes ownership of the array without adding a reference. */
#define ZVAL_ARR(z, a) do {						\
		zend_array *__arr = (a);				\
		zval *__z = (z);						\
		Z_ARR_P(__z) = __arr;					\
		Z_TYPE_INFO_P(__z) = IS_ARRAY_EX;		\
	} while (0)

/* Allocates an (uninitialized) zend_array with malloc for persistent use.
 * NOTE(review): malloc result is not checked here — callers appear to
 * rely on an abort-on-OOM policy; confirm. */
#define ZVAL_NEW_PERSISTENT_ARR(z) do {					\
		zval *__z = (z);								\
		zend_array *_arr =								\
		(zend_array *) malloc(sizeof(zend_array));		\
		Z_ARR_P(__z) = _arr;							\
		Z_TYPE_INFO_P(__z) = IS_ARRAY_EX;				\
	} while (0)

/* Takes ownership of the object without adding a reference. */
#define ZVAL_OBJ(z, o) do {						\
		zval *__z = (z);						\
		Z_OBJ_P(__z) = (o);						\
		Z_TYPE_INFO_P(__z) = IS_OBJECT_EX;		\
	} while (0)

/* Stores the object AND adds a reference. */
#define ZVAL_OBJ_COPY(z, o) do {				\
		zval *__z = (z);						\
		zend_object *__o = (o);					\
		GC_ADDREF(__o);							\
		Z_OBJ_P(__z) = __o;						\
		Z_TYPE_INFO_P(__z) = IS_OBJECT_EX;		\
	} while (0)

#define ZVAL_RES(z, r) do {						\
		zval *__z = (z);						\
		Z_RES_P(__z) = (r);						\
		Z_TYPE_INFO_P(__z) = IS_RESOURCE_EX;	\
	} while (0)

/* Allocates a fresh resource (refcount 1) on the request heap. */
#define ZVAL_NEW_RES(z, h, p, t) do {							\
		zend_resource *_res =									\
		(zend_resource *) emalloc(sizeof(zend_resource));		\
		zval *__z;												\
		GC_SET_REFCOUNT(_res, 1);								\
		GC_TYPE_INFO(_res) = GC_RESOURCE;						\
		_res->handle = (h);										\
		_res->type = (t);										\
		_res->ptr = (p);										\
		__z = (z);												\
		Z_RES_P(__z) = _res;									\
		Z_TYPE_INFO_P(__z) = IS_RESOURCE_EX;					\
	} while (0)

/* Same, but malloc'ed and flagged GC_PERSISTENT (survives the request). */
#define ZVAL_NEW_PERSISTENT_RES(z, h, p, t) do {				\
		zend_resource *_res =									\
		(zend_resource *) malloc(sizeof(zend_resource));		\
		zval *__z;												\
		GC_SET_REFCOUNT(_res, 1);								\
		GC_TYPE_INFO(_res) = GC_RESOURCE |						\
			(GC_PERSISTENT << GC_FLAGS_SHIFT);					\
		_res->handle = (h);										\
		_res->type = (t);										\
		_res->ptr = (p);										\
		__z = (z);												\
		Z_RES_P(__z) = _res;									\
		Z_TYPE_INFO_P(__z) = IS_RESOURCE_EX;					\
	} while (0)
1174
/* Takes ownership of an existing zend_reference without addref. */
#define ZVAL_REF(z, r) do {						\
		zval *__z = (z);						\
		Z_REF_P(__z) = (r);						\
		Z_TYPE_INFO_P(__z) = IS_REFERENCE_EX;	\
	} while (0)

/* Allocates a reference wrapper whose inner zval is left uninitialized. */
#define ZVAL_NEW_EMPTY_REF(z) do {								\
		zend_reference *_ref =									\
		(zend_reference *) emalloc(sizeof(zend_reference));		\
		GC_SET_REFCOUNT(_ref, 1);								\
		GC_TYPE_INFO(_ref) = GC_REFERENCE;						\
		_ref->sources.ptr = NULL;								\
		Z_REF_P(z) = _ref;										\
		Z_TYPE_INFO_P(z) = IS_REFERENCE_EX;						\
	} while (0)

/* Wraps the value of r in a new reference stored into z.
 * Note: z and r may alias only if evaluated safely by the caller —
 * ZVAL_MAKE_REF_EX below exists for the self-wrapping case. */
#define ZVAL_NEW_REF(z, r) do {									\
		zend_reference *_ref =									\
		(zend_reference *) emalloc(sizeof(zend_reference));		\
		GC_SET_REFCOUNT(_ref, 1);								\
		GC_TYPE_INFO(_ref) = GC_REFERENCE;						\
		ZVAL_COPY_VALUE(&_ref->val, r);							\
		_ref->sources.ptr = NULL;								\
		Z_REF_P(z) = _ref;										\
		Z_TYPE_INFO_P(z) = IS_REFERENCE_EX;						\
	} while (0)

/* In-place: moves z's current value into a new reference with the given
 * initial refcount, then points z at that reference. */
#define ZVAL_MAKE_REF_EX(z, refcount) do {						\
		zval *_z = (z);											\
		zend_reference *_ref =									\
		(zend_reference *) emalloc(sizeof(zend_reference));		\
		GC_SET_REFCOUNT(_ref, (refcount));						\
		GC_TYPE_INFO(_ref) = GC_REFERENCE;						\
		ZVAL_COPY_VALUE(&_ref->val, _z);						\
		_ref->sources.ptr = NULL;								\
		Z_REF_P(_z) = _ref;										\
		Z_TYPE_INFO_P(_z) = IS_REFERENCE_EX;					\
	} while (0)

/* malloc'ed, GC_PERSISTENT variant of ZVAL_NEW_REF. */
#define ZVAL_NEW_PERSISTENT_REF(z, r) do {						\
		zend_reference *_ref =									\
		(zend_reference *) malloc(sizeof(zend_reference));		\
		GC_SET_REFCOUNT(_ref, 1);								\
		GC_TYPE_INFO(_ref) = GC_REFERENCE |						\
			(GC_PERSISTENT << GC_FLAGS_SHIFT);					\
		ZVAL_COPY_VALUE(&_ref->val, r);							\
		_ref->sources.ptr = NULL;								\
		Z_REF_P(z) = _ref;										\
		Z_TYPE_INFO_P(z) = IS_REFERENCE_EX;						\
	} while (0)

#define ZVAL_AST(z, ast) do {							\
		zval *__z = (z);								\
		Z_AST_P(__z) = ast;								\
		Z_TYPE_INFO_P(__z) = IS_CONSTANT_AST_EX;		\
	} while (0)

/* Internal-only zval kinds (never visible to userland). */
#define ZVAL_INDIRECT(z, v) do {				\
		Z_INDIRECT_P(z) = (v);					\
		Z_TYPE_INFO_P(z) = IS_INDIRECT;			\
	} while (0)

#define ZVAL_PTR(z, p) do {						\
		Z_PTR_P(z) = (p);						\
		Z_TYPE_INFO_P(z) = IS_PTR;				\
	} while (0)

#define ZVAL_FUNC(z, f) do {					\
		Z_FUNC_P(z) = (f);						\
		Z_TYPE_INFO_P(z) = IS_PTR;				\
	} while (0)

#define ZVAL_CE(z, c) do {						\
		Z_CE_P(z) = (c);						\
		Z_TYPE_INFO_P(z) = IS_PTR;				\
	} while (0)

#define ZVAL_ALIAS_PTR(z, p) do {				\
		Z_PTR_P(z) = (p);						\
		Z_TYPE_INFO_P(z) = IS_ALIAS_PTR;		\
	} while (0)

#define ZVAL_ERROR(z) do {						\
		Z_TYPE_INFO_P(z) = _IS_ERROR;			\
	} while (0)
1260
/* zval refcount manipulation; implemented by the inline functions below,
 * which assert (in debug builds) that the zval is actually refcounted. */
#define Z_REFCOUNT_P(pz)			zval_refcount_p(pz)
#define Z_SET_REFCOUNT_P(pz, rc)	zval_set_refcount_p(pz, rc)
#define Z_ADDREF_P(pz)				zval_addref_p(pz)
#define Z_DELREF_P(pz)				zval_delref_p(pz)

#define Z_REFCOUNT(z)				Z_REFCOUNT_P(&(z))
#define Z_SET_REFCOUNT(z, rc)		Z_SET_REFCOUNT_P(&(z), rc)
#define Z_ADDREF(z)					Z_ADDREF_P(&(z))
#define Z_DELREF(z)					Z_DELREF_P(&(z))

/* TRY variants are no-ops for non-refcounted values (scalars, interned
 * strings, immutable arrays). */
#define Z_TRY_ADDREF_P(pz) do {		\
		if (Z_REFCOUNTED_P((pz))) {	\
			Z_ADDREF_P((pz));		\
		}							\
	} while (0)

#define Z_TRY_DELREF_P(pz) do {		\
		if (Z_REFCOUNTED_P((pz))) {	\
			Z_DELREF_P((pz));		\
		}							\
	} while (0)

#define Z_TRY_ADDREF(z)				Z_TRY_ADDREF_P(&(z))
#define Z_TRY_DELREF(z)				Z_TRY_DELREF_P(&(z))
1285
#ifndef ZEND_RC_DEBUG
# define ZEND_RC_DEBUG 0
#endif

#if ZEND_RC_DEBUG
extern ZEND_API bool zend_rc_debug;
/* The GC_PERSISTENT flag is reused for IS_OBJ_WEAKLY_REFERENCED on objects.
 * Skip checks for OBJECT/NULL type to avoid interpreting the flag incorrectly. */
/* Debug-build guard run before every refcount modification: asserts the
 * header is neither immutable nor persistent-but-not-thread-local. */
# define ZEND_RC_MOD_CHECK(p) do { \
		if (zend_rc_debug) { \
			uint8_t type = zval_gc_type((p)->u.type_info); \
			if (type != IS_OBJECT && type != IS_NULL) { \
				ZEND_ASSERT(!(zval_gc_flags((p)->u.type_info) & GC_IMMUTABLE)); \
				ZEND_ASSERT((zval_gc_flags((p)->u.type_info) & (GC_PERSISTENT|GC_PERSISTENT_LOCAL)) != GC_PERSISTENT); \
			} \
		} \
	} while (0)
# define GC_MAKE_PERSISTENT_LOCAL(p) do { \
		GC_ADD_FLAGS(p, GC_PERSISTENT_LOCAL); \
	} while (0)
#else
/* Release builds: both checks compile away entirely. */
# define ZEND_RC_MOD_CHECK(p) \
	do { } while (0)
# define GC_MAKE_PERSISTENT_LOCAL(p) \
	do { } while (0)
#endif
1312
zend_gc_refcount(const zend_refcounted_h * p)1313 static zend_always_inline uint32_t zend_gc_refcount(const zend_refcounted_h *p) {
1314 return p->refcount;
1315 }
1316
zend_gc_set_refcount(zend_refcounted_h * p,uint32_t rc)1317 static zend_always_inline uint32_t zend_gc_set_refcount(zend_refcounted_h *p, uint32_t rc) {
1318 p->refcount = rc;
1319 return p->refcount;
1320 }
1321
zend_gc_addref(zend_refcounted_h * p)1322 static zend_always_inline uint32_t zend_gc_addref(zend_refcounted_h *p) {
1323 ZEND_RC_MOD_CHECK(p);
1324 return ++(p->refcount);
1325 }
1326
zend_gc_try_addref(zend_refcounted_h * p)1327 static zend_always_inline void zend_gc_try_addref(zend_refcounted_h *p) {
1328 if (!(p->u.type_info & GC_IMMUTABLE)) {
1329 ZEND_RC_MOD_CHECK(p);
1330 ++p->refcount;
1331 }
1332 }
1333
zend_gc_try_delref(zend_refcounted_h * p)1334 static zend_always_inline void zend_gc_try_delref(zend_refcounted_h *p) {
1335 if (!(p->u.type_info & GC_IMMUTABLE)) {
1336 ZEND_RC_MOD_CHECK(p);
1337 --p->refcount;
1338 }
1339 }
1340
zend_gc_delref(zend_refcounted_h * p)1341 static zend_always_inline uint32_t zend_gc_delref(zend_refcounted_h *p) {
1342 ZEND_ASSERT(p->refcount > 0);
1343 ZEND_RC_MOD_CHECK(p);
1344 return --(p->refcount);
1345 }
1346
zend_gc_addref_ex(zend_refcounted_h * p,uint32_t rc)1347 static zend_always_inline uint32_t zend_gc_addref_ex(zend_refcounted_h *p, uint32_t rc) {
1348 ZEND_RC_MOD_CHECK(p);
1349 p->refcount += rc;
1350 return p->refcount;
1351 }
1352
zend_gc_delref_ex(zend_refcounted_h * p,uint32_t rc)1353 static zend_always_inline uint32_t zend_gc_delref_ex(zend_refcounted_h *p, uint32_t rc) {
1354 ZEND_RC_MOD_CHECK(p);
1355 p->refcount -= rc;
1356 return p->refcount;
1357 }
1358
/* Return the refcount of the value referenced by pz.
 * NOTE(review): the debug assert also admits IS_ARRAY zvals that are not
 * refcounted — presumably to tolerate immutable arrays; confirm. */
static zend_always_inline uint32_t zval_refcount_p(const zval* pz) {
#if ZEND_DEBUG
	ZEND_ASSERT(Z_REFCOUNTED_P(pz) || Z_TYPE_P(pz) == IS_ARRAY);
#endif
	return GC_REFCOUNT(Z_COUNTED_P(pz));
}
1365
zval_set_refcount_p(zval * pz,uint32_t rc)1366 static zend_always_inline uint32_t zval_set_refcount_p(zval* pz, uint32_t rc) {
1367 ZEND_ASSERT(Z_REFCOUNTED_P(pz));
1368 return GC_SET_REFCOUNT(Z_COUNTED_P(pz), rc);
1369 }
1370
zval_addref_p(zval * pz)1371 static zend_always_inline uint32_t zval_addref_p(zval* pz) {
1372 ZEND_ASSERT(Z_REFCOUNTED_P(pz));
1373 return GC_ADDREF(Z_COUNTED_P(pz));
1374 }
1375
zval_delref_p(zval * pz)1376 static zend_always_inline uint32_t zval_delref_p(zval* pz) {
1377 ZEND_ASSERT(Z_REFCOUNTED_P(pz));
1378 return GC_DELREF(Z_COUNTED_P(pz));
1379 }
1380
/* Low-level value transfer: copies value word(s) and type info from v to z
 * without touching refcounts.  On 32-bit builds the value union spans two
 * machine words, so the second word (w2) must be copied explicitly; on
 * 64-bit a single pointer-sized store suffices. */
#if SIZEOF_SIZE_T == 4
# define ZVAL_COPY_VALUE_EX(z, v, gc, t)				\
	do {												\
		uint32_t _w2 = v->value.ww.w2;					\
		Z_COUNTED_P(z) = gc;							\
		z->value.ww.w2 = _w2;							\
		Z_TYPE_INFO_P(z) = t;							\
	} while (0)
#elif SIZEOF_SIZE_T == 8
# define ZVAL_COPY_VALUE_EX(z, v, gc, t)				\
	do {												\
		Z_COUNTED_P(z) = gc;							\
		Z_TYPE_INFO_P(z) = t;							\
	} while (0)
#else
# error "Unknown SIZEOF_SIZE_T"
#endif
1398
/* Copy the value without changing any refcount (ownership transfer). */
#define ZVAL_COPY_VALUE(z, v)							\
	do {												\
		zval *_z1 = (z);								\
		const zval *_z2 = (v);							\
		zend_refcounted *_gc = Z_COUNTED_P(_z2);		\
		uint32_t _t = Z_TYPE_INFO_P(_z2);				\
		ZVAL_COPY_VALUE_EX(_z1, _z2, _gc, _t);			\
	} while (0)

/* Copy the value and add a reference when it is refcounted. */
#define ZVAL_COPY(z, v)									\
	do {												\
		zval *_z1 = (z);								\
		const zval *_z2 = (v);							\
		zend_refcounted *_gc = Z_COUNTED_P(_z2);		\
		uint32_t _t = Z_TYPE_INFO_P(_z2);				\
		ZVAL_COPY_VALUE_EX(_z1, _z2, _gc, _t);			\
		if (Z_TYPE_INFO_REFCOUNTED(_t)) {				\
			GC_ADDREF(_gc);								\
		}												\
	} while (0)

/* Like ZVAL_COPY, but arrays are duplicated (deep value copy) instead
 * of shared. */
#define ZVAL_DUP(z, v)									\
	do {												\
		zval *_z1 = (z);								\
		const zval *_z2 = (v);							\
		zend_refcounted *_gc = Z_COUNTED_P(_z2);		\
		uint32_t _t = Z_TYPE_INFO_P(_z2);				\
		if ((_t & Z_TYPE_MASK) == IS_ARRAY) {			\
			ZVAL_ARR(_z1, zend_array_dup((zend_array*)_gc));\
		} else {										\
			if (Z_TYPE_INFO_REFCOUNTED(_t)) {			\
				GC_ADDREF(_gc);							\
			}											\
			ZVAL_COPY_VALUE_EX(_z1, _z2, _gc, _t);		\
		}												\
	} while (0)
1435
1436
/* ZVAL_COPY_OR_DUP() should be used instead of ZVAL_COPY() and ZVAL_DUP()
 * in all places where the source may be a persistent zval.
 * Persistent (non-object) values cannot simply be addref'ed from request
 * context, so they are deep-copied via zval_copy_ctor_func() instead. */
#define ZVAL_COPY_OR_DUP(z, v)											\
	do {																\
		zval *_z1 = (z);												\
		const zval *_z2 = (v);											\
		zend_refcounted *_gc = Z_COUNTED_P(_z2);						\
		uint32_t _t = Z_TYPE_INFO_P(_z2);								\
		ZVAL_COPY_VALUE_EX(_z1, _z2, _gc, _t);							\
		if (Z_TYPE_INFO_REFCOUNTED(_t)) {								\
			/* Objects reuse PERSISTENT as WEAKLY_REFERENCED */			\
			if (EXPECTED(!(GC_FLAGS(_gc) & GC_PERSISTENT)				\
			 || GC_TYPE(_gc) == IS_OBJECT)) {							\
				GC_ADDREF(_gc);											\
			} else {													\
				zval_copy_ctor_func(_z1);								\
			}															\
		}																\
	} while (0)
1457
/* Advance z past an IS_REFERENCE wrapper (z itself is reassigned). */
#define ZVAL_DEREF(z) do {								\
		if (UNEXPECTED(Z_ISREF_P(z))) {					\
			(z) = Z_REFVAL_P(z);						\
		}												\
	} while (0)

/* Advance z past an IS_INDIRECT slot. */
#define ZVAL_DEINDIRECT(z) do {							\
		if (Z_TYPE_P(z) == IS_INDIRECT) {				\
			(z) = Z_INDIRECT_P(z);						\
		}												\
	} while (0)

/* ZVAL_DEREF using the Z_OPT_* predicate (better code when the type info
 * was already loaded). */
#define ZVAL_OPT_DEREF(z) do {							\
		if (UNEXPECTED(Z_OPT_ISREF_P(z))) {				\
			(z) = Z_REFVAL_P(z);						\
		}												\
	} while (0)

/* Ensure zv is an IS_REFERENCE, wrapping its current value if needed. */
#define ZVAL_MAKE_REF(zv) do {							\
		zval *__zv = (zv);								\
		if (!Z_ISREF_P(__zv)) {							\
			ZVAL_NEW_REF(__zv, __zv);					\
		}												\
	} while (0)

/* Collapse an IS_REFERENCE in place: moves the inner value into z and
 * frees the wrapper.  Caller must know the wrapper's refcount is 1. */
#define ZVAL_UNREF(z) do {								\
		zval *_z = (z);									\
		zend_reference *ref;							\
		ZEND_ASSERT(Z_ISREF_P(_z));						\
		ref = Z_REF_P(_z);								\
		ZVAL_COPY_VALUE(_z, &ref->val);					\
		efree_size(ref, sizeof(zend_reference));		\
	} while (0)

/* ZVAL_COPY that first follows an IS_REFERENCE in the source. */
#define ZVAL_COPY_DEREF(z, v) do {						\
		zval *_z3 = (v);								\
		if (Z_OPT_REFCOUNTED_P(_z3)) {					\
			if (UNEXPECTED(Z_OPT_ISREF_P(_z3))) {		\
				_z3 = Z_REFVAL_P(_z3);					\
				if (Z_OPT_REFCOUNTED_P(_z3)) {			\
					Z_ADDREF_P(_z3);					\
				}										\
			} else {									\
				Z_ADDREF_P(_z3);						\
			}											\
		}												\
		ZVAL_COPY_VALUE(z, _z3);						\
	} while (0)
1506
1507
/* Copy-on-write separation: give zv its own copy of a shared string. */
#define SEPARATE_STRING(zv) do {						\
		zval *_zv = (zv);								\
		if (Z_REFCOUNT_P(_zv) > 1) {					\
			zend_string *_str = Z_STR_P(_zv);			\
			ZEND_ASSERT(Z_REFCOUNTED_P(_zv));			\
			ZEND_ASSERT(!ZSTR_IS_INTERNED(_str));		\
			ZVAL_NEW_STR(_zv, zend_string_init(			\
				ZSTR_VAL(_str), ZSTR_LEN(_str), 0));	\
			GC_DELREF(_str);							\
		}											 	\
	} while (0)

/* Copy-on-write separation for arrays: duplicate when shared.
 * GC_TRY_DELREF leaves immutable arrays' refcount untouched. */
#define SEPARATE_ARRAY(zv) do {							\
		zval *__zv = (zv);								\
		zend_array *_arr = Z_ARR_P(__zv);				\
		if (UNEXPECTED(GC_REFCOUNT(_arr) > 1)) {		\
			ZVAL_ARR(__zv, zend_array_dup(_arr));		\
			GC_TRY_DELREF(_arr);						\
		}												\
	} while (0)

/* Separation when the caller has already ruled out IS_REFERENCE. */
#define SEPARATE_ZVAL_NOREF(zv) do {					\
		zval *_zv = (zv);								\
		ZEND_ASSERT(Z_TYPE_P(_zv) != IS_REFERENCE);		\
		if (Z_TYPE_P(_zv) == IS_ARRAY) {				\
			SEPARATE_ARRAY(_zv);						\
		}												\
	} while (0)

/* Full separation: unwrap an IS_REFERENCE (freeing it when this was the
 * last user, otherwise addref'ing/duplicating the inner value), then
 * apply array copy-on-write.  Note the `break`s exit the do/while. */
#define SEPARATE_ZVAL(zv) do {							\
		zval *_zv = (zv);								\
		if (Z_ISREF_P(_zv)) {							\
			zend_reference *_r = Z_REF_P(_zv);			\
			ZVAL_COPY_VALUE(_zv, &_r->val);				\
			if (GC_DELREF(_r) == 0) {					\
				efree_size(_r, sizeof(zend_reference));	\
			} else if (Z_OPT_TYPE_P(_zv) == IS_ARRAY) {	\
				ZVAL_ARR(_zv, zend_array_dup(Z_ARR_P(_zv)));\
				break;									\
			} else if (Z_OPT_REFCOUNTED_P(_zv)) {		\
				Z_ADDREF_P(_zv);						\
				break;									\
			}											\
		}												\
		if (Z_TYPE_P(_zv) == IS_ARRAY) {				\
			SEPARATE_ARRAY(_zv);						\
		}												\
	} while (0)
1556
/* Properties store a flag distinguishing unset and uninitialized properties
 * (both use IS_UNDEF type) in the Z_EXTRA space. As such we also need to copy
 * the Z_EXTRA space when copying property default values etc. We define separate
 * macros for this purpose, so this workaround is easier to remove in the future. */
#define IS_PROP_UNINIT (1<<0)
/* It has impact only on readonly properties */
#define IS_PROP_REINITABLE (1<<1)
#define Z_PROP_FLAG_P(z) Z_EXTRA_P(z)
/* Whole-struct assignment copies value, type info AND Z_EXTRA in one go. */
#define ZVAL_COPY_VALUE_PROP(z, v) \
	do { *(z) = *(v); } while (0)
#define ZVAL_COPY_PROP(z, v) \
	do { ZVAL_COPY(z, v); Z_PROP_FLAG_P(z) = Z_PROP_FLAG_P(v); } while (0)
#define ZVAL_COPY_OR_DUP_PROP(z, v) \
	do { ZVAL_COPY_OR_DUP(z, v); Z_PROP_FLAG_P(z) = Z_PROP_FLAG_P(v); } while (0)
1570
1571
zend_may_modify_arg_in_place(const zval * arg)1572 static zend_always_inline bool zend_may_modify_arg_in_place(const zval *arg)
1573 {
1574 return Z_REFCOUNTED_P(arg) && !(GC_FLAGS(Z_COUNTED_P(arg)) & (GC_IMMUTABLE | GC_PERSISTENT)) && Z_REFCOUNT_P(arg) == 1;
1575 }
1576
1577 #endif /* ZEND_TYPES_H */
1578