1 /*
2 +----------------------------------------------------------------------+
3 | Zend Engine |
4 +----------------------------------------------------------------------+
5 | Copyright (c) Zend Technologies Ltd. (http://www.zend.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 2.00 of the Zend license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.zend.com/license/2_00.txt. |
11 | If you did not receive a copy of the Zend license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@zend.com so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
15 | Authors: Sascha Schumann <sascha@schumann.cx> |
16 | Ard Biesheuvel <ard.biesheuvel@linaro.org> |
17 +----------------------------------------------------------------------+
18 */
19
20 #include "zend_portability.h"
21
22 #ifndef ZEND_MULTIPLY_H
23 #define ZEND_MULTIPLY_H
24
25 #if defined(PHP_HAVE_BUILTIN_SMULL_OVERFLOW) && SIZEOF_LONG == SIZEOF_ZEND_LONG
26
/* Multiply two longs with overflow detection via the compiler builtin.
 * On overflow (usedval) becomes non-zero and (dval) receives the product
 * as a double; otherwise (usedval) is zero and (lval) receives the exact
 * long product. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	long __product; \
	(usedval) = __builtin_smull_overflow((a), (b), &__product); \
	if ((usedval)) { \
		(dval) = (double) (a) * (double) (b); \
	} else { \
		(lval) = __product; \
	} \
} while (0)
34
35 #elif defined(PHP_HAVE_BUILTIN_SMULLL_OVERFLOW) && SIZEOF_LONG_LONG == SIZEOF_ZEND_LONG
36
/* Same contract as the smull variant above, but for platforms where
 * zend_long is a long long: overflow goes to (dval)/(usedval)=1, an exact
 * product goes to (lval)/(usedval)=0. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	long long __product; \
	(usedval) = __builtin_smulll_overflow((a), (b), &__product); \
	if ((usedval)) { \
		(dval) = (double) (a) * (double) (b); \
	} else { \
		(lval) = __product; \
	} \
} while (0)
44
45 #elif (defined(__i386__) || defined(__x86_64__)) && defined(__GNUC__)
46
/* x86/x86-64 (GCC inline asm): "imul" sets the carry flag when the signed
 * product does not fit in a register; "adc $0,%1" then adds that carry into
 * usedval, which is seeded with 0 through the "1"(0) input constraint. So
 * usedval is non-zero exactly on overflow, in which case the product is
 * recomputed as a double into dval; otherwise lval gets the exact result. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	zend_long __tmpvar; \
	__asm__ ("imul %3,%0\n" \
		"adc $0,%1" \
			: "=r"(__tmpvar),"=r"(usedval) \
			: "0"(a), "r"(b), "1"(0)); \
	if (usedval) (dval) = (double) (a) * (double) (b); \
	else (lval) = __tmpvar; \
} while (0)
56
57 #elif defined(__arm__) && defined(__GNUC__)
58
/* ARM 32-bit (GCC inline asm): "smull" yields the full 64-bit signed
 * product (low word -> __tmpvar, high word -> usedval). The "sub"
 * subtracts the sign extension of the low word (low asr 31) from the high
 * word; the result is zero exactly when the product fits in 32 signed
 * bits, so usedval != 0 signals overflow. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	zend_long __tmpvar; \
	__asm__("smull %0, %1, %2, %3\n" \
		"sub %1, %1, %0, asr #31" \
			: "=r"(__tmpvar), "=r"(usedval) \
			: "r"(a), "r"(b)); \
	if (usedval) (dval) = (double) (a) * (double) (b); \
	else (lval) = __tmpvar; \
} while (0)
68
69 #elif defined(__aarch64__) && defined(__GNUC__)
70
/* AArch64 (GCC inline asm): "mul" gives the low 64 bits and "smulh" the
 * high 64 bits of the signed product; subtracting the sign extension of
 * the low half (low asr 63) leaves usedval zero exactly when the product
 * fits in 64 signed bits. Outputs use early-clobber ("=&r") because the
 * inputs are still read after the first instruction writes %0. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	zend_long __tmpvar; \
	__asm__("mul %0, %2, %3\n" \
		"smulh %1, %2, %3\n" \
		"sub %1, %1, %0, asr #63\n" \
			: "=&r"(__tmpvar), "=&r"(usedval) \
			: "r"(a), "r"(b)); \
	if (usedval) (dval) = (double) (a) * (double) (b); \
	else (lval) = __tmpvar; \
} while (0)
81
82 #elif defined(ZEND_WIN32)
83
84 # ifdef _M_X64
85 # pragma intrinsic(_mul128)
/* MSVC x64: _mul128 returns the low 64 bits and stores the high 64 bits
 * in __high. The 128-bit product fits in a signed 64-bit value exactly
 * when the high half equals the sign extension of the low half
 * (__low >> 63; arithmetic shift on MSVC for signed operands). */
# define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	__int64 __high; \
	__int64 __low = _mul128((a), (b), &__high); \
	if ((__low >> 63I64) == __high) { \
		(usedval) = 0; \
		(lval) = __low; \
	} else { \
		(usedval) = 1; \
		(dval) = (double)(a) * (double)(b); \
	} \
} while (0)
97 # elif defined(_M_ARM64)
98 # pragma intrinsic(__mulh)
/* MSVC ARM64: __mulh yields the high 64 bits of the signed product and a
 * plain multiplication yields the (possibly wrapped) low 64 bits. As in
 * the x64 branch, the result fits exactly when the high half matches the
 * sign extension of the low half. */
# define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	__int64 __high = __mulh((a), (b)); \
	__int64 __low = (a) * (b); \
	if ((__low >> 63I64) == __high) { \
		(usedval) = 0; \
		(lval) = __low; \
	} else { \
		(usedval) = 1; \
		(dval) = (double)(a) * (double)(b); \
	} \
} while (0)
110 # else
/* Windows fallback (non-x64/ARM64): compute the product both as a
 * (possibly wrapped) zend_long and as a long double; __delta is the
 * difference between the integer and floating result, so
 * (__dres + __delta) != __dres exactly when the integer multiply did not
 * produce the true product.
 * NOTE(review): the signed (a)*(b) itself can overflow, which is UB in
 * standard C — this relies on the target compiler's wrap-around behavior.
 * Also assumes long double has enough precision to tell an overflowed
 * product apart from the exact one; confirm for this branch's targets. */
# define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	zend_long __lres = (a) * (b); \
	long double __dres = (long double)(a) * (long double)(b); \
	long double __delta = (long double) __lres - __dres; \
	if ( ((usedval) = (( __dres + __delta ) != __dres))) { \
		(dval) = __dres; \
	} else { \
		(lval) = __lres; \
	} \
} while (0)
121 # endif
122
123 #elif defined(__powerpc64__) && defined(__GNUC__)
124
/* PowerPC64 (GCC inline asm): "mulld" gives the low 64 bits and "mulhd"
 * the high 64 bits of the signed product. The product fits in a signed
 * 64-bit value exactly when the high half equals the sign extension of
 * the low half (__low >> 63). */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	long __low, __high; \
	__asm__("mulld %0,%2,%3\n\t" \
		"mulhd %1,%2,%3\n" \
			: "=&r"(__low), "=&r"(__high) \
			: "r"(a), "r"(b)); \
	if ((__low >> 63) != __high) { \
		(dval) = (double) (a) * (double) (b); \
		(usedval) = 1; \
	} else { \
		(lval) = __low; \
		(usedval) = 0; \
	} \
} while (0)
139
140 #elif SIZEOF_ZEND_LONG == 4
141
/* Portable 32-bit fallback: carry out the multiplication in 64 bits so it
 * cannot wrap, then range-check the result against the zend_long limits. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	int64_t __prod64 = (int64_t) (a) * (int64_t) (b); \
	if (__prod64 >= ZEND_LONG_MIN && __prod64 <= ZEND_LONG_MAX) { \
		(lval) = (long) __prod64; \
		(usedval) = 0; \
	} else { \
		(dval) = (double) __prod64; \
		(usedval) = 1; \
	} \
} while (0)
152
153 #else
154
155 #define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
156 long __lres = (a) * (b); \
157 long double __dres = (long double)(a) * (long double)(b); \
158 long double __delta = (long double) __lres - __dres; \
159 if ( ((usedval) = (( __dres + __delta ) != __dres))) { \
160 (dval) = __dres; \
161 } else { \
162 (lval) = __lres; \
163 } \
164 } while (0)
165
166 #endif
167
168 #if defined(__GNUC__) && (defined(__native_client__) || defined(i386))
169
/* Compute nmemb * size + offset in size_t with unsigned overflow
 * detection (i386/NaCl). On overflow, sets *overflow and returns 0;
 * otherwise clears *overflow and returns the exact value. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, bool *overflow)
{
	size_t res = nmemb;
	size_t m_overflow = 0;

	if (ZEND_CONST_COND(offset == 0, 0)) {
		/* mull: edx:eax = res * size; adcl folds the multiply's
		 * carry into edx (m_overflow), so m_overflow != 0 iff the
		 * product exceeds 32 bits. */
		__asm__ ("mull %3\n\tadcl $0,%1"
			: "=&a"(res), "=&d" (m_overflow)
			: "%0"(res),
			  "rm"(size));
	} else {
		/* As above, but addl adds offset to the low word and adcl
		 * also folds that addition's carry into m_overflow. */
		__asm__ ("mull %3\n\taddl %4,%0\n\tadcl $0,%1"
			: "=&a"(res), "=&d" (m_overflow)
			: "%0"(res),
			  "rm"(size),
			  "rm"(offset));
	}

	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
195
196 #elif defined(__GNUC__) && defined(__x86_64__)
197
/* Compute nmemb * size + offset in size_t with unsigned overflow
 * detection (x86-64 / x32). On overflow, sets *overflow and returns 0;
 * otherwise clears *overflow and returns the exact value. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, bool *overflow)
{
	size_t res = nmemb;
	zend_ulong m_overflow = 0;

	/* Pick the operand-size suffix matching size_t: 32-bit on x32,
	 * 64-bit on amd64. */
#ifdef __ILP32__ /* x32 */
# define LP_SUFF "l"
#else /* amd64 */
# define LP_SUFF "q"
#endif

	if (ZEND_CONST_COND(offset == 0, 0)) {
		/* mul: rdx:rax = res * size; adc folds the multiply's carry
		 * into rdx (m_overflow), so m_overflow != 0 iff the product
		 * does not fit in one register. */
		__asm__ ("mul" LP_SUFF " %3\n\t"
			"adc $0,%1"
			: "=&a"(res), "=&d" (m_overflow)
			: "%0"(res),
			  "rm"(size));
	} else {
		/* As above, but add puts offset into the low word and adc
		 * also folds that addition's carry into m_overflow. */
		__asm__ ("mul" LP_SUFF " %3\n\t"
			"add %4,%0\n\t"
			"adc $0,%1"
			: "=&a"(res), "=&d" (m_overflow)
			: "%0"(res),
			  "rm"(size),
			  "rm"(offset));
	}
#undef LP_SUFF
	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
232
233 #elif defined(__GNUC__) && defined(__arm__)
234
/* Compute nmemb * size + offset in size_t with unsigned overflow
 * detection (32-bit ARM). On overflow, sets *overflow and returns 0;
 * otherwise clears *overflow and returns the exact value. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, bool *overflow)
{
	size_t res;
	zend_ulong m_overflow;

	/* umlal: {m_overflow:res} = {0:offset} + nmemb * size, a 64-bit
	 * multiply-accumulate; a non-zero high word means the result does
	 * not fit in 32 bits. */
	__asm__ ("umlal %0,%1,%2,%3"
		: "=r"(res), "=r"(m_overflow)
		: "r"(nmemb),
		  "r"(size),
		  "0"(offset),
		  "1"(0));

	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
254
255 #elif defined(__GNUC__) && defined(__aarch64__)
256
/* Compute nmemb * size + offset in size_t with unsigned overflow
 * detection (AArch64). On overflow, sets *overflow and returns 0;
 * otherwise clears *overflow and returns the exact value. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, bool *overflow)
{
	size_t res;
	zend_ulong m_overflow;

	/* mul/umulh give the low/high 64 bits of nmemb * size; adds adds
	 * offset to the low word, and adc folds the carry into the high
	 * word, so m_overflow != 0 iff the full result exceeds 64 bits. */
	__asm__ ("mul %0,%2,%3\n\tumulh %1,%2,%3\n\tadds %0,%0,%4\n\tadc %1,%1,xzr"
		: "=&r"(res), "=&r"(m_overflow)
		: "r"(nmemb),
		  "r"(size),
		  "r"(offset));

	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
275
276 #elif defined(__GNUC__) && defined(__powerpc64__)
277
/* Compute nmemb * size + offset in size_t with unsigned overflow
 * detection (PowerPC64). On overflow, sets *overflow and returns 0;
 * otherwise clears *overflow and returns the exact value. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, bool *overflow)
{
	size_t res;
	unsigned long m_overflow;

	/* mulld/mulhdu give the low/high 64 bits of nmemb * size; addc
	 * adds offset to the low word recording the carry, and addze folds
	 * that carry into the high word, so m_overflow != 0 iff the full
	 * result exceeds 64 bits. */
	__asm__ ("mulld %0,%2,%3\n\t"
		"mulhdu %1,%2,%3\n\t"
		"addc %0,%0,%4\n\t"
		"addze %1,%1\n"
		: "=&r"(res), "=&r"(m_overflow)
		: "r"(nmemb),
		  "r"(size),
		  "r"(offset));

	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
299
300 #elif SIZEOF_SIZE_T == 4
301
zend_safe_address(size_t nmemb,size_t size,size_t offset,bool * overflow)302 static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, bool *overflow)
303 {
304 uint64_t res = (uint64_t) nmemb * (uint64_t) size + (uint64_t) offset;
305
306 if (UNEXPECTED(res > UINT64_C(0xFFFFFFFF))) {
307 *overflow = 1;
308 return 0;
309 }
310 *overflow = 0;
311 return (size_t) res;
312 }
313
314 #else
315
/* Generic fallback: compute nmemb * size + offset both in size_t (which
 * wraps on overflow — defined behavior for unsigned arithmetic) and in
 * double; _delta is the difference between the two, so (_d + _delta)
 * differs from _d exactly when the integer computation wrapped. On
 * overflow, sets *overflow and returns 0.
 * NOTE(review): with a 64-bit size_t, double's 53-bit mantissa cannot
 * represent all values exactly, so this test is approximate near the
 * boundary — confirm whether any target still reaches this branch. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, bool *overflow)
{
	size_t res = nmemb * size + offset;
	double _d = (double)nmemb * (double)size + (double)offset;
	double _delta = (double)res - _d;

	if (UNEXPECTED((_d + _delta ) != _d)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
329 #endif
330
zend_safe_address_guarded(size_t nmemb,size_t size,size_t offset)331 static zend_always_inline size_t zend_safe_address_guarded(size_t nmemb, size_t size, size_t offset)
332 {
333 bool overflow;
334 size_t ret = zend_safe_address(nmemb, size, offset, &overflow);
335
336 if (UNEXPECTED(overflow)) {
337 zend_error_noreturn(E_ERROR, "Possible integer overflow in memory allocation (%zu * %zu + %zu)", nmemb, size, offset);
338 return 0;
339 }
340 return ret;
341 }
342
343 /* A bit more generic version of the same */
zend_safe_addmult(size_t nmemb,size_t size,size_t offset,const char * message)344 static zend_always_inline size_t zend_safe_addmult(size_t nmemb, size_t size, size_t offset, const char *message)
345 {
346 bool overflow;
347 size_t ret = zend_safe_address(nmemb, size, offset, &overflow);
348
349 if (UNEXPECTED(overflow)) {
350 zend_error_noreturn(E_ERROR, "Possible integer overflow in %s (%zu * %zu + %zu)", message, nmemb, size, offset);
351 return 0;
352 }
353 return ret;
354 }
355
356 #endif /* ZEND_MULTIPLY_H */
357