1 /*
2 +----------------------------------------------------------------------+
3 | Zend Engine |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 1998-2017 Zend Technologies Ltd. (http://www.zend.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 2.00 of the Zend license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.zend.com/license/2_00.txt. |
11 | If you did not receive a copy of the Zend license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@zend.com so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
15 | Authors: Sascha Schumann <sascha@schumann.cx> |
16 | Ard Biesheuvel <ard.biesheuvel@linaro.org> |
17 +----------------------------------------------------------------------+
18 */
19
20 /* $Id$ */
21
22 #ifndef ZEND_MULTIPLY_H
23 #define ZEND_MULTIPLY_H
24
25 #if (defined(__i386__) || defined(__x86_64__)) && defined(__GNUC__)
26
/* Signed multiply with overflow detection (i386/x86-64, GCC-style asm).
 *
 * "imul %3,%0" leaves a*b in __tmpvar and sets the carry flag iff the
 * signed product overflowed; "adc $0,%1" then adds that carry into
 * usedval, which the "1"(0) input constraint primes to 0.  So usedval
 * ends up 1 exactly when the product does not fit in a zend_long.
 *
 * On overflow an approximate product is delivered as a double in (dval);
 * otherwise the exact result goes to (lval).  Only one of the two
 * outputs is written; (usedval) tells the caller which. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	zend_long __tmpvar; \
	__asm__ ("imul %3,%0\n" \
		"adc $0,%1" \
			: "=r"(__tmpvar),"=r"(usedval) \
			: "0"(a), "r"(b), "1"(0)); \
	if (usedval) (dval) = (double) (a) * (double) (b); \
	else (lval) = __tmpvar; \
} while (0)
36
37 #elif defined(__arm__) && defined(__GNUC__)
38
/* Signed multiply with overflow detection (32-bit ARM, GCC-style asm).
 *
 * "smull" produces the full 64-bit signed product in the register pair
 * {__tmpvar, usedval} (low, high words).  The following
 *     sub usedval, usedval, __tmpvar, asr #31
 * leaves usedval == 0 exactly when the high word equals the sign
 * extension of the low word, i.e. when the product fits in 32 bits. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	zend_long __tmpvar; \
	__asm__("smull %0, %1, %2, %3\n" \
		"sub %1, %1, %0, asr #31" \
			: "=r"(__tmpvar), "=r"(usedval) \
			: "r"(a), "r"(b)); \
	if (usedval) (dval) = (double) (a) * (double) (b); \
	else (lval) = __tmpvar; \
} while (0)
48
49 #elif defined(__aarch64__) && defined(__GNUC__)
50
/* Signed multiply with overflow detection (AArch64, GCC-style asm).
 *
 * "mul" yields the low 64 bits of the product and "smulh" the high
 * 64 bits.  usedval = high - (low >> 63) is zero exactly when the high
 * half is the sign extension of the low half, i.e. when the product
 * fits in 64 bits.  The early-clobber "=&r" outputs keep the
 * temporaries distinct from the inputs, because both instructions read
 * a and b after the first output has been written. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	zend_long __tmpvar; \
	__asm__("mul %0, %2, %3\n" \
		"smulh %1, %2, %3\n" \
		"sub %1, %1, %0, asr #63\n" \
			: "=&r"(__tmpvar), "=&r"(usedval) \
			: "r"(a), "r"(b)); \
	if (usedval) (dval) = (double) (a) * (double) (b); \
	else (lval) = __tmpvar; \
} while (0)
61
62 #elif defined(ZEND_WIN32)
63
/* Signed multiply with overflow detection (Windows fallback, no asm).
 *
 * The product is computed twice: in (wrapping) integer arithmetic and
 * in long double.  __delta is the round-off between the two; if adding
 * it back to the long double result changes that result, the integer
 * product lost significant bits, i.e. the multiplication overflowed.
 *
 * NOTE(review): the zend_long multiply in __lres relies on wrap-around
 * of signed overflow, which is formally undefined behavior in C —
 * preserved as-is since the surrounding engine depends on MSVC's
 * concrete behavior here. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	zend_long __lres = (a) * (b); \
	long double __dres = (long double)(a) * (long double)(b); \
	long double __delta = (long double) __lres - __dres; \
	if ( ((usedval) = (( __dres + __delta ) != __dres))) { \
		(dval) = __dres; \
	} else { \
		(lval) = __lres; \
	} \
} while (0)
74
75 #elif defined(__powerpc64__) && defined(__GNUC__)
76
/* Signed multiply with overflow detection (PowerPC64, GCC-style asm).
 *
 * "mulld" yields the low 64 bits of the signed product and "mulhd" the
 * high 64 bits.  The product fits in a zend_long exactly when the high
 * word equals the sign extension of the low word (__low >> 63 — an
 * arithmetic shift of a signed long on this target). */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	long __low, __high; \
	__asm__("mulld %0,%2,%3\n\t" \
		"mulhd %1,%2,%3\n" \
			: "=&r"(__low), "=&r"(__high) \
			: "r"(a), "r"(b)); \
	if ((__low >> 63) != __high) { \
		(dval) = (double) (a) * (double) (b); \
		(usedval) = 1; \
	} else { \
		(lval) = __low; \
		(usedval) = 0; \
	} \
} while (0)
91
92 #elif SIZEOF_ZEND_LONG == 4
93
/* Signed multiply with overflow detection — generic variant for 32-bit
 * zend_long: perform the multiplication in 64-bit arithmetic and check
 * the result against the zend_long range.
 *
 * On overflow (usedval) is set to 1 and the product is delivered as a
 * double in (dval); otherwise (usedval) is 0 and the exact product is
 * stored in (lval).  Exactly one of the two outputs is written.
 *
 * Fix: the in-range result is cast to zend_long, not `long` — this
 * branch exists precisely because zend_long is 32 bits here, and `long`
 * is not guaranteed to match it. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	int64_t __result = (int64_t) (a) * (int64_t) (b); \
	if (__result > ZEND_LONG_MAX || __result < ZEND_LONG_MIN) { \
		(dval) = (double) __result; \
		(usedval) = 1; \
	} else { \
		(lval) = (zend_long) __result; \
		(usedval) = 0; \
	} \
} while (0)
104
105 #else
106
/* Signed multiply with overflow detection — last-resort portable
 * fallback (no asm, zend_long wider than 32 bits).
 *
 * Same long-double trick as the Windows variant: compute the product in
 * wrapping integer arithmetic and in long double; if the round-off
 * __delta is large enough to perturb __dres, the integer product lost
 * significant bits and the multiplication overflowed.
 *
 * NOTE(review): `long __lres` assumes long matches zend_long on every
 * platform that reaches this branch (LLP64 Windows is handled earlier
 * by the ZEND_WIN32 variant) — confirm if new targets are added.  The
 * wrapping signed multiply is formally UB; preserved as-is. */
#define ZEND_SIGNED_MULTIPLY_LONG(a, b, lval, dval, usedval) do { \
	long __lres = (a) * (b); \
	long double __dres = (long double)(a) * (long double)(b); \
	long double __delta = (long double) __lres - __dres; \
	if ( ((usedval) = (( __dres + __delta ) != __dres))) { \
		(dval) = __dres; \
	} else { \
		(lval) = __lres; \
	} \
} while (0)
117
118 #endif
119
120 #if defined(__GNUC__) && (defined(__native_client__) || defined(i386))
121
/* Compute nmemb * size + offset with overflow detection (i386 and
 * Native Client variant).
 *
 * "mull %3" multiplies EAX (pre-loaded with nmemb through res) by size,
 * leaving the 64-bit product in EDX:EAX; "addl"/"adcl" then add offset
 * with carry propagation into EDX.  Any non-zero bits left in EDX
 * (m_overflow) mean the final sum does not fit in 32 bits.
 *
 * Returns the sum with *overflow = 0, or 0 with *overflow = 1 when the
 * computation would overflow a size_t. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, int *overflow)
{
	size_t res = nmemb;
	size_t m_overflow = 0;

	__asm__ ("mull %3\n\taddl %4,%0\n\tadcl $0,%1"
	     : "=&a"(res), "=&d" (m_overflow)
	     : "%0"(res),
	       "rm"(size),
	       "rm"(offset));

	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
140
141 #elif defined(__GNUC__) && defined(__x86_64__)
142
/* Compute nmemb * size + offset with overflow detection (x86-64,
 * including the x32 ILP32 ABI — hence the "l"/"q" suffix selection).
 *
 * The widening "mul" leaves the double-width product in rDX:rAX;
 * "add"/"adc" then fold offset in with carry into the high half.  A
 * non-zero high half (m_overflow) means the sum does not fit in size_t.
 *
 * Returns the sum with *overflow = 0, or 0 with *overflow = 1 when the
 * computation would overflow a size_t. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, int *overflow)
{
	size_t res = nmemb;
	zend_ulong m_overflow = 0;

#ifdef __ILP32__ /* x32 */
# define LP_SUFF "l"
#else /* amd64 */
# define LP_SUFF "q"
#endif

	__asm__ ("mul" LP_SUFF " %3\n\t"
	         "add %4,%0\n\t"
	         "adc $0,%1"
	     : "=&a"(res), "=&d" (m_overflow)
	     : "%0"(res),
	       "rm"(size),
	       "rm"(offset));

#undef LP_SUFF
	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
170
171 #elif defined(__GNUC__) && defined(__arm__)
172
/* Compute nmemb * size + offset with overflow detection (32-bit ARM).
 *
 * "umlal" is an unsigned multiply-accumulate into a 64-bit value: the
 * pair {res, m_overflow} (low, high) is pre-loaded with {offset, 0}
 * through the "0"/"1" matching constraints, then nmemb * size is added
 * to it.  A non-zero high word means the result exceeds 32 bits.
 *
 * Returns the sum with *overflow = 0, or 0 with *overflow = 1 when the
 * computation would overflow a size_t. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, int *overflow)
{
	size_t res;
	zend_ulong m_overflow;

	__asm__ ("umlal %0,%1,%2,%3"
	     : "=r"(res), "=r"(m_overflow)
	     : "r"(nmemb),
	       "r"(size),
	       "0"(offset),
	       "1"(0));

	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
192
193 #elif defined(__GNUC__) && defined(__aarch64__)
194
/* Compute nmemb * size + offset with overflow detection (AArch64).
 *
 * "mul"/"umulh" produce the full 128-bit product of nmemb and size;
 * "adds" adds offset to the low half and "adc ... xzr" folds the carry
 * into the high half.  A non-zero high half (m_overflow) means the sum
 * does not fit in 64 bits.  Early-clobber outputs ("=&r") keep the
 * result registers apart from the still-needed inputs.
 *
 * Returns the sum with *overflow = 0, or 0 with *overflow = 1 when the
 * computation would overflow a size_t. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, int *overflow)
{
	size_t res;
	zend_ulong m_overflow;

	__asm__ ("mul %0,%2,%3\n\tumulh %1,%2,%3\n\tadds %0,%0,%4\n\tadc %1,%1,xzr"
	     : "=&r"(res), "=&r"(m_overflow)
	     : "r"(nmemb),
	       "r"(size),
	       "r"(offset));

	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
213
214 #elif defined(__GNUC__) && defined(__powerpc64__)
215
/* Compute nmemb * size + offset with overflow detection (PowerPC64).
 *
 * "mulld"/"mulhdu" produce the full 128-bit unsigned product of nmemb
 * and size; "addc" adds offset to the low half and "addze" propagates
 * the resulting carry into the high half.  A non-zero high half
 * (m_overflow) means the sum does not fit in 64 bits.
 *
 * Returns the sum with *overflow = 0, or 0 with *overflow = 1 when the
 * computation would overflow a size_t. */
static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, int *overflow)
{
	size_t res;
	unsigned long m_overflow;

	__asm__ ("mulld %0,%2,%3\n\t"
	         "mulhdu %1,%2,%3\n\t"
	         "addc %0,%0,%4\n\t"
	         "addze %1,%1\n"
	     : "=&r"(res), "=&r"(m_overflow)
	     : "r"(nmemb),
	       "r"(size),
	       "r"(offset));

	if (UNEXPECTED(m_overflow)) {
		*overflow = 1;
		return 0;
	}
	*overflow = 0;
	return res;
}
237
238 #elif SIZEOF_SIZE_T == 4
239
zend_safe_address(size_t nmemb,size_t size,size_t offset,int * overflow)240 static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, int *overflow)
241 {
242 uint64_t res = (uint64_t) nmemb * (uint64_t) size + (uint64_t) offset;
243
244 if (UNEXPECTED(res > UINT64_C(0xFFFFFFFF))) {
245 *overflow = 1;
246 return 0;
247 }
248 *overflow = 0;
249 return (size_t) res;
250 }
251
252 #else
253
zend_safe_address(size_t nmemb,size_t size,size_t offset,int * overflow)254 static zend_always_inline size_t zend_safe_address(size_t nmemb, size_t size, size_t offset, int *overflow)
255 {
256 size_t res = nmemb * size + offset;
257 double _d = (double)nmemb * (double)size + (double)offset;
258 double _delta = (double)res - _d;
259
260 if (UNEXPECTED((_d + _delta ) != _d)) {
261 *overflow = 1;
262 return 0;
263 }
264 *overflow = 0;
265 return res;
266 }
267 #endif
268
zend_safe_address_guarded(size_t nmemb,size_t size,size_t offset)269 static zend_always_inline size_t zend_safe_address_guarded(size_t nmemb, size_t size, size_t offset)
270 {
271 int overflow;
272 size_t ret = zend_safe_address(nmemb, size, offset, &overflow);
273
274 if (UNEXPECTED(overflow)) {
275 zend_error_noreturn(E_ERROR, "Possible integer overflow in memory allocation (%zu * %zu + %zu)", nmemb, size, offset);
276 return 0;
277 }
278 return ret;
279 }
280
281 /* A bit more generic version of the same */
zend_safe_addmult(size_t nmemb,size_t size,size_t offset,const char * message)282 static zend_always_inline size_t zend_safe_addmult(size_t nmemb, size_t size, size_t offset, const char *message)
283 {
284 int overflow;
285 size_t ret = zend_safe_address(nmemb, size, offset, &overflow);
286
287 if (UNEXPECTED(overflow)) {
288 zend_error_noreturn(E_ERROR, "Possible integer overflow in %s (%zu * %zu + %zu)", message, nmemb, size, offset);
289 return 0;
290 }
291 return ret;
292 }
293
294 #endif /* ZEND_MULTIPLY_H */
295
296 /*
297 * Local variables:
298 * tab-width: 4
299 * c-basic-offset: 4
300 * indent-tabs-mode: t
301 * End:
302 */
303