xref: /PHP-8.2/ext/opcache/jit/ir/ir_fold.h (revision 2289af88)
1 /*
2  * IR - Lightweight JIT Compilation Framework
3  * (Folding engine rules)
4  * Copyright (C) 2022 Zend by Perforce.
5  * Authors: Dmitry Stogov <dmitry@php.net>
6  *
7  * Based on Mike Pall's implementation for LuaJIT.
8  */
9 
10 /* Constant Folding */
11 IR_FOLD(EQ(C_BOOL, C_BOOL))
12 IR_FOLD(EQ(C_U8, C_U8))
13 IR_FOLD(EQ(C_U16, C_U16))
14 IR_FOLD(EQ(C_U32, C_U32))
15 IR_FOLD(EQ(C_U64, C_U64))
16 IR_FOLD(EQ(C_ADDR, C_ADDR))
17 IR_FOLD(EQ(C_CHAR, C_CHAR))
18 IR_FOLD(EQ(C_I8, C_I8))
19 IR_FOLD(EQ(C_I16, C_I16))
20 IR_FOLD(EQ(C_I32, C_I32))
21 IR_FOLD(EQ(C_I64, C_I64))
22 {
23 	IR_FOLD_BOOL(op1_insn->val.u64 == op2_insn->val.u64);
24 }
25 
26 IR_FOLD(EQ(C_DOUBLE, C_DOUBLE))
27 {
28 	IR_FOLD_BOOL(op1_insn->val.d == op2_insn->val.d);
29 }
30 
31 IR_FOLD(EQ(C_FLOAT, C_FLOAT))
32 {
33 	IR_FOLD_BOOL(op1_insn->val.d == op2_insn->val.d);
34 }
35 
36 IR_FOLD(NE(C_BOOL, C_BOOL))
37 IR_FOLD(NE(C_U8, C_U8))
38 IR_FOLD(NE(C_U16, C_U16))
39 IR_FOLD(NE(C_U32, C_U32))
40 IR_FOLD(NE(C_U64, C_U64))
41 IR_FOLD(NE(C_ADDR, C_ADDR))
42 IR_FOLD(NE(C_CHAR, C_CHAR))
43 IR_FOLD(NE(C_I8, C_I8))
44 IR_FOLD(NE(C_I16, C_I16))
45 IR_FOLD(NE(C_I32, C_I32))
46 IR_FOLD(NE(C_I64, C_I64))
47 {
48 	IR_FOLD_BOOL(op1_insn->val.u64 != op2_insn->val.u64);
49 }
50 
51 IR_FOLD(NE(C_DOUBLE, C_DOUBLE))
52 {
53 	IR_FOLD_BOOL(op1_insn->val.d != op2_insn->val.d);
54 }
55 
56 IR_FOLD(NE(C_FLOAT, C_FLOAT))
57 {
58 	IR_FOLD_BOOL(op1_insn->val.f != op2_insn->val.f);
59 }
60 
61 IR_FOLD(LT(C_BOOL, C_BOOL))
62 IR_FOLD(LT(C_U8, C_U8))
63 IR_FOLD(LT(C_U16, C_U16))
64 IR_FOLD(LT(C_U32, C_U32))
65 IR_FOLD(LT(C_U64, C_U64))
66 IR_FOLD(LT(C_ADDR, C_ADDR))
67 {
68 	IR_FOLD_BOOL(op1_insn->val.u64 < op2_insn->val.u64);
69 }
70 
71 IR_FOLD(LT(C_CHAR, C_CHAR))
72 IR_FOLD(LT(C_I8, C_I8))
73 IR_FOLD(LT(C_I16, C_I16))
74 IR_FOLD(LT(C_I32, C_I32))
75 IR_FOLD(LT(C_I64, C_I64))
76 {
77 	IR_FOLD_BOOL(op1_insn->val.i64 < op2_insn->val.i64);
78 }
79 
80 IR_FOLD(LT(C_DOUBLE, C_DOUBLE))
81 {
82 	IR_FOLD_BOOL(op1_insn->val.d < op2_insn->val.d);
83 }
84 
85 IR_FOLD(LT(C_FLOAT, C_FLOAT))
86 {
87 	IR_FOLD_BOOL(op1_insn->val.f < op2_insn->val.f);
88 }
89 
90 IR_FOLD(GE(C_BOOL, C_BOOL))
91 IR_FOLD(GE(C_U8, C_U8))
92 IR_FOLD(GE(C_U16, C_U16))
93 IR_FOLD(GE(C_U32, C_U32))
94 IR_FOLD(GE(C_U64, C_U64))
95 IR_FOLD(GE(C_ADDR, C_ADDR))
96 {
97 	IR_FOLD_BOOL(op1_insn->val.u64 >= op2_insn->val.u64);
98 }
99 
100 IR_FOLD(GE(C_CHAR, C_CHAR))
101 IR_FOLD(GE(C_I8, C_I8))
102 IR_FOLD(GE(C_I16, C_I16))
103 IR_FOLD(GE(C_I32, C_I32))
104 IR_FOLD(GE(C_I64, C_I64))
105 {
106 	IR_FOLD_BOOL(op1_insn->val.i64 >= op2_insn->val.i64);
107 }
108 
109 IR_FOLD(GE(C_DOUBLE, C_DOUBLE))
110 {
111 	IR_FOLD_BOOL(op1_insn->val.d >= op2_insn->val.d);
112 }
113 
114 IR_FOLD(GE(C_FLOAT, C_FLOAT))
115 {
116 	IR_FOLD_BOOL(op1_insn->val.f >= op2_insn->val.f);
117 }
118 
119 IR_FOLD(LE(C_BOOL, C_BOOL))
120 IR_FOLD(LE(C_U8, C_U8))
121 IR_FOLD(LE(C_U16, C_U16))
122 IR_FOLD(LE(C_U32, C_U32))
123 IR_FOLD(LE(C_U64, C_U64))
124 IR_FOLD(LE(C_ADDR, C_ADDR))
125 {
126 	IR_FOLD_BOOL(op1_insn->val.u64 <= op2_insn->val.u64);
127 }
128 
129 IR_FOLD(LE(C_CHAR, C_CHAR))
130 IR_FOLD(LE(C_I8, C_I8))
131 IR_FOLD(LE(C_I16, C_I16))
132 IR_FOLD(LE(C_I32, C_I32))
133 IR_FOLD(LE(C_I64, C_I64))
134 {
135 	IR_FOLD_BOOL(op1_insn->val.i64 <= op2_insn->val.i64);
136 }
137 
138 IR_FOLD(LE(C_DOUBLE, C_DOUBLE))
139 {
140 	IR_FOLD_BOOL(op1_insn->val.d <= op2_insn->val.d);
141 }
142 
143 IR_FOLD(LE(C_FLOAT, C_FLOAT))
144 {
145 	IR_FOLD_BOOL(op1_insn->val.f <= op2_insn->val.f);
146 }
147 
148 IR_FOLD(GT(C_BOOL, C_BOOL))
149 IR_FOLD(GT(C_U8, C_U8))
150 IR_FOLD(GT(C_U16, C_U16))
151 IR_FOLD(GT(C_U32, C_U32))
152 IR_FOLD(GT(C_U64, C_U64))
153 IR_FOLD(GT(C_ADDR, C_ADDR))
154 {
155 	IR_FOLD_BOOL(op1_insn->val.u64 > op2_insn->val.u64);
156 }
157 
158 IR_FOLD(GT(C_CHAR, C_CHAR))
159 IR_FOLD(GT(C_I8, C_I8))
160 IR_FOLD(GT(C_I16, C_I16))
161 IR_FOLD(GT(C_I32, C_I32))
162 IR_FOLD(GT(C_I64, C_I64))
163 {
164 	IR_FOLD_BOOL(op1_insn->val.i64 > op2_insn->val.i64);
165 }
166 
167 IR_FOLD(GT(C_DOUBLE, C_DOUBLE))
168 {
169 	IR_FOLD_BOOL(op1_insn->val.d > op2_insn->val.d);
170 }
171 
172 IR_FOLD(GT(C_FLOAT, C_FLOAT))
173 {
174 	IR_FOLD_BOOL(op1_insn->val.f > op2_insn->val.f);
175 }
176 
177 IR_FOLD(ULT(C_BOOL, C_BOOL))
178 IR_FOLD(ULT(C_U8, C_U8))
179 IR_FOLD(ULT(C_U16, C_U16))
180 IR_FOLD(ULT(C_U32, C_U32))
181 IR_FOLD(ULT(C_U64, C_U64))
182 IR_FOLD(ULT(C_ADDR, C_ADDR))
183 IR_FOLD(ULT(C_CHAR, C_CHAR))
184 IR_FOLD(ULT(C_I8, C_I8))
185 IR_FOLD(ULT(C_I16, C_I16))
186 IR_FOLD(ULT(C_I32, C_I32))
187 IR_FOLD(ULT(C_I64, C_I64))
188 {
189 	IR_FOLD_BOOL(op1_insn->val.u64 < op2_insn->val.u64);
190 }
191 
192 IR_FOLD(ULT(C_DOUBLE, C_DOUBLE))
193 {
194 	IR_FOLD_BOOL(!(op1_insn->val.d >= op2_insn->val.d));
195 }
196 
197 IR_FOLD(ULT(C_FLOAT, C_FLOAT))
198 {
199 	IR_FOLD_BOOL(!(op1_insn->val.f >= op2_insn->val.f));
200 }
201 
202 IR_FOLD(UGE(C_BOOL, C_BOOL))
203 IR_FOLD(UGE(C_U8, C_U8))
204 IR_FOLD(UGE(C_U16, C_U16))
205 IR_FOLD(UGE(C_U32, C_U32))
206 IR_FOLD(UGE(C_U64, C_U64))
207 IR_FOLD(UGE(C_ADDR, C_ADDR))
208 IR_FOLD(UGE(C_CHAR, C_CHAR))
209 IR_FOLD(UGE(C_I8, C_I8))
210 IR_FOLD(UGE(C_I16, C_I16))
211 IR_FOLD(UGE(C_I32, C_I32))
212 IR_FOLD(UGE(C_I64, C_I64))
213 {
214 	IR_FOLD_BOOL(op1_insn->val.u64 >= op2_insn->val.u64);
215 }
216 
217 IR_FOLD(UGE(C_DOUBLE, C_DOUBLE))
218 {
219 	IR_FOLD_BOOL(!(op1_insn->val.d < op2_insn->val.d));
220 }
221 
222 IR_FOLD(UGE(C_FLOAT, C_FLOAT))
223 {
224 	IR_FOLD_BOOL(!(op1_insn->val.f < op2_insn->val.f));
225 }
226 
227 IR_FOLD(ULE(C_BOOL, C_BOOL))
228 IR_FOLD(ULE(C_U8, C_U8))
229 IR_FOLD(ULE(C_U16, C_U16))
230 IR_FOLD(ULE(C_U32, C_U32))
231 IR_FOLD(ULE(C_U64, C_U64))
232 IR_FOLD(ULE(C_ADDR, C_ADDR))
233 IR_FOLD(ULE(C_CHAR, C_CHAR))
234 IR_FOLD(ULE(C_I8, C_I8))
235 IR_FOLD(ULE(C_I16, C_I16))
236 IR_FOLD(ULE(C_I32, C_I32))
237 IR_FOLD(ULE(C_I64, C_I64))
238 {
239 	IR_FOLD_BOOL(op1_insn->val.u64 <= op2_insn->val.u64);
240 }
241 
242 IR_FOLD(ULE(C_DOUBLE, C_DOUBLE))
243 {
244 	IR_FOLD_BOOL(!(op1_insn->val.d > op2_insn->val.d));
245 }
246 
247 IR_FOLD(ULE(C_FLOAT, C_FLOAT))
248 {
249 	IR_FOLD_BOOL(!(op1_insn->val.f > op2_insn->val.f));
250 }
251 
252 IR_FOLD(UGT(C_BOOL, C_BOOL))
253 IR_FOLD(UGT(C_U8, C_U8))
254 IR_FOLD(UGT(C_U16, C_U16))
255 IR_FOLD(UGT(C_U32, C_U32))
256 IR_FOLD(UGT(C_U64, C_U64))
257 IR_FOLD(UGT(C_ADDR, C_ADDR))
258 IR_FOLD(UGT(C_CHAR, C_CHAR))
259 IR_FOLD(UGT(C_I8, C_I8))
260 IR_FOLD(UGT(C_I16, C_I16))
261 IR_FOLD(UGT(C_I32, C_I32))
262 IR_FOLD(UGT(C_I64, C_I64))
263 {
264 	IR_FOLD_BOOL(op1_insn->val.u64 > op2_insn->val.u64);
265 }
266 
267 IR_FOLD(UGT(C_DOUBLE, C_DOUBLE))
268 {
269 	IR_FOLD_BOOL(!(op1_insn->val.d <= op2_insn->val.d));
270 }
271 
272 IR_FOLD(UGT(C_FLOAT, C_FLOAT))
273 {
274 	IR_FOLD_BOOL(!(op1_insn->val.f <= op2_insn->val.f));
275 }
276 
277 IR_FOLD(ADD(C_U8, C_U8))
278 IR_FOLD(ADD(C_U16, C_U16))
279 IR_FOLD(ADD(C_U32, C_U32))
280 IR_FOLD(ADD(C_U64, C_U64))
281 IR_FOLD(ADD(C_ADDR, C_ADDR))
282 {
283 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
284 	IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
285 }
286 
287 IR_FOLD(ADD(C_I8, C_I8))
288 IR_FOLD(ADD(C_I16, C_I16))
289 IR_FOLD(ADD(C_I32, C_I32))
290 IR_FOLD(ADD(C_I64, C_I64))
291 {
292 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
293 	IR_FOLD_CONST_I(op1_insn->val.i64 + op2_insn->val.i64);
294 }
295 
296 IR_FOLD(ADD(C_DOUBLE, C_DOUBLE))
297 {
298 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
299 	IR_FOLD_CONST_D(op1_insn->val.d + op2_insn->val.d);
300 }
301 
302 IR_FOLD(ADD(C_FLOAT, C_FLOAT))
303 {
304 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
305 	IR_FOLD_CONST_F(op1_insn->val.f + op2_insn->val.f);
306 }
307 
308 IR_FOLD(SUB(C_U8, C_U8))
309 IR_FOLD(SUB(C_U16, C_U16))
310 IR_FOLD(SUB(C_U32, C_U32))
311 IR_FOLD(SUB(C_U64, C_U64))
312 IR_FOLD(SUB(C_ADDR, C_ADDR))
313 {
314 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
315 	IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
316 }
317 
318 IR_FOLD(SUB(C_I8, C_I8))
319 IR_FOLD(SUB(C_I16, C_I16))
320 IR_FOLD(SUB(C_I32, C_I32))
321 IR_FOLD(SUB(C_I64, C_I64))
322 {
323 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
324 	IR_FOLD_CONST_I(op1_insn->val.i64 - op2_insn->val.i64);
325 }
326 
327 IR_FOLD(SUB(C_DOUBLE, C_DOUBLE))
328 {
329 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
330 	IR_FOLD_CONST_D(op1_insn->val.d - op2_insn->val.d);
331 }
332 
333 IR_FOLD(SUB(C_FLOAT, C_FLOAT))
334 {
335 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
336 	IR_FOLD_CONST_F(op1_insn->val.f - op2_insn->val.f);
337 }
338 
339 IR_FOLD(MUL(C_U8, C_U8))
340 IR_FOLD(MUL(C_U16, C_U16))
341 IR_FOLD(MUL(C_U32, C_U32))
342 IR_FOLD(MUL(C_U64, C_U64))
343 IR_FOLD(MUL(C_ADDR, C_ADDR))
344 {
345 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
346 	IR_FOLD_CONST_U(op1_insn->val.u64 * op2_insn->val.u64);
347 }
348 
349 IR_FOLD(MUL(C_I8, C_I8))
350 IR_FOLD(MUL(C_I16, C_I16))
351 IR_FOLD(MUL(C_I32, C_I32))
352 IR_FOLD(MUL(C_I64, C_I64))
353 {
354 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
355 	IR_FOLD_CONST_I(op1_insn->val.i64 * op2_insn->val.i64);
356 }
357 
358 IR_FOLD(MUL(C_DOUBLE, C_DOUBLE))
359 {
360 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
361 	IR_FOLD_CONST_D(op1_insn->val.d * op2_insn->val.d);
362 }
363 
364 IR_FOLD(MUL(C_FLOAT, C_FLOAT))
365 {
366 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
367 	IR_FOLD_CONST_F(op1_insn->val.f * op2_insn->val.f);
368 }
369 
370 IR_FOLD(DIV(C_U8, C_U8))
371 IR_FOLD(DIV(C_U16, C_U16))
372 IR_FOLD(DIV(C_U32, C_U32))
373 IR_FOLD(DIV(C_U64, C_U64))
374 IR_FOLD(DIV(C_ADDR, C_ADDR))
375 {
376 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
377 	if (op2_insn->val.u64 == 0) {
378 		/* division by zero */
379 		IR_FOLD_EMIT;
380 	}
381 	IR_FOLD_CONST_U(op1_insn->val.u64 / op2_insn->val.u64);
382 }
383 
384 IR_FOLD(DIV(C_I8, C_I8))
385 IR_FOLD(DIV(C_I16, C_I16))
386 IR_FOLD(DIV(C_I32, C_I32))
387 IR_FOLD(DIV(C_I64, C_I64))
388 {
389 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
390 	if (op2_insn->val.i64 == 0) {
391 		/* division by zero */
392 		IR_FOLD_EMIT;
393 	}
394 	IR_FOLD_CONST_I(op1_insn->val.i64 / op2_insn->val.i64);
395 }
396 
397 IR_FOLD(DIV(C_DOUBLE, C_DOUBLE))
398 {
399 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
400 	IR_FOLD_CONST_D(op1_insn->val.d / op2_insn->val.d);
401 }
402 
403 IR_FOLD(DIV(C_FLOAT, C_FLOAT))
404 {
405 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
406 	IR_FOLD_CONST_F(op1_insn->val.f / op2_insn->val.f);
407 }
408 
409 IR_FOLD(MOD(C_U8, C_U8))
410 IR_FOLD(MOD(C_U16, C_U16))
411 IR_FOLD(MOD(C_U32, C_U32))
412 IR_FOLD(MOD(C_U64, C_U64))
413 IR_FOLD(MOD(C_ADDR, C_ADDR))
414 {
415 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
416 	if (op2_insn->val.u64 == 0) {
417 		/* division by zero */
418 		IR_FOLD_EMIT;
419 	}
420 	IR_FOLD_CONST_U(op1_insn->val.u64 % op2_insn->val.u64);
421 }
422 
423 IR_FOLD(MOD(C_I8, C_I8))
424 IR_FOLD(MOD(C_I16, C_I16))
425 IR_FOLD(MOD(C_I32, C_I32))
426 IR_FOLD(MOD(C_I64, C_I64))
427 {
428 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
429 	if (op2_insn->val.i64 == 0) {
430 		/* division by zero */
431 		IR_FOLD_EMIT;
432 	}
433 	IR_FOLD_CONST_I(op1_insn->val.i64 % op2_insn->val.i64);
434 }
435 
436 IR_FOLD(NEG(C_I8))
437 IR_FOLD(NEG(C_I16))
438 IR_FOLD(NEG(C_I32))
439 IR_FOLD(NEG(C_I64))
440 {
441 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
442 	IR_FOLD_CONST_I(-op1_insn->val.i64);
443 }
444 
445 IR_FOLD(NEG(C_DOUBLE))
446 {
447 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
448 	IR_FOLD_CONST_D(-op1_insn->val.d);
449 }
450 
451 IR_FOLD(NEG(C_FLOAT))
452 {
453 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
454 	IR_FOLD_CONST_F(-op1_insn->val.f);
455 }
456 
457 IR_FOLD(ABS(C_I8))
458 IR_FOLD(ABS(C_I16))
459 IR_FOLD(ABS(C_I32))
460 IR_FOLD(ABS(C_I64))
461 {
462 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
463 	if (op1_insn->val.i64 >= 0) {
464 		IR_FOLD_COPY(op1);
465 	} else {
466 		IR_FOLD_CONST_I(-op1_insn->val.i64);
467 	}
468 }
469 
470 IR_FOLD(ABS(C_DOUBLE))
471 {
472 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
473 	IR_FOLD_CONST_D(fabs(op1_insn->val.d));
474 }
475 
476 IR_FOLD(ABS(C_FLOAT))
477 {
478 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
479 	IR_FOLD_CONST_F(fabsf(op1_insn->val.f));
480 }
481 
482 IR_FOLD(ADD_OV(C_U8, C_U8))
483 IR_FOLD(ADD_OV(C_U16, C_U16))
484 IR_FOLD(ADD_OV(C_U32, C_U32))
485 IR_FOLD(ADD_OV(C_U64, C_U64))
486 {
487 	ir_type type = IR_OPT_TYPE(opt);
488 	uint64_t max = ((uint64_t)0xffffffffffffffff) >> (64 - ir_type_size[type] * 8);
489 	IR_ASSERT(type == op1_insn->type);
490 	if (op1_insn->val.u64 > max - op2_insn->val.u64) {
491 		IR_FOLD_NEXT;
492 	}
493 	IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
494 }
495 
496 IR_FOLD(ADD_OV(C_I8, C_I8))
497 IR_FOLD(ADD_OV(C_I16, C_I16))
498 IR_FOLD(ADD_OV(C_I32, C_I32))
499 IR_FOLD(ADD_OV(C_I64, C_I64))
500 {
501 	ir_type type = IR_OPT_TYPE(opt);
502 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
503 	int64_t min = - max - 1;
504 	IR_ASSERT(type == op1_insn->type);
505 	if ((op2_insn->val.i64 > 0 && op1_insn->val.i64 > max - op2_insn->val.i64)
506 	 || (op2_insn->val.i64 < 0 && op1_insn->val.i64 < min - op2_insn->val.i64)) {
507 		IR_FOLD_NEXT;
508 	}
509 	IR_FOLD_CONST_I(op1_insn->val.i64 + op2_insn->val.i64);
510 }
511 
512 IR_FOLD(SUB_OV(C_U8, C_U8))
513 IR_FOLD(SUB_OV(C_U16, C_U16))
514 IR_FOLD(SUB_OV(C_U32, C_U32))
515 IR_FOLD(SUB_OV(C_U64, C_U64))
516 {
517 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
518 	if (op2_insn->val.u64 > op1_insn->val.u64) {
519 		IR_FOLD_NEXT;
520 	}
521 	IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
522 }
523 
524 IR_FOLD(SUB_OV(C_I8, C_I8))
525 IR_FOLD(SUB_OV(C_I16, C_I16))
526 IR_FOLD(SUB_OV(C_I32, C_I32))
527 IR_FOLD(SUB_OV(C_I64, C_I64))
528 {
529 	ir_type type = IR_OPT_TYPE(opt);
530 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
531 	int64_t min = - max - 1;
532 	IR_ASSERT(type == op1_insn->type);
533 	if ((op2_insn->val.i64 > 0 && op1_insn->val.i64 < min + op2_insn->val.i64)
534 	 || (op2_insn->val.i64 < 0 && op1_insn->val.i64 > max + op2_insn->val.i64)) {
535 		IR_FOLD_NEXT;
536 	}
537 	IR_FOLD_CONST_I(op1_insn->val.i64 - op2_insn->val.i64);
538 }
539 
540 IR_FOLD(MUL_OV(C_U8, C_U8))
541 IR_FOLD(MUL_OV(C_U16, C_U16))
542 IR_FOLD(MUL_OV(C_U32, C_U32))
543 IR_FOLD(MUL_OV(C_U64, C_U64))
544 {
545 	ir_type type = IR_OPT_TYPE(opt);
546 	uint64_t max = ((uint64_t)0xffffffffffffffff) >> (64 - ir_type_size[type] * 8);
547 	uint64_t res;
548 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
549 	res = op1_insn->val.u64 * op2_insn->val.u64;
550 	if (op1_insn->val.u64 != 0 && res / op1_insn->val.u64 != op2_insn->val.u64 && res <= max) {
551 		IR_FOLD_NEXT;
552 	}
553 	IR_FOLD_CONST_U(res);
554 }
555 
556 IR_FOLD(MUL_OV(C_I8, C_I8))
557 IR_FOLD(MUL_OV(C_I16, C_I16))
558 IR_FOLD(MUL_OV(C_I32, C_I32))
559 IR_FOLD(MUL_OV(C_I64, C_I64))
560 {
561 	ir_type type = IR_OPT_TYPE(opt);
562 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
563 	int64_t min = - max - 1;
564 	int64_t res;
565 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
566 	res = op1_insn->val.i64 * op2_insn->val.i64;
567 	if (op1_insn->val.i64 != 0 && res / op1_insn->val.i64 != op2_insn->val.i64 && res >= min && res <= max) {
568 		IR_FOLD_NEXT;
569 	}
570 	IR_FOLD_CONST_U(res);
571 }
572 
573 IR_FOLD(OVERFLOW(_))
574 {
575 	if (op1_insn->op != IR_ADD_OV && op1_insn->op != IR_SUB_OV && op1_insn->op != IR_MUL_OV) {
576 		IR_FOLD_COPY(IR_FALSE);
577 	}
578 	IR_FOLD_NEXT;
579 }
580 
581 IR_FOLD(NOT(C_BOOL))
582 {
583 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
584 	IR_FOLD_BOOL(!op1_insn->val.u64);
585 }
586 
587 IR_FOLD(NOT(C_U8))
588 IR_FOLD(NOT(C_CHAR))
589 {
590 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
591 	IR_FOLD_CONST_U(~op1_insn->val.u8);
592 }
593 
594 IR_FOLD(NOT(C_I8))
595 {
596 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
597 	IR_FOLD_CONST_I(~op1_insn->val.i8);
598 }
599 
600 IR_FOLD(NOT(C_U16))
601 {
602 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
603 	IR_FOLD_CONST_U(~op1_insn->val.u16);
604 }
605 
606 IR_FOLD(NOT(C_I16))
607 {
608 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
609 	IR_FOLD_CONST_I(~op1_insn->val.i16);
610 }
611 
612 IR_FOLD(NOT(C_U32))
613 {
614 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
615 	IR_FOLD_CONST_U(~op1_insn->val.u32);
616 }
617 
618 IR_FOLD(NOT(C_I32))
619 {
620 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
621 	IR_FOLD_CONST_I(~op1_insn->val.i32);
622 }
623 
624 IR_FOLD(NOT(C_U64))
625 IR_FOLD(NOT(C_I64))
626 {
627 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
628 	IR_FOLD_CONST_U(~op1_insn->val.u64);
629 }
630 
631 IR_FOLD(OR(C_BOOL, C_BOOL))
632 {
633 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
634 	IR_FOLD_BOOL(op1_insn->val.b || op2_insn->val.b);
635 }
636 
637 IR_FOLD(OR(C_CHAR, C_CHAR))
638 IR_FOLD(OR(C_U8, C_U8))
639 IR_FOLD(OR(C_U16, C_U16))
640 IR_FOLD(OR(C_U32, C_U32))
641 IR_FOLD(OR(C_U64, C_U64))
642 {
643 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
644 	IR_FOLD_CONST_U(op1_insn->val.u64 | op2_insn->val.u64);
645 }
646 
647 IR_FOLD(OR(C_I8, C_I8))
648 IR_FOLD(OR(C_I16, C_I16))
649 IR_FOLD(OR(C_I32, C_I32))
650 IR_FOLD(OR(C_I64, C_I64))
651 {
652 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
653 	IR_FOLD_CONST_I(op1_insn->val.i64 | op2_insn->val.i64);
654 }
655 
656 IR_FOLD(AND(C_BOOL, C_BOOL))
657 {
658 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
659 	IR_FOLD_BOOL(op1_insn->val.b && op2_insn->val.b);
660 }
661 
662 IR_FOLD(AND(C_CHAR, C_CHAR))
663 IR_FOLD(AND(C_U8, C_U8))
664 IR_FOLD(AND(C_U16, C_U16))
665 IR_FOLD(AND(C_U32, C_U32))
666 IR_FOLD(AND(C_U64, C_U64))
667 {
668 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
669 	IR_FOLD_CONST_U(op1_insn->val.u64 & op2_insn->val.u64);
670 }
671 
672 IR_FOLD(AND(C_I8, C_I8))
673 IR_FOLD(AND(C_I16, C_I16))
674 IR_FOLD(AND(C_I32, C_I32))
675 IR_FOLD(AND(C_I64, C_I64))
676 {
677 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
678 	IR_FOLD_CONST_I(op1_insn->val.i64 & op2_insn->val.i64);
679 }
680 
681 IR_FOLD(XOR(C_BOOL, C_BOOL))
682 {
683 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
684 	IR_FOLD_BOOL(op1_insn->val.b != op2_insn->val.b);
685 }
686 
687 IR_FOLD(XOR(C_U8, C_U8))
688 IR_FOLD(XOR(C_CHAR, C_CHAR))
689 {
690 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
691 	IR_FOLD_CONST_U(op1_insn->val.u8 ^ op2_insn->val.u8);
692 }
693 
694 IR_FOLD(XOR(C_I8, C_I8))
695 {
696 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
697 	IR_FOLD_CONST_I(op1_insn->val.i8 ^ op2_insn->val.i8);
698 }
699 
700 IR_FOLD(XOR(C_U16, C_U16))
701 {
702 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
703 	IR_FOLD_CONST_U(op1_insn->val.u16 ^ op2_insn->val.u16);
704 }
705 
706 IR_FOLD(XOR(C_I16, C_I16))
707 {
708 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
709 	IR_FOLD_CONST_I(op1_insn->val.i16 ^ op2_insn->val.i16);
710 }
711 
712 IR_FOLD(XOR(C_U32, C_U32))
713 {
714 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
715 	IR_FOLD_CONST_U(op1_insn->val.u32 ^ op2_insn->val.u32);
716 }
717 
718 IR_FOLD(XOR(C_I32, C_I32))
719 {
720 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
721 	IR_FOLD_CONST_I(op1_insn->val.i32 ^ op2_insn->val.i32);
722 }
723 
724 IR_FOLD(XOR(C_U64, C_U64))
725 {
726 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
727 	IR_FOLD_CONST_U(op1_insn->val.u64 ^ op2_insn->val.u64);
728 }
729 
730 IR_FOLD(XOR(C_I64, C_I64))
731 {
732 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
733 	IR_FOLD_CONST_I(op1_insn->val.i64 ^ op2_insn->val.i64);
734 }
735 
736 IR_FOLD(SHL(C_U8, C_U8))
737 IR_FOLD(SHL(C_CHAR, C_CHAR))
738 {
739 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
740 	IR_FOLD_CONST_U(op1_insn->val.u8 << op2_insn->val.u8);
741 }
742 
743 IR_FOLD(SHL(C_I8, C_I8))
744 {
745 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
746 	IR_FOLD_CONST_I(op1_insn->val.i8 << op2_insn->val.i8);
747 }
748 
749 IR_FOLD(SHL(C_U16, C_U16))
750 {
751 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
752 	IR_FOLD_CONST_U(op1_insn->val.u16 << op2_insn->val.u16);
753 }
754 
755 IR_FOLD(SHL(C_I16, C_I16))
756 {
757 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
758 	IR_FOLD_CONST_I(op1_insn->val.i16 << op2_insn->val.i16);
759 }
760 
761 IR_FOLD(SHL(C_U32, C_U32))
762 {
763 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
764 	IR_FOLD_CONST_U(op1_insn->val.u32 << op2_insn->val.u32);
765 }
766 
767 IR_FOLD(SHL(C_I32, C_I32))
768 {
769 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
770 	IR_FOLD_CONST_I(op1_insn->val.i32 << op2_insn->val.i32);
771 }
772 
773 IR_FOLD(SHL(C_U64, C_U64))
774 IR_FOLD(SHL(C_I64, C_I64))
775 {
776 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
777 	IR_FOLD_CONST_U(op1_insn->val.u64 << op2_insn->val.u64);
778 }
779 
780 IR_FOLD(SHR(C_U8, C_U8))
781 IR_FOLD(SHR(C_CHAR, C_CHAR))
782 {
783 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
784 	IR_FOLD_CONST_U(op1_insn->val.u8 >> op2_insn->val.u8);
785 }
786 
787 IR_FOLD(SHR(C_I8, C_I8))
788 {
789 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
790 	IR_FOLD_CONST_I((int8_t)(op1_insn->val.u8 >> op2_insn->val.u8));
791 }
792 
793 IR_FOLD(SHR(C_U16, C_U16))
794 {
795 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
796 	IR_FOLD_CONST_U(op1_insn->val.u16 >> op2_insn->val.u16);
797 }
798 
799 IR_FOLD(SHR(C_I16, C_I16))
800 {
801 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
802 	IR_FOLD_CONST_U((int16_t)(op1_insn->val.u16 >> op2_insn->val.u16));
803 }
804 
805 IR_FOLD(SHR(C_U32, C_U32))
806 {
807 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
808 	IR_FOLD_CONST_U(op1_insn->val.u32 >> op2_insn->val.u32);
809 }
810 
811 IR_FOLD(SHR(C_I32, C_I32))
812 {
813 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
814 	IR_FOLD_CONST_U((int32_t)(op1_insn->val.u32 >> op2_insn->val.u32));
815 }
816 
817 IR_FOLD(SHR(C_U64, C_U64))
818 IR_FOLD(SHR(C_I64, C_I64))
819 {
820 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
821 	IR_FOLD_CONST_U(op1_insn->val.u64 >> op2_insn->val.u64);
822 }
823 
824 IR_FOLD(SAR(C_U8, C_U8))
825 IR_FOLD(SAR(C_CHAR, C_CHAR))
826 {
827 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
828 	IR_FOLD_CONST_U((uint8_t)(op1_insn->val.i8 >> op2_insn->val.i8));
829 }
830 
831 IR_FOLD(SAR(C_I8, C_I8))
832 {
833 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
834 	IR_FOLD_CONST_I(op1_insn->val.i8 >> op2_insn->val.i8);
835 }
836 
837 IR_FOLD(SAR(C_U16, C_U16))
838 {
839 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
840 	IR_FOLD_CONST_U((uint16_t)(op1_insn->val.i16 >> op2_insn->val.i16));
841 }
842 
843 IR_FOLD(SAR(C_I16, C_I16))
844 {
845 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
846 	IR_FOLD_CONST_I(op1_insn->val.i16 >> op2_insn->val.i16);
847 }
848 
849 IR_FOLD(SAR(C_U32, C_U32))
850 {
851 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
852 	IR_FOLD_CONST_U((uint32_t)(op1_insn->val.i32 >> op2_insn->val.i32));
853 }
854 
855 IR_FOLD(SAR(C_I32, C_I32))
856 {
857 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
858 	IR_FOLD_CONST_I(op1_insn->val.i32 >> op2_insn->val.i32);
859 }
860 
861 IR_FOLD(SAR(C_U64, C_U64))
862 IR_FOLD(SAR(C_I64, C_I64))
863 {
864 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
865 	IR_FOLD_CONST_I(op1_insn->val.i64 >> op2_insn->val.i64);
866 }
867 
868 IR_FOLD(ROL(C_U8, C_U8))
869 IR_FOLD(ROL(C_CHAR, C_CHAR))
870 {
871 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
872 	IR_FOLD_CONST_U(ir_rol8(op1_insn->val.u8, op2_insn->val.u8));
873 }
874 
875 IR_FOLD(ROL(C_I8, C_I8))
876 {
877 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
878 	IR_FOLD_CONST_I((int8_t)ir_rol8(op1_insn->val.u8, op2_insn->val.u8));
879 }
880 
881 IR_FOLD(ROL(C_U16, C_U16))
882 {
883 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
884 	IR_FOLD_CONST_U(ir_rol16(op1_insn->val.u16, op2_insn->val.u16));
885 }
886 
887 IR_FOLD(ROL(C_I16, C_I16))
888 {
889 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
890 	IR_FOLD_CONST_I((int16_t)ir_rol16(op1_insn->val.u16, op2_insn->val.u16));
891 }
892 
893 IR_FOLD(ROL(C_U32, C_U32))
894 {
895 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
896 	IR_FOLD_CONST_U(ir_rol32(op1_insn->val.u32, op2_insn->val.u32));
897 }
898 
899 IR_FOLD(ROL(C_I32, C_I32))
900 {
901 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
902 	IR_FOLD_CONST_I((int32_t)ir_rol32(op1_insn->val.u32, op2_insn->val.u32));
903 }
904 
905 IR_FOLD(ROL(C_U64, C_U64))
906 IR_FOLD(ROL(C_I64, C_I64))
907 {
908 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
909 	IR_FOLD_CONST_U(ir_rol64(op1_insn->val.u64, op2_insn->val.u64));
910 }
911 
912 IR_FOLD(ROR(C_U8, C_U8))
913 IR_FOLD(ROR(C_CHAR, C_CHAR))
914 {
915 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
916 	IR_FOLD_CONST_U(ir_ror8(op1_insn->val.u8, op2_insn->val.u8));
917 }
918 
919 IR_FOLD(ROR(C_I8, C_I8))
920 {
921 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
922 	IR_FOLD_CONST_I((int8_t)ir_ror8(op1_insn->val.u8, op2_insn->val.u8));
923 }
924 
925 IR_FOLD(ROR(C_U16, C_U16))
926 {
927 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
928 	IR_FOLD_CONST_U(ir_ror16(op1_insn->val.u16, op2_insn->val.u16));
929 }
930 
931 IR_FOLD(ROR(C_I16, C_I16))
932 {
933 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
934 	IR_FOLD_CONST_I((int16_t)ir_ror16(op1_insn->val.u16, op2_insn->val.u16));
935 }
936 
937 IR_FOLD(ROR(C_U32, C_U32))
938 {
939 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
940 	IR_FOLD_CONST_U(ir_ror32(op1_insn->val.u32, op2_insn->val.u32));
941 }
942 
943 IR_FOLD(ROR(C_I32, C_I32))
944 {
945 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
946 	IR_FOLD_CONST_I((int32_t)ir_ror32(op1_insn->val.u32, op2_insn->val.u32));
947 }
948 
949 IR_FOLD(ROR(C_U64, C_U64))
950 IR_FOLD(ROR(C_I64, C_I64))
951 {
952 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
953 	IR_FOLD_CONST_U(ir_ror64(op1_insn->val.u64, op2_insn->val.u64));
954 }
955 
956 //IR_FOLD(BSWAP(CONST))
957 //TODO: bswap
958 
959 IR_FOLD(MIN(C_BOOL, C_BOOL))
960 IR_FOLD(MIN(C_U8, C_U8))
961 IR_FOLD(MIN(C_U16, C_U16))
962 IR_FOLD(MIN(C_U32, C_U32))
963 IR_FOLD(MIN(C_U64, C_U64))
964 IR_FOLD(MIN(C_ADDR, C_ADDR))
965 {
966 	IR_FOLD_COPY(op1_insn->val.u64 <= op2_insn->val.u64 ? op1 : op2);
967 }
968 
969 IR_FOLD(MIN(C_CHAR, C_CHAR))
970 IR_FOLD(MIN(C_I8, C_U8))
971 IR_FOLD(MIN(C_I16, C_U16))
972 IR_FOLD(MIN(C_I32, C_U32))
973 IR_FOLD(MIN(C_I64, C_U64))
974 {
975 	IR_FOLD_COPY(op1_insn->val.i64 <= op2_insn->val.i64 ? op1 : op2);
976 }
977 
978 IR_FOLD(MIN(C_DOUBLE, C_DOUBLE))
979 {
980 	IR_FOLD_COPY(op1_insn->val.d <= op2_insn->val.d ? op1 : op2);
981 }
982 
983 IR_FOLD(MIN(C_FLOAT, C_FLOAT))
984 {
985 	IR_FOLD_COPY(op1_insn->val.f <= op2_insn->val.f ? op1 : op2);
986 }
987 
988 IR_FOLD(MAX(C_BOOL, C_BOOL))
989 IR_FOLD(MAX(C_U8, C_U8))
990 IR_FOLD(MAX(C_U16, C_U16))
991 IR_FOLD(MAX(C_U32, C_U32))
992 IR_FOLD(MAX(C_U64, C_U64))
993 IR_FOLD(MAX(C_ADDR, C_ADDR))
994 {
995 	IR_FOLD_COPY(op1_insn->val.u64 >= op2_insn->val.u64 ? op1 : op2);
996 }
997 
998 IR_FOLD(MAX(C_CHAR, C_CHAR))
999 IR_FOLD(MAX(C_I8, C_U8))
1000 IR_FOLD(MAX(C_I16, C_U16))
1001 IR_FOLD(MAX(C_I32, C_U32))
1002 IR_FOLD(MAX(C_I64, C_U64))
1003 {
1004 	IR_FOLD_COPY(op1_insn->val.i64 >= op2_insn->val.i64 ? op1 : op2);
1005 }
1006 
1007 IR_FOLD(MAX(C_DOUBLE, C_DOUBLE))
1008 {
1009 	IR_FOLD_COPY(op1_insn->val.d >= op2_insn->val.d ? op1 : op2);
1010 }
1011 
1012 IR_FOLD(MAX(C_FLOAT, C_FLOAT))
1013 {
1014 	IR_FOLD_COPY(op1_insn->val.f >= op2_insn->val.f ? op1 : op2);
1015 }
1016 
1017 IR_FOLD(SEXT(C_I8))
1018 IR_FOLD(SEXT(C_U8))
1019 IR_FOLD(SEXT(C_BOOL))
1020 {
1021 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1022 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1023 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i8);
1024 }
1025 
1026 IR_FOLD(SEXT(C_I16))
1027 IR_FOLD(SEXT(C_U16))
1028 {
1029 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1030 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1031 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i16);
1032 }
1033 
1034 IR_FOLD(SEXT(C_I32))
1035 IR_FOLD(SEXT(C_U32))
1036 {
1037 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1038 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1039 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i32);
1040 }
1041 
1042 IR_FOLD(ZEXT(C_I8))
1043 IR_FOLD(ZEXT(C_U8))
1044 IR_FOLD(ZEXT(C_BOOL))
1045 {
1046 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1047 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1048 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u8);
1049 }
1050 
1051 IR_FOLD(ZEXT(C_I16))
1052 IR_FOLD(ZEXT(C_U16))
1053 {
1054 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1055 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1056 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u16);
1057 }
1058 
1059 IR_FOLD(ZEXT(C_I32))
1060 IR_FOLD(ZEXT(C_U32))
1061 {
1062 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1063 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1064 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u32);
1065 }
1066 
1067 IR_FOLD(TRUNC(C_I16))
1068 IR_FOLD(TRUNC(C_I32))
1069 IR_FOLD(TRUNC(C_I64))
1070 IR_FOLD(TRUNC(C_U16))
1071 IR_FOLD(TRUNC(C_U32))
1072 IR_FOLD(TRUNC(C_U64))
1073 {
1074 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1075 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] < ir_type_size[op1_insn->type]);
1076 	switch (IR_OPT_TYPE(opt)) {
1077 		default:
1078 			IR_ASSERT(0);
1079 		case IR_I8:
1080 			IR_FOLD_CONST_I(op1_insn->val.i8);
1081 		case IR_I16:
1082 			IR_FOLD_CONST_I(op1_insn->val.i16);
1083 		case IR_I32:
1084 			IR_FOLD_CONST_I(op1_insn->val.i32);
1085 		case IR_U8:
1086 			IR_FOLD_CONST_U(op1_insn->val.u8);
1087 		case IR_U16:
1088 			IR_FOLD_CONST_U(op1_insn->val.u16);
1089 		case IR_U32:
1090 			IR_FOLD_CONST_U(op1_insn->val.u32);
1091 	}
1092 }
1093 
1094 
1095 IR_FOLD(BITCAST(C_I8))
1096 IR_FOLD(BITCAST(C_I16))
1097 IR_FOLD(BITCAST(C_I32))
1098 IR_FOLD(BITCAST(C_I64))
1099 IR_FOLD(BITCAST(C_U8))
1100 IR_FOLD(BITCAST(C_U16))
1101 IR_FOLD(BITCAST(C_U32))
1102 IR_FOLD(BITCAST(C_U64))
1103 IR_FOLD(BITCAST(C_FLOAT))
1104 IR_FOLD(BITCAST(C_DOUBLE))
1105 IR_FOLD(BITCAST(C_BOOL))
1106 IR_FOLD(BITCAST(C_CHAR))
1107 IR_FOLD(BITCAST(C_ADDR))
1108 {
1109 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] == ir_type_size[op1_insn->type]);
1110 	switch (IR_OPT_TYPE(opt)) {
1111 		default:
1112 			IR_ASSERT(0);
1113 		case IR_BOOL:
1114 			IR_FOLD_BOOL(op1_insn->val.i8 != 0);
1115 		case IR_I8:
1116 			IR_FOLD_CONST_I(op1_insn->val.i8);
1117 		case IR_I16:
1118 			IR_FOLD_CONST_I(op1_insn->val.i16);
1119 		case IR_I32:
1120 			IR_FOLD_CONST_I(op1_insn->val.i32);
1121 		case IR_I64:
1122 			IR_FOLD_CONST_I(op1_insn->val.i64);
1123 		case IR_U8:
1124 			IR_FOLD_CONST_U(op1_insn->val.u8);
1125 		case IR_U16:
1126 			IR_FOLD_CONST_U(op1_insn->val.u16);
1127 		case IR_U32:
1128 			IR_FOLD_CONST_U(op1_insn->val.u32);
1129 		case IR_U64:
1130 			IR_FOLD_CONST_U(op1_insn->val.u64);
1131 		case IR_FLOAT:
1132 			IR_FOLD_CONST_F(op1_insn->val.f);
1133 		case IR_DOUBLE:
1134 			IR_FOLD_CONST_D(op1_insn->val.d);
1135 		case IR_CHAR:
1136 			IR_FOLD_CONST_I(op1_insn->val.c);
1137 		case IR_ADDR:
1138 			IR_FOLD_CONST_U(op1_insn->val.addr);
1139 	}
1140 }
1141 
1142 IR_FOLD(INT2FP(C_I8))
1143 IR_FOLD(INT2FP(C_I16))
1144 IR_FOLD(INT2FP(C_I32))
1145 IR_FOLD(INT2FP(C_I64))
1146 {
1147 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1148 		IR_FOLD_CONST_D((double)op1_insn->val.i64);
1149 	} else {
1150 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1151 		IR_FOLD_CONST_F((float)op1_insn->val.i64);
1152 	}
1153 }
1154 
1155 IR_FOLD(INT2FP(C_U8))
1156 IR_FOLD(INT2FP(C_U16))
1157 IR_FOLD(INT2FP(C_U32))
1158 IR_FOLD(INT2FP(C_U64))
1159 {
1160 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1161 		IR_FOLD_CONST_D((double)op1_insn->val.u64);
1162 	} else {
1163 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1164 		IR_FOLD_CONST_F((float)op1_insn->val.u64);
1165 	}
1166 }
1167 
1168 IR_FOLD(FP2INT(C_FLOAT))
1169 {
1170 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1171 	switch (IR_OPT_TYPE(opt)) {
1172 		default:
1173 			IR_ASSERT(0);
1174 		case IR_I8:
1175 			IR_FOLD_CONST_I((int8_t)op1_insn->val.f);
1176 		case IR_I16:
1177 			IR_FOLD_CONST_I((int16_t)op1_insn->val.f);
1178 		case IR_I32:
1179 			IR_FOLD_CONST_I((int32_t)op1_insn->val.f);
1180 		case IR_I64:
1181 			IR_FOLD_CONST_I((int64_t)op1_insn->val.f);
1182 		case IR_U8:
1183 			IR_FOLD_CONST_U((uint8_t)op1_insn->val.f);
1184 		case IR_U16:
1185 			IR_FOLD_CONST_U((uint16_t)op1_insn->val.f);
1186 		case IR_U32:
1187 			IR_FOLD_CONST_U((uint32_t)op1_insn->val.f);
1188 		case IR_U64:
1189 			IR_FOLD_CONST_U((uint64_t)op1_insn->val.f);
1190 	}
1191 }
1192 
1193 IR_FOLD(FP2INT(C_DOUBLE))
1194 {
1195 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1196 	switch (IR_OPT_TYPE(opt)) {
1197 		default:
1198 			IR_ASSERT(0);
1199 		case IR_I8:
1200 			IR_FOLD_CONST_I((int8_t)op1_insn->val.d);
1201 		case IR_I16:
1202 			IR_FOLD_CONST_I((int16_t)op1_insn->val.d);
1203 		case IR_I32:
1204 			IR_FOLD_CONST_I((int32_t)op1_insn->val.d);
1205 		case IR_I64:
1206 			IR_FOLD_CONST_I((int64_t)op1_insn->val.d);
1207 		case IR_U8:
1208 			IR_FOLD_CONST_U((uint8_t)op1_insn->val.d);
1209 		case IR_U16:
1210 			IR_FOLD_CONST_U((uint16_t)op1_insn->val.d);
1211 		case IR_U32:
1212 			IR_FOLD_CONST_U((uint32_t)op1_insn->val.d);
1213 		case IR_U64:
1214 			IR_FOLD_CONST_U((uint64_t)op1_insn->val.d);
1215 	}
1216 }
1217 
1218 IR_FOLD(FP2FP(C_FLOAT))
1219 {
1220 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1221 		IR_FOLD_CONST_D((double)op1_insn->val.f);
1222 	} else {
1223 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1224 		IR_FOLD_COPY(op1);
1225 	}
1226 }
1227 
1228 IR_FOLD(FP2FP(C_DOUBLE))
1229 {
1230 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1231 		IR_FOLD_COPY(op1);
1232 	} else {
1233 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1234 		IR_FOLD_CONST_F((float)op1_insn->val.d);
1235 	}
1236 }
1237 
1238 // TODO: constant functions (e.g.  sin, cos)
1239 
1240 /* Copy Propagation */
1241 IR_FOLD(COPY(_))
1242 {
1243 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1244 	if (!op2) {
1245 		IR_FOLD_COPY(op1);
1246 	}
1247 	/* skip CSE */
1248 	IR_FOLD_EMIT;
1249 }
1250 
1251 IR_FOLD(PHI(_, _)) // TODO: PHI(_, _, _)
1252 {
1253 	if (op2 == op3 && op3 != IR_UNUSED) {
1254 		IR_FOLD_COPY(op2);
1255 	}
1256 	/* skip CSE */
1257 	opt = opt | (3 << IR_OPT_INPUTS_SHIFT);
1258 	IR_FOLD_EMIT;
1259 }
1260 
1261 IR_FOLD(COND(C_BOOL, _)) // TODO: COND(CONST, _, _)
1262 IR_FOLD(COND(C_U8, _))
1263 IR_FOLD(COND(C_U16, _))
1264 IR_FOLD(COND(C_U32, _))
1265 IR_FOLD(COND(C_U64, _))
1266 IR_FOLD(COND(C_ADDR, _))
1267 IR_FOLD(COND(C_CHAR, _))
1268 IR_FOLD(COND(C_I8, _))
1269 IR_FOLD(COND(C_I16, _))
1270 IR_FOLD(COND(C_I32, _))
1271 IR_FOLD(COND(C_I64, _))
1272 IR_FOLD(COND(C_DOUBLE, _))
1273 IR_FOLD(COND(C_FLOAT, _))
1274 {
1275 	if (ir_const_is_true(op1_insn)) {
1276 		IR_FOLD_COPY(op2);
1277 	} else {
1278 		IR_FOLD_COPY(op3);
1279 	}
1280 }
1281 
1282 IR_FOLD(BITCAST(_))
1283 {
1284 	if (IR_OPT_TYPE(opt) == op1_insn->type) {
1285 		IR_FOLD_COPY(op1);
1286 	}
1287 	IR_FOLD_NEXT;
1288 }
1289 
1290 /* Algebraic simplifications */
1291 IR_FOLD(ABS(ABS))
1292 {
1293 	/* abs(x = abs(y)) => x */
1294 	IR_FOLD_COPY(op1);
1295 }
1296 
1297 IR_FOLD(ABS(NEG))
1298 {
1299 	/* abs(neg(y)) => abs(y) */
1300 	op1 = op1_insn->op1;
1301 	IR_FOLD_RESTART;
1302 }
1303 
1304 IR_FOLD(NEG(NEG))
1305 IR_FOLD(NOT(NOT))
1306 IR_FOLD(BSWAP(BSWAP))
1307 {
1308 	/* f(f(y)) => y */
1309 	IR_FOLD_COPY(op1_insn->op1);
1310 }
1311 
1312 IR_FOLD(ADD(_, C_U8))
1313 IR_FOLD(ADD(_, C_U16))
1314 IR_FOLD(ADD(_, C_U32))
1315 IR_FOLD(ADD(_, C_U64))
1316 IR_FOLD(ADD(_, C_I8))
1317 IR_FOLD(ADD(_, C_I16))
1318 IR_FOLD(ADD(_, C_I32))
1319 IR_FOLD(ADD(_, C_I64))
1320 IR_FOLD(ADD(_, C_ADDR))
1321 IR_FOLD(SUB(_, C_U8))
1322 IR_FOLD(SUB(_, C_U16))
1323 IR_FOLD(SUB(_, C_U32))
1324 IR_FOLD(SUB(_, C_U64))
1325 IR_FOLD(SUB(_, C_I8))
1326 IR_FOLD(SUB(_, C_I16))
1327 IR_FOLD(SUB(_, C_I32))
1328 IR_FOLD(SUB(_, C_I64))
1329 IR_FOLD(SUB(_, C_ADDR))
1330 IR_FOLD(ADD_OV(_, C_U8))
1331 IR_FOLD(ADD_OV(_, C_U16))
1332 IR_FOLD(ADD_OV(_, C_U32))
1333 IR_FOLD(ADD_OV(_, C_U64))
1334 IR_FOLD(ADD_OV(_, C_I8))
1335 IR_FOLD(ADD_OV(_, C_I16))
1336 IR_FOLD(ADD_OV(_, C_I32))
1337 IR_FOLD(ADD_OV(_, C_I64))
1338 IR_FOLD(ADD_OV(_, C_ADDR))
1339 IR_FOLD(SUB_OV(_, C_U8))
1340 IR_FOLD(SUB_OV(_, C_U16))
1341 IR_FOLD(SUB_OV(_, C_U32))
1342 IR_FOLD(SUB_OV(_, C_U64))
1343 IR_FOLD(SUB_OV(_, C_I8))
1344 IR_FOLD(SUB_OV(_, C_I16))
1345 IR_FOLD(SUB_OV(_, C_I32))
1346 IR_FOLD(SUB_OV(_, C_I64))
1347 IR_FOLD(SUB_OV(_, C_ADDR))
1348 {
1349 	if (op2_insn->val.u64 == 0) {
1350 		/* a +/- 0 => a */
1351 		IR_FOLD_COPY(op1);
1352 	}
1353 	IR_FOLD_NEXT;
1354 }
1355 
1356 IR_FOLD(SUB(C_I8, _))
1357 IR_FOLD(SUB(C_I16, _))
1358 IR_FOLD(SUB(C_I32, _))
1359 IR_FOLD(SUB(C_I64, _))
1360 {
1361 	if (op1_insn->val.u64 == 0) {
1362 		/* 0 - a => -a (invalid for +0.0) */
1363 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1364 		op1 = op2;
1365 		op2 = IR_UNUSED;
1366 		IR_FOLD_RESTART;
1367 	}
1368 	IR_FOLD_NEXT;
1369 }
1370 
1371 IR_FOLD(ADD(NEG, _))
1372 {
1373 	/* (-a) + b => b - a */
1374 	opt++; /* ADD -> SUB */
1375 	op1 = op2;
1376 	op2 = op1_insn->op1;
1377 	IR_FOLD_RESTART;
1378 }
1379 
1380 IR_FOLD(ADD(_, NEG))
1381 IR_FOLD(SUB(_, NEG))
1382 {
1383 	/* a + (-b) => a - b */
1384 	opt ^= 1; /* ADD <-> SUB */
1385 	op2 = op2_insn->op1;
1386 	IR_FOLD_RESTART;
1387 }
1388 
1389 IR_FOLD(ADD(SUB, _))
1390 {
1391 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1392 		if (op1_insn->op2 == op2) {
1393 			/* (a - b) + b => a */
1394 			IR_FOLD_COPY(op1_insn->op1);
1395 		}
1396 	}
1397 	IR_FOLD_NEXT;
1398 }
1399 
1400 IR_FOLD(ADD(_, SUB))
1401 {
1402 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1403 		if (op2_insn->op2 == op1) {
1404 			/* a + (b - a) => b */
1405 			IR_FOLD_COPY(op2_insn->op1);
1406 		}
1407 	}
1408 	IR_FOLD_NEXT;
1409 }
1410 
1411 IR_FOLD(SUB(ADD, _))
1412 {
1413 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1414 		if (op1_insn->op1 == op2) {
1415 			/* (a + b) - a => b */
1416 			IR_FOLD_COPY(op1_insn->op2);
1417 		} else if (op1_insn->op2 == op2) {
1418 			/* (a + b) - b => a */
1419 			IR_FOLD_COPY(op1_insn->op1);
1420 		}
1421 	}
1422 	IR_FOLD_NEXT;
1423 }
1424 
1425 IR_FOLD(SUB(_, ADD))
1426 {
1427 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1428 		if (op2_insn->op1 == op1) {
1429 			/* a - (a + b) => -b */
1430 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1431 			op1 = op2_insn->op2;
1432 			op2 = IR_UNUSED;
1433 			IR_FOLD_RESTART;
1434 		} else if (op2_insn->op2 == op1) {
1435 			/* b - (a + b) => -a */
1436 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1437 			op1 = op2_insn->op1;
1438 			op2 = IR_UNUSED;
1439 			IR_FOLD_RESTART;
1440 		}
1441 	}
1442 	IR_FOLD_NEXT;
1443 }
1444 
1445 IR_FOLD(SUB(SUB, _))
1446 {
1447 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1448 		if (op1_insn->op1 == op2) {
1449 			/* (a - b) - a => -b */
1450 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1451 			op1 = op1_insn->op2;
1452 			op2 = IR_UNUSED;
1453 			IR_FOLD_RESTART;
1454 		}
1455 	}
1456 	IR_FOLD_NEXT;
1457 }
1458 
1459 IR_FOLD(SUB(_, SUB))
1460 {
1461 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1462 		if (op2_insn->op1 == op1) {
1463 			/* a - (a - b) => b */
1464 			IR_FOLD_COPY(op2_insn->op2);
1465 		}
1466 	}
1467 	IR_FOLD_NEXT;
1468 }
1469 
1470 IR_FOLD(SUB(ADD, ADD))
1471 {
1472 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1473 		if (op1_insn->op1 == op2_insn->op1) {
1474 			/* (a + b) - (a + c) => b - c */
1475 			op1 = op1_insn->op2;
1476 			op2 = op2_insn->op2;
1477 			IR_FOLD_RESTART;
1478 		} else if (op1_insn->op1 == op2_insn->op2) {
1479 			/* (a + b) - (c + a) => b - c */
1480 			op1 = op1_insn->op2;
1481 			op2 = op2_insn->op1;
1482 			IR_FOLD_RESTART;
1483 		} else if (op1_insn->op2 == op2_insn->op1) {
1484 			/* (a + b) - (b + c) => a - c */
1485 			op1 = op1_insn->op1;
1486 			op2 = op2_insn->op2;
1487 			IR_FOLD_RESTART;
1488 		} else if (op1_insn->op2 == op2_insn->op2) {
1489 			/* (a + b) - (c + b) => a - c */
1490 			op1 = op1_insn->op1;
1491 			op2 = op2_insn->op1;
1492 			IR_FOLD_RESTART;
1493 		}
1494 	}
1495 	IR_FOLD_NEXT;
1496 }
1497 
1498 // IR_FOLD(SUB(NEG, CONST))  TODO: -a - b => -b - a
1499 // IR_FOLD(MUL(NEG, CONST))  TODO: -a * b => a * -b
1500 // IR_FOLD(DIV(NEG, CONST))  TODO: -a / b => a / -b
1501 
1502 IR_FOLD(MUL(_, C_U8))
1503 IR_FOLD(MUL(_, C_U16))
1504 IR_FOLD(MUL(_, C_U32))
1505 IR_FOLD(MUL(_, C_U64))
1506 {
1507 	if (op2_insn->val.u64 == 0) {
1508 		/* a * 0 => 0 */
1509 		IR_FOLD_COPY(op2);
1510 	} else if (op2_insn->val.u64 == 1) {
1511 		IR_FOLD_COPY(op1);
1512 	} else if (op2_insn->val.u64 == 2) {
1513 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1514 		op2 = op1;
1515 		IR_FOLD_RESTART;
1516 	}
1517 	IR_FOLD_NEXT;
1518 }
1519 
1520 IR_FOLD(MUL(_, C_I8))
1521 IR_FOLD(MUL(_, C_I16))
1522 IR_FOLD(MUL(_, C_I32))
1523 IR_FOLD(MUL(_, C_I64))
1524 {
1525 	if (op2_insn->val.i64 == 0) {
1526 		/* a * 0 => 0 */
1527 		IR_FOLD_COPY(op2);
1528 	} else if (op2_insn->val.i64 == 1) {
1529 		/* a * 1 => a */
1530 		IR_FOLD_COPY(op1);
1531 	} else if (op2_insn->val.i64 == 2) {
1532 		/* a * 2 => a + a */
1533 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1534 		op2 = op1;
1535 		IR_FOLD_RESTART;
1536 	} else if (op2_insn->val.i64 == -1) {
1537 		/* a * -1 => -a */
1538 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1539 		op2 = IR_UNUSED;
1540 		IR_FOLD_RESTART;
1541 	}
1542 	IR_FOLD_NEXT;
1543 }
1544 
1545 IR_FOLD(MUL(_, C_DOUBLE))
1546 {
1547 	if (op2_insn->val.d == 1.0) {
1548 		/* a * 1.0 => a */
1549 		IR_FOLD_COPY(op1);
1550 	} else if (op2_insn->val.d == 2.0) {
1551 		/* a * 2.0 => a + a */
1552 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1553 		op2 = op1;
1554 		IR_FOLD_RESTART;
1555 	} else if (op2_insn->val.d == -1.0) {
1556 		/* a * -1.0 => -a */
1557 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1558 		op2 = IR_UNUSED;
1559 		IR_FOLD_RESTART;
1560 	}
1561 	IR_FOLD_NEXT;
1562 }
1563 
1564 IR_FOLD(MUL(_, C_FLOAT))
1565 {
1566 	if (op2_insn->val.f == 1.0) {
1567 		/* a * 1.0 => a */
1568 		IR_FOLD_COPY(op1);
1569 	} else if (op2_insn->val.f == 2.0) {
1570 		/* a * 2.0 => a + a */
1571 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1572 		op2 = op1;
1573 		IR_FOLD_RESTART;
1574 	} else if (op2_insn->val.f == -1.0) {
1575 		/* a * -1.0 => -a */
1576 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1577 		op2 = IR_UNUSED;
1578 		IR_FOLD_RESTART;
1579 	}
1580 	IR_FOLD_NEXT;
1581 }
1582 
1583 IR_FOLD(DIV(_, C_U8))
1584 IR_FOLD(DIV(_, C_U16))
1585 IR_FOLD(DIV(_, C_U32))
1586 IR_FOLD(DIV(_, C_U64))
1587 {
1588 	if (op2_insn->val.u64 == 1) {
1589 		IR_FOLD_COPY(op1);
1590 	}
1591 	IR_FOLD_NEXT;
1592 }
1593 
1594 IR_FOLD(DIV(_, C_I8))
1595 IR_FOLD(DIV(_, C_I16))
1596 IR_FOLD(DIV(_, C_I32))
1597 IR_FOLD(DIV(_, C_I64))
1598 {
1599 	if (op2_insn->val.i64 == 1) {
1600 		/* a / 1 => a */
1601 		IR_FOLD_COPY(op1);
1602 	} else if (op2_insn->val.i64 == -1) {
1603 		/* a / -1 => -a */
1604 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1605 		op2 = IR_UNUSED;
1606 		IR_FOLD_RESTART;
1607 	}
1608 	IR_FOLD_NEXT;
1609 }
1610 
1611 IR_FOLD(MOD(_, C_U8))
1612 IR_FOLD(MOD(_, C_U16))
1613 IR_FOLD(MOD(_, C_U32))
1614 IR_FOLD(MOD(_, C_U64))
1615 IR_FOLD(MOD(_, C_I8))
1616 IR_FOLD(MOD(_, C_I16))
1617 IR_FOLD(MOD(_, C_I32))
1618 IR_FOLD(MOD(_, C_I64))
1619 {
1620 	if (op2_insn->val.i64 == 1) {
1621 		/* a % 1 => 0 */
1622 		IR_FOLD_CONST_U(0);
1623 	}
1624 	IR_FOLD_NEXT;
1625 }
1626 
1627 IR_FOLD(DIV(_, C_DOUBLE))
1628 {
1629 	if (op2_insn->val.d == 1.0) {
1630 		/* a / 1.0 => a */
1631 		IR_FOLD_COPY(op1);
1632 	} else if (op2_insn->val.d == -1.0) {
1633 		/* a / -1.0 => -a */
1634 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1635 		op2 = IR_UNUSED;
1636 		IR_FOLD_RESTART;
1637 	}
1638 	IR_FOLD_NEXT;
1639 }
1640 
1641 IR_FOLD(DIV(_, C_FLOAT))
1642 {
1643 	if (op2_insn->val.f == 1.0) {
1644 		/* a / 1.0 => a */
1645 		IR_FOLD_COPY(op1);
1646 	} else if (op2_insn->val.f == -1.0) {
1647 		/* a / -1.0 => -a */
1648 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1649 		op2 = IR_UNUSED;
1650 		IR_FOLD_RESTART;
1651 	}
1652 	IR_FOLD_NEXT;
1653 }
1654 
1655 IR_FOLD(MUL(NEG, NEG))
1656 IR_FOLD(DIV(NEG, NEG))
1657 {
1658 	op1 = op1_insn->op1;
1659 	op2 = op2_insn->op1;
1660 	IR_FOLD_RESTART;
1661 }
1662 
1663 IR_FOLD(AND(_, C_BOOL))
1664 {
1665 	IR_FOLD_COPY(op2_insn->val.b ? op1 : op2);
1666 }
1667 
1668 IR_FOLD(AND(_, C_U8))
1669 IR_FOLD(AND(_, C_I8))
1670 IR_FOLD(AND(_, C_CHAR))
1671 {
1672 	if (op2_insn->val.i8 == 0) {
1673 		/* a & 0 => 0 */
1674 		IR_FOLD_COPY(op2);
1675 	} else if (op2_insn->val.i8 == -1) {
1676 		IR_FOLD_COPY(op1);
1677 	}
1678 	IR_FOLD_NEXT;
1679 }
1680 
1681 IR_FOLD(AND(_, C_U16))
1682 IR_FOLD(AND(_, C_I16))
1683 {
1684 	if (op2_insn->val.i16 == 0) {
1685 		/* a & 0 => 0 */
1686 		IR_FOLD_COPY(op2);
1687 	} else if (op2_insn->val.i16 == -1) {
1688 		IR_FOLD_COPY(op1);
1689 	}
1690 	IR_FOLD_NEXT;
1691 }
1692 
1693 IR_FOLD(AND(_, C_U32))
1694 IR_FOLD(AND(_, C_I32))
1695 {
1696 	if (op2_insn->val.i32 == 0) {
1697 		/* a & 0 => 0 */
1698 		IR_FOLD_COPY(op2);
1699 	} else if (op2_insn->val.i32 == -1) {
1700 		IR_FOLD_COPY(op1);
1701 	}
1702 	IR_FOLD_NEXT;
1703 }
1704 
1705 IR_FOLD(AND(_, C_U64))
1706 IR_FOLD(AND(_, C_I64))
1707 {
1708 	if (op2_insn->val.i64 == 0) {
1709 		/* a & 0 => 0 */
1710 		IR_FOLD_COPY(op2);
1711 	} else if (op2_insn->val.i64 == -1) {
1712 		IR_FOLD_COPY(op1);
1713 	}
1714 	IR_FOLD_NEXT;
1715 }
1716 
1717 IR_FOLD(OR(_, C_BOOL))
1718 {
1719 	IR_FOLD_COPY(op2_insn->val.b ? op2 : op1);
1720 }
1721 
1722 IR_FOLD(OR(_, C_U8))
1723 IR_FOLD(OR(_, C_I8))
1724 IR_FOLD(OR(_, C_CHAR))
1725 {
1726 	if (op2_insn->val.i8 == -1) {
1727 		/* a | -1 => -1 */
1728 		IR_FOLD_COPY(op2);
1729 	} else if (op2_insn->val.i8 == 0) {
1730 		IR_FOLD_COPY(op1);
1731 	}
1732 	IR_FOLD_NEXT;
1733 }
1734 
1735 IR_FOLD(OR(_, C_U16))
1736 IR_FOLD(OR(_, C_I16))
1737 {
1738 	if (op2_insn->val.i16 == -1) {
1739 		/* a | -1 => -1 */
1740 		IR_FOLD_COPY(op2);
1741 	} else if (op2_insn->val.i16 == 0) {
1742 		IR_FOLD_COPY(op1);
1743 	}
1744 	IR_FOLD_NEXT;
1745 }
1746 
1747 IR_FOLD(OR(_, C_U32))
1748 IR_FOLD(OR(_, C_I32))
1749 {
1750 	if (op2_insn->val.i32 == -1) {
1751 		/* a | -1 => -1 */
1752 		IR_FOLD_COPY(op2);
1753 	} else if (op2_insn->val.i32 == 0) {
1754 		IR_FOLD_COPY(op1);
1755 	}
1756 	IR_FOLD_NEXT;
1757 }
1758 
1759 IR_FOLD(OR(_, C_U64))
1760 IR_FOLD(OR(_, C_I64))
1761 {
1762 	if (op2_insn->val.i64 == -1) {
1763 		/* a | -1 => -1 */
1764 		IR_FOLD_COPY(op2);
1765 	} else if (op2_insn->val.i64 == 0) {
1766 		IR_FOLD_COPY(op1);
1767 	}
1768 	IR_FOLD_NEXT;
1769 }
1770 
1771 IR_FOLD(XOR(_, C_BOOL))
1772 {
1773 	if (!op2_insn->val.b) {
1774 		/* a ^ 0 => a */
1775 		IR_FOLD_COPY(op1);
1776 	} else {
1777 		/* a ^ 1 => !a */
1778 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
1779 		op2 = IR_UNUSED;
1780 		IR_FOLD_RESTART;
1781 	}
1782 }
1783 
1784 IR_FOLD(XOR(_, C_U8))
1785 IR_FOLD(XOR(_, C_I8))
1786 IR_FOLD(XOR(_, C_CHAR))
1787 {
1788 	if (op2_insn->val.i8 == 0) {
1789 		/* a ^ 0 => a */
1790 		IR_FOLD_COPY(op1);
1791 	} else if (op2_insn->val.i8 == -1) {
1792 		/* a ^ -1 => ~a */
1793 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
1794 		op2 = IR_UNUSED;
1795 		IR_FOLD_RESTART;
1796 	}
1797 	IR_FOLD_NEXT;
1798 }
1799 
1800 IR_FOLD(XOR(_, C_U16))
1801 IR_FOLD(XOR(_, C_I16))
1802 {
1803 	if (op2_insn->val.i16 == 0) {
1804 		/* a ^ 0 => a */
1805 		IR_FOLD_COPY(op1);
1806 	} else if (op2_insn->val.i16 == -1) {
1807 		/* a ^ -1 => ~a */
1808 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
1809 		op2 = IR_UNUSED;
1810 		IR_FOLD_RESTART;
1811 	}
1812 	IR_FOLD_NEXT;
1813 }
1814 
1815 IR_FOLD(XOR(_, C_U32))
1816 IR_FOLD(XOR(_, C_I32))
1817 {
1818 	if (op2_insn->val.i32 == 0) {
1819 		/* a ^ 0 => a */
1820 		IR_FOLD_COPY(op1);
1821 	} else if (op2_insn->val.i32 == -1) {
1822 		/* a ^ -1 => ~a */
1823 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
1824 		op2 = IR_UNUSED;
1825 		IR_FOLD_RESTART;
1826 	}
1827 	IR_FOLD_NEXT;
1828 }
1829 
1830 IR_FOLD(XOR(_, C_U64))
1831 IR_FOLD(XOR(_, C_I64))
1832 {
1833 	if (op2_insn->val.i64 == 0) {
1834 		/* a ^ 0 => a */
1835 		IR_FOLD_COPY(op1);
1836 	} else if (op2_insn->val.i64 == -1) {
1837 		/* a ^ -1 => ~a */
1838 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
1839 		op2 = IR_UNUSED;
1840 		IR_FOLD_RESTART;
1841 	}
1842 	IR_FOLD_NEXT;
1843 }
1844 
1845 IR_FOLD(SHL(_, C_U8))
1846 IR_FOLD(SHL(_, C_U16))
1847 IR_FOLD(SHL(_, C_U32))
1848 IR_FOLD(SHL(_, C_U64))
1849 IR_FOLD(SHL(_, C_I8))
1850 IR_FOLD(SHL(_, C_I16))
1851 IR_FOLD(SHL(_, C_I32))
1852 IR_FOLD(SHL(_, C_I64))
1853 {
1854 	if (op2_insn->val.u64 == 0) {
1855 		/* a << 0 => a */
1856 		IR_FOLD_COPY(op1);
1857 	} else if (op2_insn->val.u64 == 1) {
1858 		/* a << 1 => a + a */
1859 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1860 		op2 = op1;
1861 		IR_FOLD_RESTART;
1862 	}
1863 	IR_FOLD_NEXT;
1864 }
1865 
1866 IR_FOLD(SHR(_, C_U8))
1867 IR_FOLD(SHR(_, C_U16))
1868 IR_FOLD(SHR(_, C_U32))
1869 IR_FOLD(SHR(_, C_U64))
1870 IR_FOLD(SHR(_, C_I8))
1871 IR_FOLD(SHR(_, C_I16))
1872 IR_FOLD(SHR(_, C_I32))
1873 IR_FOLD(SHR(_, C_I64))
1874 IR_FOLD(SAR(_, C_U8))
1875 IR_FOLD(SAR(_, C_U16))
1876 IR_FOLD(SAR(_, C_U32))
1877 IR_FOLD(SAR(_, C_U64))
1878 IR_FOLD(SAR(_, C_I8))
1879 IR_FOLD(SAR(_, C_I16))
1880 IR_FOLD(SAR(_, C_I32))
1881 IR_FOLD(SAR(_, C_I64))
1882 IR_FOLD(ROL(_, C_U8))
1883 IR_FOLD(ROL(_, C_U16))
1884 IR_FOLD(ROL(_, C_U32))
1885 IR_FOLD(ROL(_, C_U64))
1886 IR_FOLD(ROL(_, C_I8))
1887 IR_FOLD(ROL(_, C_I16))
1888 IR_FOLD(ROL(_, C_I32))
1889 IR_FOLD(ROL(_, C_I64))
1890 IR_FOLD(ROR(_, C_U8))
1891 IR_FOLD(ROR(_, C_U16))
1892 IR_FOLD(ROR(_, C_U32))
1893 IR_FOLD(ROR(_, C_U64))
1894 IR_FOLD(ROR(_, C_I8))
1895 IR_FOLD(ROR(_, C_I16))
1896 IR_FOLD(ROR(_, C_I32))
1897 IR_FOLD(ROR(_, C_I64))
1898 {
1899 	if (op2_insn->val.u64 == 0) {
1900 		/* a >> 0 => a */
1901 		IR_FOLD_COPY(op1);
1902 	}
1903 	IR_FOLD_NEXT;
1904 }
1905 
1906 IR_FOLD(SHL(C_U8, _))
1907 IR_FOLD(SHL(C_U16, _))
1908 IR_FOLD(SHL(C_U32, _))
1909 IR_FOLD(SHL(C_U64, _))
1910 IR_FOLD(SHL(C_I8, _))
1911 IR_FOLD(SHL(C_I16, _))
1912 IR_FOLD(SHL(C_I32, _))
1913 IR_FOLD(SHL(C_I64, _))
1914 IR_FOLD(SHR(C_U8, _))
1915 IR_FOLD(SHR(C_U16, _))
1916 IR_FOLD(SHR(C_U32, _))
1917 IR_FOLD(SHR(C_U64, _))
1918 IR_FOLD(SHR(C_I8, _))
1919 IR_FOLD(SHR(C_I16, _))
1920 IR_FOLD(SHR(C_I32, _))
1921 IR_FOLD(SHR(C_I64, _))
1922 {
1923 	if (op1_insn->val.u64 == 0) {
1924 		/* 0 << a => 0 */
1925 		IR_FOLD_COPY(op1);
1926 	}
1927 	IR_FOLD_NEXT;
1928 }
1929 
1930 IR_FOLD(SAR(C_U8, _))
1931 IR_FOLD(SAR(C_I8, _))
1932 IR_FOLD(ROL(C_U8, _))
1933 IR_FOLD(ROL(C_I8, _))
1934 IR_FOLD(ROR(C_U8, _))
1935 IR_FOLD(ROR(C_I8, _))
1936 {
1937 	if (op1_insn->val.i8 == 0 || op1_insn->val.i8 == -1) {
1938 		IR_FOLD_COPY(op1);
1939 	}
1940 	IR_FOLD_NEXT;
1941 }
1942 
1943 IR_FOLD(SAR(C_U16, _))
1944 IR_FOLD(SAR(C_I16, _))
1945 IR_FOLD(ROL(C_U16, _))
1946 IR_FOLD(ROL(C_I16, _))
1947 IR_FOLD(ROR(C_U16, _))
1948 IR_FOLD(ROR(C_I16, _))
1949 {
1950 	if (op1_insn->val.i16 == 0 || op1_insn->val.i16 == -1) {
1951 		IR_FOLD_COPY(op1);
1952 	}
1953 	IR_FOLD_NEXT;
1954 }
1955 
1956 IR_FOLD(SAR(C_U32, _))
1957 IR_FOLD(SAR(C_I32, _))
1958 IR_FOLD(ROL(C_U32, _))
1959 IR_FOLD(ROL(C_I32, _))
1960 IR_FOLD(ROR(C_U32, _))
1961 IR_FOLD(ROR(C_I32, _))
1962 {
1963 	if (op1_insn->val.i32 == 0 || op1_insn->val.i32 == -1) {
1964 		IR_FOLD_COPY(op1);
1965 	}
1966 	IR_FOLD_NEXT;
1967 }
1968 
1969 IR_FOLD(SAR(C_U64, _))
1970 IR_FOLD(SAR(C_I64, _))
1971 IR_FOLD(ROL(C_U64, _))
1972 IR_FOLD(ROL(C_I64, _))
1973 IR_FOLD(ROR(C_U64, _))
1974 IR_FOLD(ROR(C_I64, _))
1975 {
1976 	if (op1_insn->val.i64 == 0 || op1_insn->val.i64 == -1) {
1977 		IR_FOLD_COPY(op1);
1978 	}
1979 	IR_FOLD_NEXT;
1980 }
1981 
1982 IR_FOLD(LT(ABS, C_I8))
1983 IR_FOLD(LT(ABS, C_I16))
1984 IR_FOLD(LT(ABS, C_I32))
1985 IR_FOLD(LT(ABS, C_I64))
1986 IR_FOLD(LT(ABS, C_FLOAT))
1987 IR_FOLD(LT(ABS, C_DOUBLE))
1988 {
1989 	if (op2_insn->val.u64 == 0) {
1990 		/* abs() < 0 => false */
1991 		IR_FOLD_COPY(IR_FALSE);
1992 	}
1993 	IR_FOLD_NEXT;
1994 }
1995 
1996 IR_FOLD(GE(ABS, C_I8))
1997 IR_FOLD(GE(ABS, C_I16))
1998 IR_FOLD(GE(ABS, C_I32))
1999 IR_FOLD(GE(ABS, C_I64))
2000 IR_FOLD(GE(ABS, C_FLOAT))
2001 IR_FOLD(GE(ABS, C_DOUBLE))
2002 {
2003 	if (op2_insn->val.u64 == 0) {
2004 		/* abs() >= 0 => true */
2005 		IR_FOLD_COPY(IR_TRUE);
2006 	}
2007 	IR_FOLD_NEXT;
2008 }
2009 
2010 // TODO: conversions
IR_FOLD(FP2FP (FP2FP))2011 IR_FOLD(FP2FP(FP2FP))
2012 {
2013 	if (IR_OPT_TYPE(opt) == IR_FLOAT) {
2014 		/* (float)(double)f => f */
2015 		IR_ASSERT(op1_insn->type == IR_DOUBLE);
2016 		IR_ASSERT(ctx->ir_base[op1_insn->op1].type == IR_FLOAT);
2017 		IR_FOLD_COPY(op1_insn->op1);
2018 	}
2019 	IR_FOLD_NEXT;
2020 }
2021 
IR_FOLD(FP2INT (INT2FP))2022 IR_FOLD(FP2INT(INT2FP))
2023 {
2024 	ir_type dst_type = IR_OPT_TYPE(opt);
2025 	ir_type src_type = ctx->ir_base[op1_insn->op1].type;
2026 
2027 	if (ir_type_size[src_type] >= ir_type_size[op1_insn->type]) {
		/* the source integer type cannot be represented exactly in the intermediate floating-point type */
		IR_FOLD_NEXT;
	}
	/* (int)(double)i => i */
	if (src_type == dst_type) {
		IR_FOLD_COPY(op1_insn->op1);
	}
	IR_FOLD_NEXT;
}

IR_FOLD(TRUNC(ZEXT))
IR_FOLD(TRUNC(SEXT))
{
	ir_type dst_type = IR_OPT_TYPE(opt);
	ir_type src_type = ctx->ir_base[op1_insn->op1].type;

	/* (int32_t)(int64_t)i => i */
	if (src_type == dst_type) {
		IR_FOLD_COPY(op1_insn->op1);
	} else if (ir_type_size[src_type] == ir_type_size[dst_type]) {
		opt = IR_OPT(IR_BITCAST, dst_type);
		op1 = op1_insn->op1;
		IR_FOLD_RESTART;
	} else if (ir_type_size[src_type] > ir_type_size[dst_type]) {
		opt = IR_OPT(IR_TRUNC, dst_type);
		op1 = op1_insn->op1;
		IR_FOLD_RESTART;
	} else {
		opt = IR_OPT(op1_insn->op, dst_type);
		op1 = op1_insn->op1;
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(TRUNC(AND))
{
	if (IR_IS_CONST_REF(op1_insn->op2)) {
		size_t size = ir_type_size[IR_OPT_TYPE(opt)];
		uint64_t mask = ctx->ir_base[op1_insn->op2].val.u64;

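		/* (TRUNC)(x & mask) => (TRUNC)x when the mask keeps every bit of the truncated type */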
		if (size == 1) {
			if (mask == 0xff) {
				op1 = op1_insn->op1;
				IR_FOLD_RESTART;
			}
		} else if (size == 2) {
			if (mask == 0xffff) {
				op1 = op1_insn->op1;
				IR_FOLD_RESTART;
			}
		} else if (size == 4) {
			if (mask == 0xffffffff) {
				op1 = op1_insn->op1;
				IR_FOLD_RESTART;
			}
		}
	}
	IR_FOLD_NEXT;
}

IR_FOLD(EQ(FP2FP, C_DOUBLE))
IR_FOLD(NE(FP2FP, C_DOUBLE))
IR_FOLD(LT(FP2FP, C_DOUBLE))
IR_FOLD(GE(FP2FP, C_DOUBLE))
IR_FOLD(LE(FP2FP, C_DOUBLE))
IR_FOLD(GT(FP2FP, C_DOUBLE))
IR_FOLD(ULT(FP2FP, C_DOUBLE))
IR_FOLD(UGE(FP2FP, C_DOUBLE))
IR_FOLD(ULE(FP2FP, C_DOUBLE))
IR_FOLD(UGT(FP2FP, C_DOUBLE))
{
	IR_ASSERT(op1_insn->type == IR_DOUBLE);
	IR_ASSERT(ctx->ir_base[op1_insn->op1].type == IR_FLOAT);
	if (op2_insn->val.d == (double)(float)op2_insn->val.d) {
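		/* compare the FLOAT operand directly when the DOUBLE constant converts to FLOAT exactly */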
		op1 = op1_insn->op1;
		op2 = ir_const_float(ctx, (float)op2_insn->val.d);
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

// TODO: Reassociation
IR_FOLD(ADD(ADD, C_U8))
IR_FOLD(ADD(ADD, C_U16))
IR_FOLD(ADD(ADD, C_U32))
IR_FOLD(ADD(ADD, C_U64))
IR_FOLD(ADD(ADD, C_ADDR))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x + c1) + c2  => x + (c1 + c2) */
		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(ADD(ADD, C_I8))
IR_FOLD(ADD(ADD, C_I16))
IR_FOLD(ADD(ADD, C_I32))
IR_FOLD(ADD(ADD, C_I64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x + c1) + c2  => x + (c1 + c2) */
		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 + op2_insn->val.i64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(MUL(MUL, C_U8))
IR_FOLD(MUL(MUL, C_U16))
IR_FOLD(MUL(MUL, C_U32))
IR_FOLD(MUL(MUL, C_U64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x * c1) * c2  => x * (c1 * c2) */
		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 * op2_insn->val.u64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(MUL(MUL, C_I8))
IR_FOLD(MUL(MUL, C_I16))
IR_FOLD(MUL(MUL, C_I32))
IR_FOLD(MUL(MUL, C_I64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x * c1) * c2  => x * (c1 * c2) */
		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 * op2_insn->val.i64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(AND(AND, C_U8))
IR_FOLD(AND(AND, C_U16))
IR_FOLD(AND(AND, C_U32))
IR_FOLD(AND(AND, C_U64))
IR_FOLD(AND(AND, C_I8))
IR_FOLD(AND(AND, C_I16))
IR_FOLD(AND(AND, C_I32))
IR_FOLD(AND(AND, C_I64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x & c1) & c2  => x & (c1 & c2) */
		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 & op2_insn->val.u64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(OR(OR, C_U8))
IR_FOLD(OR(OR, C_U16))
IR_FOLD(OR(OR, C_U32))
IR_FOLD(OR(OR, C_U64))
IR_FOLD(OR(OR, C_I8))
IR_FOLD(OR(OR, C_I16))
IR_FOLD(OR(OR, C_I32))
IR_FOLD(OR(OR, C_I64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x | c1) | c2  => x | (c1 | c2) */
		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 | op2_insn->val.u64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(XOR(XOR, C_U8))
IR_FOLD(XOR(XOR, C_U16))
IR_FOLD(XOR(XOR, C_U32))
IR_FOLD(XOR(XOR, C_U64))
IR_FOLD(XOR(XOR, C_I8))
IR_FOLD(XOR(XOR, C_I16))
IR_FOLD(XOR(XOR, C_I32))
IR_FOLD(XOR(XOR, C_I64))
{
	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
		/* (x ^ c1) ^ c2  => x ^ (c1 ^ c2) */
		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 ^ op2_insn->val.u64;
		op1 = op1_insn->op1;
		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(AND(AND, _))
IR_FOLD(OR(OR, _))
IR_FOLD(MIN(MIN, _))
IR_FOLD(MAX(MAX, _))
{
	if (op1_insn->op1 == op2 || op1_insn->op2 == op2) {
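		/* (a & b) & a => a & b, min(min(a, b), a) => min(a, b) */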
		IR_FOLD_COPY(op1);
	}
	IR_FOLD_NEXT;
}

IR_FOLD(XOR(XOR, _))
{
	if (op1_insn->op1 == op2) {
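		/* (a ^ b) ^ a => b */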
		IR_FOLD_COPY(op1_insn->op2);
	} else if (op1_insn->op2 == op2) {
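		/* (a ^ b) ^ b => a */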
		IR_FOLD_COPY(op1_insn->op1);
	}
	IR_FOLD_NEXT;
}

/* Swap operands (move lower ref to op2) for better CSE */
IR_FOLD(ADD(_, _))
IR_FOLD(MUL(_, _))
IR_FOLD_NAMED(swap_ops)
{
	if (op1 < op2) {  /* move lower ref to op2 */
		ir_ref tmp = op1;
		op1 = op2;
		op2 = tmp;
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(ADD_OV(_, _))
IR_FOLD(MUL_OV(_, _))
{
	if (op1 < op2) {  /* move lower ref to op2 */
		ir_ref tmp = op1;
		op1 = op2;
		op2 = tmp;
		IR_FOLD_RESTART;
	}
	/* skip CSE ??? */
	IR_FOLD_EMIT;
}

IR_FOLD(SUB(_, _))
{
	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt)) && op1 == op2) {
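		/* a - a => 0 */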
		IR_FOLD_CONST_U(0);
	}
	IR_FOLD_NEXT;
}

IR_FOLD(SUB_OV(_, _))
{
	if (op1 == op2) {
		IR_FOLD_CONST_U(0);
	}
	/* skip CSE ??? */
	IR_FOLD_EMIT;
}

/* Binary operations with op1 == op2 */
IR_FOLD(AND(_,_))
IR_FOLD(OR(_,_))
IR_FOLD(MIN(_, _))
IR_FOLD(MAX(_, _))
{
	/* a & a => a */
	if (op1 == op2) {
		IR_FOLD_COPY(op1);
	}
	IR_FOLD_DO_NAMED(swap_ops);
}

IR_FOLD(XOR(_,_))
{
	/* a xor a => 0 */
	if (op1 == op2) {
		IR_FOLD_CONST_U(0);
	}
	IR_FOLD_DO_NAMED(swap_ops);
}

IR_FOLD(EQ(_, _))
IR_FOLD(NE(_, _))
{
	if (op1 != op2) {
		IR_FOLD_DO_NAMED(swap_ops);
	} else if (IR_IS_TYPE_INT(op1_insn->type)) {
		/* a == a => true */
		IR_FOLD_BOOL((opt & IR_OPT_OP_MASK) == IR_EQ);
	}
	IR_FOLD_NEXT;
}

IR_FOLD(LT(_, _))
IR_FOLD(GE(_, _))
IR_FOLD(LE(_, _))
IR_FOLD(GT(_, _))
{
	if (op1 == op2) {
		if (IR_IS_TYPE_INT(op1_insn->type)) {
			/* a >= a => true, a < a => false (the result is the XOR of the two low opcode bits) */
			IR_FOLD_BOOL((opt ^ (opt >> 1)) & 1);
		}
	} else if (op1 < op2) {  /* move lower ref to op2 */
		ir_ref tmp = op1;
		op1 = op2;
		op2 = tmp;
		opt ^= 3; /* [U]LT <-> [U]GT, [U]LE <-> [U]GE */
		IR_FOLD_RESTART;
	}
	IR_FOLD_NEXT;
}

IR_FOLD(ULT(_, _))
IR_FOLD(UGE(_, _))
IR_FOLD(ULE(_, _))
IR_FOLD(UGT(_, _))
{
	if (op1 == op2) {
		/* a >= a => true, a < a => false (the result is the XOR of the two low opcode bits) */
		IR_FOLD_BOOL((opt ^ (opt >> 1)) & 1);
	} else if (op1 < op2) {  /* move lower ref to op2 */
		ir_ref tmp = op1;
		op1 = op2;
		op2 = tmp;
		opt ^= 3; /* [U]LT <-> [U]GT, [U]LE <-> [U]GE */
	}
	IR_FOLD_NEXT;
}

IR_FOLD(COND(_, _)) // TODO: COND(_, _, _)
{
	if (op2 == op3) {
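		/* COND(cond, a, a) => a */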
		IR_FOLD_COPY(op2);
	}
	IR_FOLD_NEXT;
}