xref: /php-src/ext/opcache/jit/ir/ir_fold.h (revision 7e2831f0)
1 /*
2  * IR - Lightweight JIT Compilation Framework
3  * (Folding engine rules)
4  * Copyright (C) 2022 Zend by Perforce.
5  * Authors: Dmitry Stogov <dmitry@php.net>
6  *
7  * Based on Mike Pall's implementation for LuaJIT.
8  */
9 
10 /* Constant Folding */
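/* Reader note (editorial): each IR_FOLD(OP(type1, type2)) line below declares a match
 * pattern; consecutive patterns share the body that follows, which computes the folded
 * result with the IR_FOLD_* macros (CONST_*/BOOL emit a constant, COPY reuses an existing
 * ref, NEXT/RESTART/EMIT control further matching). */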
11 IR_FOLD(EQ(C_BOOL, C_BOOL))
12 IR_FOLD(EQ(C_U8, C_U8))
13 IR_FOLD(EQ(C_U16, C_U16))
14 IR_FOLD(EQ(C_U32, C_U32))
15 IR_FOLD(EQ(C_U64, C_U64))
16 IR_FOLD(EQ(C_ADDR, C_ADDR))
17 IR_FOLD(EQ(C_CHAR, C_CHAR))
18 IR_FOLD(EQ(C_I8, C_I8))
19 IR_FOLD(EQ(C_I16, C_I16))
20 IR_FOLD(EQ(C_I32, C_I32))
21 IR_FOLD(EQ(C_I64, C_I64))
22 {
23 	IR_FOLD_BOOL(op1_insn->val.u64 == op2_insn->val.u64);
24 }
25 
26 IR_FOLD(EQ(C_DOUBLE, C_DOUBLE))
27 {
28 	IR_FOLD_BOOL(op1_insn->val.d == op2_insn->val.d);
29 }
30 
31 IR_FOLD(EQ(C_FLOAT, C_FLOAT))
32 {
33 	IR_FOLD_BOOL(op1_insn->val.f == op2_insn->val.f);
34 }
35 
36 IR_FOLD(NE(C_BOOL, C_BOOL))
37 IR_FOLD(NE(C_U8, C_U8))
38 IR_FOLD(NE(C_U16, C_U16))
39 IR_FOLD(NE(C_U32, C_U32))
40 IR_FOLD(NE(C_U64, C_U64))
41 IR_FOLD(NE(C_ADDR, C_ADDR))
42 IR_FOLD(NE(C_CHAR, C_CHAR))
43 IR_FOLD(NE(C_I8, C_I8))
44 IR_FOLD(NE(C_I16, C_I16))
45 IR_FOLD(NE(C_I32, C_I32))
46 IR_FOLD(NE(C_I64, C_I64))
47 {
48 	IR_FOLD_BOOL(op1_insn->val.u64 != op2_insn->val.u64);
49 }
50 
51 IR_FOLD(NE(C_DOUBLE, C_DOUBLE))
52 {
53 	IR_FOLD_BOOL(op1_insn->val.d != op2_insn->val.d);
54 }
55 
56 IR_FOLD(NE(C_FLOAT, C_FLOAT))
57 {
58 	IR_FOLD_BOOL(op1_insn->val.f != op2_insn->val.f);
59 }
60 
61 IR_FOLD(LT(C_BOOL, C_BOOL))
62 IR_FOLD(LT(C_U8, C_U8))
63 IR_FOLD(LT(C_U16, C_U16))
64 IR_FOLD(LT(C_U32, C_U32))
65 IR_FOLD(LT(C_U64, C_U64))
66 IR_FOLD(LT(C_ADDR, C_ADDR))
67 {
68 	IR_FOLD_BOOL(op1_insn->val.u64 < op2_insn->val.u64);
69 }
70 
71 IR_FOLD(LT(C_CHAR, C_CHAR))
72 IR_FOLD(LT(C_I8, C_I8))
73 IR_FOLD(LT(C_I16, C_I16))
74 IR_FOLD(LT(C_I32, C_I32))
75 IR_FOLD(LT(C_I64, C_I64))
76 {
77 	IR_FOLD_BOOL(op1_insn->val.i64 < op2_insn->val.i64);
78 }
79 
80 IR_FOLD(LT(C_DOUBLE, C_DOUBLE))
81 {
82 	IR_FOLD_BOOL(op1_insn->val.d < op2_insn->val.d);
83 }
84 
85 IR_FOLD(LT(C_FLOAT, C_FLOAT))
86 {
87 	IR_FOLD_BOOL(op1_insn->val.f < op2_insn->val.f);
88 }
89 
90 IR_FOLD(GE(C_BOOL, C_BOOL))
91 IR_FOLD(GE(C_U8, C_U8))
92 IR_FOLD(GE(C_U16, C_U16))
93 IR_FOLD(GE(C_U32, C_U32))
94 IR_FOLD(GE(C_U64, C_U64))
95 IR_FOLD(GE(C_ADDR, C_ADDR))
96 {
97 	IR_FOLD_BOOL(op1_insn->val.u64 >= op2_insn->val.u64);
98 }
99 
100 IR_FOLD(GE(C_CHAR, C_CHAR))
101 IR_FOLD(GE(C_I8, C_I8))
102 IR_FOLD(GE(C_I16, C_I16))
103 IR_FOLD(GE(C_I32, C_I32))
104 IR_FOLD(GE(C_I64, C_I64))
105 {
106 	IR_FOLD_BOOL(op1_insn->val.i64 >= op2_insn->val.i64);
107 }
108 
109 IR_FOLD(GE(C_DOUBLE, C_DOUBLE))
110 {
111 	IR_FOLD_BOOL(op1_insn->val.d >= op2_insn->val.d);
112 }
113 
114 IR_FOLD(GE(C_FLOAT, C_FLOAT))
115 {
116 	IR_FOLD_BOOL(op1_insn->val.f >= op2_insn->val.f);
117 }
118 
119 IR_FOLD(LE(C_BOOL, C_BOOL))
120 IR_FOLD(LE(C_U8, C_U8))
121 IR_FOLD(LE(C_U16, C_U16))
122 IR_FOLD(LE(C_U32, C_U32))
123 IR_FOLD(LE(C_U64, C_U64))
124 IR_FOLD(LE(C_ADDR, C_ADDR))
125 {
126 	IR_FOLD_BOOL(op1_insn->val.u64 <= op2_insn->val.u64);
127 }
128 
129 IR_FOLD(LE(C_CHAR, C_CHAR))
130 IR_FOLD(LE(C_I8, C_I8))
131 IR_FOLD(LE(C_I16, C_I16))
132 IR_FOLD(LE(C_I32, C_I32))
133 IR_FOLD(LE(C_I64, C_I64))
134 {
135 	IR_FOLD_BOOL(op1_insn->val.i64 <= op2_insn->val.i64);
136 }
137 
138 IR_FOLD(LE(C_DOUBLE, C_DOUBLE))
139 {
140 	IR_FOLD_BOOL(op1_insn->val.d <= op2_insn->val.d);
141 }
142 
143 IR_FOLD(LE(C_FLOAT, C_FLOAT))
144 {
145 	IR_FOLD_BOOL(op1_insn->val.f <= op2_insn->val.f);
146 }
147 
148 IR_FOLD(GT(C_BOOL, C_BOOL))
149 IR_FOLD(GT(C_U8, C_U8))
150 IR_FOLD(GT(C_U16, C_U16))
151 IR_FOLD(GT(C_U32, C_U32))
152 IR_FOLD(GT(C_U64, C_U64))
153 IR_FOLD(GT(C_ADDR, C_ADDR))
154 {
155 	IR_FOLD_BOOL(op1_insn->val.u64 > op2_insn->val.u64);
156 }
157 
158 IR_FOLD(GT(C_CHAR, C_CHAR))
159 IR_FOLD(GT(C_I8, C_I8))
160 IR_FOLD(GT(C_I16, C_I16))
161 IR_FOLD(GT(C_I32, C_I32))
162 IR_FOLD(GT(C_I64, C_I64))
163 {
164 	IR_FOLD_BOOL(op1_insn->val.i64 > op2_insn->val.i64);
165 }
166 
167 IR_FOLD(GT(C_DOUBLE, C_DOUBLE))
168 {
169 	IR_FOLD_BOOL(op1_insn->val.d > op2_insn->val.d);
170 }
171 
172 IR_FOLD(GT(C_FLOAT, C_FLOAT))
173 {
174 	IR_FOLD_BOOL(op1_insn->val.f > op2_insn->val.f);
175 }
176 
177 IR_FOLD(ULT(C_BOOL, C_BOOL))
178 IR_FOLD(ULT(C_U8, C_U8))
179 IR_FOLD(ULT(C_U16, C_U16))
180 IR_FOLD(ULT(C_U32, C_U32))
181 IR_FOLD(ULT(C_U64, C_U64))
182 IR_FOLD(ULT(C_ADDR, C_ADDR))
183 IR_FOLD(ULT(C_CHAR, C_CHAR))
184 IR_FOLD(ULT(C_I8, C_I8))
185 IR_FOLD(ULT(C_I16, C_I16))
186 IR_FOLD(ULT(C_I32, C_I32))
187 IR_FOLD(ULT(C_I64, C_I64))
188 {
189 	IR_FOLD_BOOL(op1_insn->val.u64 < op2_insn->val.u64);
190 }
191 
192 IR_FOLD(ULT(C_DOUBLE, C_DOUBLE))
193 {
194 	IR_FOLD_BOOL(!(op1_insn->val.d >= op2_insn->val.d));
195 }
196 
197 IR_FOLD(ULT(C_FLOAT, C_FLOAT))
198 {
199 	IR_FOLD_BOOL(!(op1_insn->val.f >= op2_insn->val.f));
200 }
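/* Reader note (editorial): for floating-point constants the "unsigned" comparison rules
 * are folded as the negation of the inverse ordered comparison (e.g. ULT => !(a >= b)),
 * so a NaN operand makes them fold to true; the UGE/ULE/UGT rules below use the same scheme. */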
201 
202 IR_FOLD(UGE(C_BOOL, C_BOOL))
203 IR_FOLD(UGE(C_U8, C_U8))
204 IR_FOLD(UGE(C_U16, C_U16))
205 IR_FOLD(UGE(C_U32, C_U32))
206 IR_FOLD(UGE(C_U64, C_U64))
207 IR_FOLD(UGE(C_ADDR, C_ADDR))
208 IR_FOLD(UGE(C_CHAR, C_CHAR))
209 IR_FOLD(UGE(C_I8, C_I8))
210 IR_FOLD(UGE(C_I16, C_I16))
211 IR_FOLD(UGE(C_I32, C_I32))
212 IR_FOLD(UGE(C_I64, C_I64))
213 {
214 	IR_FOLD_BOOL(op1_insn->val.u64 >= op2_insn->val.u64);
215 }
216 
217 IR_FOLD(UGE(C_DOUBLE, C_DOUBLE))
218 {
219 	IR_FOLD_BOOL(!(op1_insn->val.d < op2_insn->val.d));
220 }
221 
222 IR_FOLD(UGE(C_FLOAT, C_FLOAT))
223 {
224 	IR_FOLD_BOOL(!(op1_insn->val.f < op2_insn->val.f));
225 }
226 
227 IR_FOLD(ULE(C_BOOL, C_BOOL))
228 IR_FOLD(ULE(C_U8, C_U8))
229 IR_FOLD(ULE(C_U16, C_U16))
230 IR_FOLD(ULE(C_U32, C_U32))
231 IR_FOLD(ULE(C_U64, C_U64))
232 IR_FOLD(ULE(C_ADDR, C_ADDR))
233 IR_FOLD(ULE(C_CHAR, C_CHAR))
234 IR_FOLD(ULE(C_I8, C_I8))
235 IR_FOLD(ULE(C_I16, C_I16))
236 IR_FOLD(ULE(C_I32, C_I32))
237 IR_FOLD(ULE(C_I64, C_I64))
238 {
239 	IR_FOLD_BOOL(op1_insn->val.u64 <= op2_insn->val.u64);
240 }
241 
242 IR_FOLD(ULE(C_DOUBLE, C_DOUBLE))
243 {
244 	IR_FOLD_BOOL(!(op1_insn->val.d > op2_insn->val.d));
245 }
246 
247 IR_FOLD(ULE(C_FLOAT, C_FLOAT))
248 {
249 	IR_FOLD_BOOL(!(op1_insn->val.f > op2_insn->val.f));
250 }
251 
252 IR_FOLD(UGT(C_BOOL, C_BOOL))
253 IR_FOLD(UGT(C_U8, C_U8))
254 IR_FOLD(UGT(C_U16, C_U16))
255 IR_FOLD(UGT(C_U32, C_U32))
256 IR_FOLD(UGT(C_U64, C_U64))
257 IR_FOLD(UGT(C_ADDR, C_ADDR))
258 IR_FOLD(UGT(C_CHAR, C_CHAR))
259 IR_FOLD(UGT(C_I8, C_I8))
260 IR_FOLD(UGT(C_I16, C_I16))
261 IR_FOLD(UGT(C_I32, C_I32))
262 IR_FOLD(UGT(C_I64, C_I64))
263 {
264 	IR_FOLD_BOOL(op1_insn->val.u64 > op2_insn->val.u64);
265 }
266 
267 IR_FOLD(UGT(C_DOUBLE, C_DOUBLE))
268 {
269 	IR_FOLD_BOOL(!(op1_insn->val.d <= op2_insn->val.d));
270 }
271 
272 IR_FOLD(UGT(C_FLOAT, C_FLOAT))
273 {
274 	IR_FOLD_BOOL(!(op1_insn->val.f <= op2_insn->val.f));
275 }
276 
277 IR_FOLD(ADD(C_U8, C_U8))
278 {
279 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
280 	IR_FOLD_CONST_U(op1_insn->val.u8 + op2_insn->val.u8);
281 }
282 
283 IR_FOLD(ADD(C_U16, C_U16))
284 {
285 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
286 	IR_FOLD_CONST_U(op1_insn->val.u16 + op2_insn->val.u16);
287 }
288 
289 IR_FOLD(ADD(C_U32, C_U32))
290 {
291 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 4 && IR_OPT_TYPE(opt) == IR_ADDR));
292 	IR_FOLD_CONST_U(op1_insn->val.u32 + op2_insn->val.u32);
293 }
294 
295 IR_FOLD(ADD(C_U64, C_U64))
296 {
297 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 8 && IR_OPT_TYPE(opt) == IR_ADDR));
298 	IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
299 }
300 
301 IR_FOLD(ADD(C_ADDR, C_ADDR))
302 IR_FOLD(ADD(C_ADDR, C_INTPTR))
303 IR_FOLD(ADD(C_ADDR, C_UINTPTR))
304 IR_FOLD(ADD(C_INTPTR, C_ADDR))
305 IR_FOLD(ADD(C_UINTPTR, C_ADDR))
306 {
307 //	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
308 	IR_FOLD_CONST_U(op1_insn->val.addr + op2_insn->val.addr);
309 }
310 
311 IR_FOLD(ADD(C_I8, C_I8))
312 {
313 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
314 	IR_FOLD_CONST_I(op1_insn->val.i8 + op2_insn->val.i8);
315 }
316 
317 IR_FOLD(ADD(C_I16, C_I16))
318 {
319 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
320 	IR_FOLD_CONST_I(op1_insn->val.i16 + op2_insn->val.i16);
321 }
322 
323 IR_FOLD(ADD(C_I32, C_I32))
324 {
325 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 4 && IR_OPT_TYPE(opt) == IR_ADDR));
326 	IR_FOLD_CONST_I(op1_insn->val.i32 + op2_insn->val.i32);
327 }
328 
329 IR_FOLD(ADD(C_I64, C_I64))
330 {
331 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 8 && IR_OPT_TYPE(opt) == IR_ADDR));
332 	IR_FOLD_CONST_I(op1_insn->val.i64 + op2_insn->val.i64);
333 }
334 
335 IR_FOLD(ADD(C_DOUBLE, C_DOUBLE))
336 {
337 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
338 	IR_FOLD_CONST_D(op1_insn->val.d + op2_insn->val.d);
339 }
340 
341 IR_FOLD(ADD(C_FLOAT, C_FLOAT))
342 {
343 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
344 	IR_FOLD_CONST_F(op1_insn->val.f + op2_insn->val.f);
345 }
346 
347 IR_FOLD(SUB(C_U8, C_U8))
348 {
349 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
350 	IR_FOLD_CONST_U(op1_insn->val.u8 - op2_insn->val.u8);
351 }
352 
353 IR_FOLD(SUB(C_U16, C_U16))
354 {
355 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
356 	IR_FOLD_CONST_U(op1_insn->val.u16 - op2_insn->val.u16);
357 }
358 
359 IR_FOLD(SUB(C_U32, C_U32))
360 {
361 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
362 	IR_FOLD_CONST_U(op1_insn->val.u32 - op2_insn->val.u32);
363 }
364 
365 IR_FOLD(SUB(C_U64, C_U64))
366 {
367 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
368 	IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
369 }
370 
371 IR_FOLD(SUB(C_ADDR, C_ADDR))
372 IR_FOLD(SUB(C_ADDR, C_INTPTR))
373 IR_FOLD(SUB(C_ADDR, C_UINTPTR))
374 IR_FOLD(SUB(C_INTPTR, C_ADDR))
375 IR_FOLD(SUB(C_UINTPTR, C_ADDR))
376 {
377 //	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
378 	IR_FOLD_CONST_U(op1_insn->val.addr - op2_insn->val.addr);
379 }
380 
381 IR_FOLD(SUB(C_I8, C_I8))
382 {
383 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
384 	IR_FOLD_CONST_I(op1_insn->val.i8 - op2_insn->val.i8);
385 }
386 
387 IR_FOLD(SUB(C_I16, C_I16))
388 {
389 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
390 	IR_FOLD_CONST_I(op1_insn->val.i16 - op2_insn->val.i16);
391 }
392 
393 IR_FOLD(SUB(C_I32, C_I32))
394 {
395 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
396 	IR_FOLD_CONST_I(op1_insn->val.i32 - op2_insn->val.i32);
397 }
398 
399 IR_FOLD(SUB(C_I64, C_I64))
400 {
401 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
402 	IR_FOLD_CONST_I(op1_insn->val.i64 - op2_insn->val.i64);
403 }
404 
405 IR_FOLD(SUB(C_DOUBLE, C_DOUBLE))
406 {
407 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
408 	IR_FOLD_CONST_D(op1_insn->val.d - op2_insn->val.d);
409 }
410 
411 IR_FOLD(SUB(C_FLOAT, C_FLOAT))
412 {
413 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
414 	IR_FOLD_CONST_F(op1_insn->val.f - op2_insn->val.f);
415 }
416 
417 IR_FOLD(MUL(C_U8, C_U8))
418 {
419 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
420 	IR_FOLD_CONST_U(op1_insn->val.u8 * op2_insn->val.u8);
421 }
422 
423 IR_FOLD(MUL(C_U16, C_U16))
424 {
425 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
426 	IR_FOLD_CONST_U(op1_insn->val.u16 * op2_insn->val.u16);
427 }
428 
429 IR_FOLD(MUL(C_U32, C_U32))
430 {
431 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
432 	IR_FOLD_CONST_U(op1_insn->val.u32 * op2_insn->val.u32);
433 }
434 
435 IR_FOLD(MUL(C_U64, C_U64))
436 {
437 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
438 	IR_FOLD_CONST_U(op1_insn->val.u64 * op2_insn->val.u64);
439 }
440 
441 IR_FOLD(MUL(C_ADDR, C_ADDR))
442 IR_FOLD(MUL(C_ADDR, C_INTPTR))
443 IR_FOLD(MUL(C_ADDR, C_UINTPTR))
444 IR_FOLD(MUL(C_INTPTR, C_ADDR))
445 IR_FOLD(MUL(C_UINTPTR, C_ADDR))
446 {
447 //	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
448 	IR_FOLD_CONST_U(op1_insn->val.addr * op2_insn->val.addr);
449 }
450 
451 IR_FOLD(MUL(C_I8, C_I8))
452 {
453 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
454 	IR_FOLD_CONST_I(op1_insn->val.i8 * op2_insn->val.i8);
455 }
456 
457 IR_FOLD(MUL(C_I16, C_I16))
458 {
459 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
460 	IR_FOLD_CONST_I(op1_insn->val.i16 * op2_insn->val.i16);
461 }
462 
463 IR_FOLD(MUL(C_I32, C_I32))
464 {
465 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
466 	IR_FOLD_CONST_I(op1_insn->val.i32 * op2_insn->val.i32);
467 }
468 
469 IR_FOLD(MUL(C_I64, C_I64))
470 {
471 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
472 	IR_FOLD_CONST_I(op1_insn->val.i64 * op2_insn->val.i64);
473 }
474 
475 IR_FOLD(MUL(C_DOUBLE, C_DOUBLE))
476 {
477 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
478 	IR_FOLD_CONST_D(op1_insn->val.d * op2_insn->val.d);
479 }
480 
481 IR_FOLD(MUL(C_FLOAT, C_FLOAT))
482 {
483 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
484 	IR_FOLD_CONST_F(op1_insn->val.f * op2_insn->val.f);
485 }
486 
487 IR_FOLD(DIV(C_U8, C_U8))
488 IR_FOLD(DIV(C_U16, C_U16))
489 IR_FOLD(DIV(C_U32, C_U32))
490 IR_FOLD(DIV(C_U64, C_U64))
491 IR_FOLD(DIV(C_ADDR, C_ADDR))
492 {
493 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
494 	if (op2_insn->val.u64 == 0) {
495 		/* division by zero */
496 		IR_FOLD_EMIT;
497 	}
498 	IR_FOLD_CONST_U(op1_insn->val.u64 / op2_insn->val.u64);
499 }
500 
501 IR_FOLD(DIV(C_I8, C_I8))
502 IR_FOLD(DIV(C_I16, C_I16))
503 IR_FOLD(DIV(C_I32, C_I32))
504 IR_FOLD(DIV(C_I64, C_I64))
505 {
506 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
507 	if (op2_insn->val.i64 == 0) {
508 		/* division by zero */
509 		IR_FOLD_EMIT;
510 	}
511 	IR_FOLD_CONST_I(op1_insn->val.i64 / op2_insn->val.i64);
512 }
513 
514 IR_FOLD(DIV(C_DOUBLE, C_DOUBLE))
515 {
516 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
517 	IR_FOLD_CONST_D(op1_insn->val.d / op2_insn->val.d);
518 }
519 
520 IR_FOLD(DIV(C_FLOAT, C_FLOAT))
521 {
522 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
523 	IR_FOLD_CONST_F(op1_insn->val.f / op2_insn->val.f);
524 }
525 
526 IR_FOLD(MOD(C_U8, C_U8))
527 IR_FOLD(MOD(C_U16, C_U16))
528 IR_FOLD(MOD(C_U32, C_U32))
529 IR_FOLD(MOD(C_U64, C_U64))
530 IR_FOLD(MOD(C_ADDR, C_ADDR))
531 {
532 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
533 	if (op2_insn->val.u64 == 0) {
534 		/* division by zero */
535 		IR_FOLD_EMIT;
536 	}
537 	IR_FOLD_CONST_U(op1_insn->val.u64 % op2_insn->val.u64);
538 }
539 
540 IR_FOLD(MOD(C_I8, C_I8))
541 IR_FOLD(MOD(C_I16, C_I16))
542 IR_FOLD(MOD(C_I32, C_I32))
543 IR_FOLD(MOD(C_I64, C_I64))
544 {
545 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
546 	if (op2_insn->val.i64 == 0) {
547 		/* division by zero */
548 		IR_FOLD_EMIT;
549 	}
550 	IR_FOLD_CONST_I(op1_insn->val.i64 % op2_insn->val.i64);
551 }
552 
553 IR_FOLD(NEG(C_I8))
554 IR_FOLD(NEG(C_I16))
555 IR_FOLD(NEG(C_I32))
556 IR_FOLD(NEG(C_I64))
557 {
558 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
559 	IR_FOLD_CONST_I(-op1_insn->val.i64);
560 }
561 
562 IR_FOLD(NEG(C_DOUBLE))
563 {
564 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
565 	IR_FOLD_CONST_D(-op1_insn->val.d);
566 }
567 
568 IR_FOLD(NEG(C_FLOAT))
569 {
570 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
571 	IR_FOLD_CONST_F(-op1_insn->val.f);
572 }
573 
574 IR_FOLD(ABS(C_I8))
575 IR_FOLD(ABS(C_I16))
576 IR_FOLD(ABS(C_I32))
577 IR_FOLD(ABS(C_I64))
578 {
579 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
580 	if (op1_insn->val.i64 >= 0) {
581 		IR_FOLD_COPY(op1);
582 	} else {
583 		IR_FOLD_CONST_I(-op1_insn->val.i64);
584 	}
585 }
586 
587 IR_FOLD(ABS(C_DOUBLE))
588 {
589 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
590 	IR_FOLD_CONST_D(fabs(op1_insn->val.d));
591 }
592 
593 IR_FOLD(ABS(C_FLOAT))
594 {
595 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
596 	IR_FOLD_CONST_F(fabsf(op1_insn->val.f));
597 }
598 
599 IR_FOLD(ADD_OV(C_U8, C_U8))
600 IR_FOLD(ADD_OV(C_U16, C_U16))
601 IR_FOLD(ADD_OV(C_U32, C_U32))
602 IR_FOLD(ADD_OV(C_U64, C_U64))
603 {
604 	ir_type type = IR_OPT_TYPE(opt);
605 	uint64_t max = ((uint64_t)0xffffffffffffffff) >> (64 - ir_type_size[type] * 8);
606 	IR_ASSERT(type == op1_insn->type);
607 	if (op1_insn->val.u64 > max - op2_insn->val.u64) {
608 		IR_FOLD_NEXT;
609 	}
610 	IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
611 }
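/* Reader note (editorial): the *_OV rules fold only when the result provably fits in the
 * operand width (here, op1 <= max - op2); otherwise they fall through via IR_FOLD_NEXT,
 * presumably so the run-time OVERFLOW check still sees the instruction. */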
612 
613 IR_FOLD(ADD_OV(C_I8, C_I8))
614 IR_FOLD(ADD_OV(C_I16, C_I16))
615 IR_FOLD(ADD_OV(C_I32, C_I32))
616 IR_FOLD(ADD_OV(C_I64, C_I64))
617 {
618 	ir_type type = IR_OPT_TYPE(opt);
619 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
620 	int64_t min = - max - 1;
621 	IR_ASSERT(type == op1_insn->type);
622 	if ((op2_insn->val.i64 > 0 && op1_insn->val.i64 > max - op2_insn->val.i64)
623 	 || (op2_insn->val.i64 < 0 && op1_insn->val.i64 < min - op2_insn->val.i64)) {
624 		IR_FOLD_NEXT;
625 	}
626 	IR_FOLD_CONST_I(op1_insn->val.i64 + op2_insn->val.i64);
627 }
628 
629 IR_FOLD(SUB_OV(C_U8, C_U8))
630 IR_FOLD(SUB_OV(C_U16, C_U16))
631 IR_FOLD(SUB_OV(C_U32, C_U32))
632 IR_FOLD(SUB_OV(C_U64, C_U64))
633 {
634 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
635 	if (op2_insn->val.u64 > op1_insn->val.u64) {
636 		IR_FOLD_NEXT;
637 	}
638 	IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
639 }
640 
641 IR_FOLD(SUB_OV(C_I8, C_I8))
642 IR_FOLD(SUB_OV(C_I16, C_I16))
643 IR_FOLD(SUB_OV(C_I32, C_I32))
644 IR_FOLD(SUB_OV(C_I64, C_I64))
645 {
646 	ir_type type = IR_OPT_TYPE(opt);
647 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
648 	int64_t min = - max - 1;
649 	IR_ASSERT(type == op1_insn->type);
650 	if ((op2_insn->val.i64 > 0 && op1_insn->val.i64 < min + op2_insn->val.i64)
651 	 || (op2_insn->val.i64 < 0 && op1_insn->val.i64 > max + op2_insn->val.i64)) {
652 		IR_FOLD_NEXT;
653 	}
654 	IR_FOLD_CONST_I(op1_insn->val.i64 - op2_insn->val.i64);
655 }
656 
657 IR_FOLD(MUL_OV(C_U8, C_U8))
658 IR_FOLD(MUL_OV(C_U16, C_U16))
659 IR_FOLD(MUL_OV(C_U32, C_U32))
660 IR_FOLD(MUL_OV(C_U64, C_U64))
661 {
662 	ir_type type = IR_OPT_TYPE(opt);
663 	uint64_t max = ((uint64_t)0xffffffffffffffff) >> (64 - ir_type_size[type] * 8);
664 	uint64_t res;
665 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
666 	res = op1_insn->val.u64 * op2_insn->val.u64;
667 	if (op1_insn->val.u64 != 0 && res / op1_insn->val.u64 != op2_insn->val.u64 && res <= max) {
668 		IR_FOLD_NEXT;
669 	}
670 	IR_FOLD_CONST_U(res);
671 }
672 
673 IR_FOLD(MUL_OV(C_I8, C_I8))
674 IR_FOLD(MUL_OV(C_I16, C_I16))
675 IR_FOLD(MUL_OV(C_I32, C_I32))
676 IR_FOLD(MUL_OV(C_I64, C_I64))
677 {
678 	ir_type type = IR_OPT_TYPE(opt);
679 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
680 	int64_t min = - max - 1;
681 	int64_t res;
682 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
683 	res = op1_insn->val.i64 * op2_insn->val.i64;
684 	if (op1_insn->val.i64 != 0 && res / op1_insn->val.i64 != op2_insn->val.i64 && res >= min && res <= max) {
685 		IR_FOLD_NEXT;
686 	}
687 	IR_FOLD_CONST_U(res);
688 }
689 
690 IR_FOLD(OVERFLOW(_))
691 {
692 	if (op1_insn->op != IR_ADD_OV && op1_insn->op != IR_SUB_OV && op1_insn->op != IR_MUL_OV) {
693 		IR_FOLD_COPY(IR_FALSE);
694 	}
695 	IR_FOLD_NEXT;
696 }
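/* Reader note (editorial): OVERFLOW is only meaningful on the result of an *_OV
 * instruction; for any other producer it folds directly to IR_FALSE. */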
697 
698 IR_FOLD(NOT(C_BOOL))
699 {
700 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
701 	IR_FOLD_BOOL(!op1_insn->val.u64);
702 }
703 
704 IR_FOLD(NOT(C_U8))
705 IR_FOLD(NOT(C_CHAR))
706 {
707 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
708 	IR_FOLD_CONST_U(~op1_insn->val.u8);
709 }
710 
711 IR_FOLD(NOT(C_I8))
712 {
713 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
714 	IR_FOLD_CONST_I(~op1_insn->val.i8);
715 }
716 
717 IR_FOLD(NOT(C_U16))
718 {
719 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
720 	IR_FOLD_CONST_U(~op1_insn->val.u16);
721 }
722 
723 IR_FOLD(NOT(C_I16))
724 {
725 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
726 	IR_FOLD_CONST_I(~op1_insn->val.i16);
727 }
728 
729 IR_FOLD(NOT(C_U32))
730 {
731 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
732 	IR_FOLD_CONST_U(~op1_insn->val.u32);
733 }
734 
735 IR_FOLD(NOT(C_I32))
736 {
737 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
738 	IR_FOLD_CONST_I(~op1_insn->val.i32);
739 }
740 
741 IR_FOLD(NOT(C_U64))
742 IR_FOLD(NOT(C_I64))
743 {
744 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
745 	IR_FOLD_CONST_U(~op1_insn->val.u64);
746 }
747 
748 IR_FOLD(OR(C_BOOL, C_BOOL))
749 {
750 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
751 	IR_FOLD_BOOL(op1_insn->val.b || op2_insn->val.b);
752 }
753 
754 IR_FOLD(OR(C_CHAR, C_CHAR))
755 IR_FOLD(OR(C_U8, C_U8))
756 IR_FOLD(OR(C_U16, C_U16))
757 IR_FOLD(OR(C_U32, C_U32))
758 IR_FOLD(OR(C_U64, C_U64))
759 {
760 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
761 	IR_FOLD_CONST_U(op1_insn->val.u64 | op2_insn->val.u64);
762 }
763 
764 IR_FOLD(OR(C_I8, C_I8))
765 IR_FOLD(OR(C_I16, C_I16))
766 IR_FOLD(OR(C_I32, C_I32))
767 IR_FOLD(OR(C_I64, C_I64))
768 {
769 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
770 	IR_FOLD_CONST_I(op1_insn->val.i64 | op2_insn->val.i64);
771 }
772 
773 IR_FOLD(AND(C_BOOL, C_BOOL))
774 {
775 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
776 	IR_FOLD_BOOL(op1_insn->val.b && op2_insn->val.b);
777 }
778 
779 IR_FOLD(AND(C_CHAR, C_CHAR))
780 IR_FOLD(AND(C_U8, C_U8))
781 IR_FOLD(AND(C_U16, C_U16))
782 IR_FOLD(AND(C_U32, C_U32))
783 IR_FOLD(AND(C_U64, C_U64))
784 {
785 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
786 	IR_FOLD_CONST_U(op1_insn->val.u64 & op2_insn->val.u64);
787 }
788 
789 IR_FOLD(AND(C_I8, C_I8))
790 IR_FOLD(AND(C_I16, C_I16))
791 IR_FOLD(AND(C_I32, C_I32))
792 IR_FOLD(AND(C_I64, C_I64))
793 {
794 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
795 	IR_FOLD_CONST_I(op1_insn->val.i64 & op2_insn->val.i64);
796 }
797 
798 IR_FOLD(XOR(C_BOOL, C_BOOL))
799 {
800 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
801 	IR_FOLD_BOOL(op1_insn->val.b != op2_insn->val.b);
802 }
803 
804 IR_FOLD(XOR(C_U8, C_U8))
805 IR_FOLD(XOR(C_CHAR, C_CHAR))
806 {
807 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
808 	IR_FOLD_CONST_U(op1_insn->val.u8 ^ op2_insn->val.u8);
809 }
810 
811 IR_FOLD(XOR(C_I8, C_I8))
812 {
813 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
814 	IR_FOLD_CONST_I(op1_insn->val.i8 ^ op2_insn->val.i8);
815 }
816 
817 IR_FOLD(XOR(C_U16, C_U16))
818 {
819 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
820 	IR_FOLD_CONST_U(op1_insn->val.u16 ^ op2_insn->val.u16);
821 }
822 
823 IR_FOLD(XOR(C_I16, C_I16))
824 {
825 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
826 	IR_FOLD_CONST_I(op1_insn->val.i16 ^ op2_insn->val.i16);
827 }
828 
829 IR_FOLD(XOR(C_U32, C_U32))
830 {
831 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
832 	IR_FOLD_CONST_U(op1_insn->val.u32 ^ op2_insn->val.u32);
833 }
834 
835 IR_FOLD(XOR(C_I32, C_I32))
836 {
837 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
838 	IR_FOLD_CONST_I(op1_insn->val.i32 ^ op2_insn->val.i32);
839 }
840 
841 IR_FOLD(XOR(C_U64, C_U64))
842 {
843 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
844 	IR_FOLD_CONST_U(op1_insn->val.u64 ^ op2_insn->val.u64);
845 }
846 
847 IR_FOLD(XOR(C_I64, C_I64))
848 {
849 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
850 	IR_FOLD_CONST_I(op1_insn->val.i64 ^ op2_insn->val.i64);
851 }
852 
853 IR_FOLD(SHL(C_U8, C_U8))
854 IR_FOLD(SHL(C_CHAR, C_CHAR))
855 {
856 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
857 	IR_FOLD_CONST_U(op1_insn->val.u8 << op2_insn->val.u8);
858 }
859 
860 IR_FOLD(SHL(C_I8, C_I8))
861 {
862 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
863 	IR_FOLD_CONST_I(op1_insn->val.i8 << op2_insn->val.i8);
864 }
865 
866 IR_FOLD(SHL(C_U16, C_U16))
867 {
868 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
869 	IR_FOLD_CONST_U(op1_insn->val.u16 << op2_insn->val.u16);
870 }
871 
872 IR_FOLD(SHL(C_I16, C_I16))
873 {
874 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
875 	IR_FOLD_CONST_I(op1_insn->val.i16 << op2_insn->val.i16);
876 }
877 
878 IR_FOLD(SHL(C_U32, C_U32))
879 {
880 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
881 	IR_FOLD_CONST_U(op1_insn->val.u32 << op2_insn->val.u32);
882 }
883 
884 IR_FOLD(SHL(C_I32, C_I32))
885 {
886 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
887 	IR_FOLD_CONST_I(op1_insn->val.i32 << op2_insn->val.i32);
888 }
889 
890 IR_FOLD(SHL(C_U64, C_U64))
891 IR_FOLD(SHL(C_I64, C_I64))
892 {
893 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
894 	IR_FOLD_CONST_U(op1_insn->val.u64 << op2_insn->val.u64);
895 }
896 
897 IR_FOLD(SHR(C_U8, C_U8))
898 IR_FOLD(SHR(C_CHAR, C_CHAR))
899 {
900 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
901 	IR_FOLD_CONST_U(op1_insn->val.u8 >> op2_insn->val.u8);
902 }
903 
904 IR_FOLD(SHR(C_I8, C_I8))
905 {
906 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
907 	IR_FOLD_CONST_I((int8_t)(op1_insn->val.u8 >> op2_insn->val.u8));
908 }
909 
910 IR_FOLD(SHR(C_U16, C_U16))
911 {
912 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
913 	IR_FOLD_CONST_U(op1_insn->val.u16 >> op2_insn->val.u16);
914 }
915 
916 IR_FOLD(SHR(C_I16, C_I16))
917 {
918 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
919 	IR_FOLD_CONST_U((int16_t)(op1_insn->val.u16 >> op2_insn->val.u16));
920 }
921 
922 IR_FOLD(SHR(C_U32, C_U32))
923 {
924 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
925 	IR_FOLD_CONST_U(op1_insn->val.u32 >> op2_insn->val.u32);
926 }
927 
928 IR_FOLD(SHR(C_I32, C_I32))
929 {
930 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
931 	IR_FOLD_CONST_U((int32_t)(op1_insn->val.u32 >> op2_insn->val.u32));
932 }
933 
934 IR_FOLD(SHR(C_U64, C_U64))
935 IR_FOLD(SHR(C_I64, C_I64))
936 {
937 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
938 	IR_FOLD_CONST_U(op1_insn->val.u64 >> op2_insn->val.u64);
939 }
940 
941 IR_FOLD(SAR(C_U8, C_U8))
942 IR_FOLD(SAR(C_CHAR, C_CHAR))
943 {
944 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
945 	IR_FOLD_CONST_U((uint8_t)(op1_insn->val.i8 >> op2_insn->val.i8));
946 }
947 
948 IR_FOLD(SAR(C_I8, C_I8))
949 {
950 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
951 	IR_FOLD_CONST_I(op1_insn->val.i8 >> op2_insn->val.i8);
952 }
953 
954 IR_FOLD(SAR(C_U16, C_U16))
955 {
956 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
957 	IR_FOLD_CONST_U((uint16_t)(op1_insn->val.i16 >> op2_insn->val.i16));
958 }
959 
960 IR_FOLD(SAR(C_I16, C_I16))
961 {
962 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
963 	IR_FOLD_CONST_I(op1_insn->val.i16 >> op2_insn->val.i16);
964 }
965 
966 IR_FOLD(SAR(C_U32, C_U32))
967 {
968 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
969 	IR_FOLD_CONST_U((uint32_t)(op1_insn->val.i32 >> op2_insn->val.i32));
970 }
971 
972 IR_FOLD(SAR(C_I32, C_I32))
973 {
974 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
975 	IR_FOLD_CONST_I(op1_insn->val.i32 >> op2_insn->val.i32);
976 }
977 
978 IR_FOLD(SAR(C_U64, C_U64))
979 IR_FOLD(SAR(C_I64, C_I64))
980 {
981 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
982 	IR_FOLD_CONST_I(op1_insn->val.i64 >> op2_insn->val.i64);
983 }
984 
985 IR_FOLD(ROL(C_U8, C_U8))
986 IR_FOLD(ROL(C_CHAR, C_CHAR))
987 {
988 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
989 	IR_FOLD_CONST_U(ir_rol8(op1_insn->val.u8, op2_insn->val.u8));
990 }
991 
992 IR_FOLD(ROL(C_I8, C_I8))
993 {
994 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
995 	IR_FOLD_CONST_I((int8_t)ir_rol8(op1_insn->val.u8, op2_insn->val.u8));
996 }
997 
998 IR_FOLD(ROL(C_U16, C_U16))
999 {
1000 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1001 	IR_FOLD_CONST_U(ir_rol16(op1_insn->val.u16, op2_insn->val.u16));
1002 }
1003 
1004 IR_FOLD(ROL(C_I16, C_I16))
1005 {
1006 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1007 	IR_FOLD_CONST_I((int16_t)ir_rol16(op1_insn->val.u16, op2_insn->val.u16));
1008 }
1009 
1010 IR_FOLD(ROL(C_U32, C_U32))
1011 {
1012 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1013 	IR_FOLD_CONST_U(ir_rol32(op1_insn->val.u32, op2_insn->val.u32));
1014 }
1015 
1016 IR_FOLD(ROL(C_I32, C_I32))
1017 {
1018 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1019 	IR_FOLD_CONST_I((int32_t)ir_rol32(op1_insn->val.u32, op2_insn->val.u32));
1020 }
1021 
1022 IR_FOLD(ROL(C_U64, C_U64))
1023 IR_FOLD(ROL(C_I64, C_I64))
1024 {
1025 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1026 	IR_FOLD_CONST_U(ir_rol64(op1_insn->val.u64, op2_insn->val.u64));
1027 }
1028 
1029 IR_FOLD(ROR(C_U8, C_U8))
1030 IR_FOLD(ROR(C_CHAR, C_CHAR))
1031 {
1032 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1033 	IR_FOLD_CONST_U(ir_ror8(op1_insn->val.u8, op2_insn->val.u8));
1034 }
1035 
1036 IR_FOLD(ROR(C_I8, C_I8))
1037 {
1038 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1039 	IR_FOLD_CONST_I((int8_t)ir_ror8(op1_insn->val.u8, op2_insn->val.u8));
1040 }
1041 
1042 IR_FOLD(ROR(C_U16, C_U16))
1043 {
1044 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1045 	IR_FOLD_CONST_U(ir_ror16(op1_insn->val.u16, op2_insn->val.u16));
1046 }
1047 
1048 IR_FOLD(ROR(C_I16, C_I16))
1049 {
1050 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1051 	IR_FOLD_CONST_I((int16_t)ir_ror16(op1_insn->val.u16, op2_insn->val.u16));
1052 }
1053 
1054 IR_FOLD(ROR(C_U32, C_U32))
1055 {
1056 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1057 	IR_FOLD_CONST_U(ir_ror32(op1_insn->val.u32, op2_insn->val.u32));
1058 }
1059 
1060 IR_FOLD(ROR(C_I32, C_I32))
1061 {
1062 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1063 	IR_FOLD_CONST_I((int32_t)ir_ror32(op1_insn->val.u32, op2_insn->val.u32));
1064 }
1065 
1066 IR_FOLD(ROR(C_U64, C_U64))
1067 IR_FOLD(ROR(C_I64, C_I64))
1068 {
1069 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1070 	IR_FOLD_CONST_U(ir_ror64(op1_insn->val.u64, op2_insn->val.u64));
1071 }
1072 
1073 //IR_FOLD(BSWAP(CONST))
1074 //TODO: bswap
1075 
1076 IR_FOLD(MIN(C_BOOL, C_BOOL))
1077 IR_FOLD(MIN(C_U8, C_U8))
1078 IR_FOLD(MIN(C_U16, C_U16))
1079 IR_FOLD(MIN(C_U32, C_U32))
1080 IR_FOLD(MIN(C_U64, C_U64))
1081 IR_FOLD(MIN(C_ADDR, C_ADDR))
1082 {
1083 	IR_FOLD_COPY(op1_insn->val.u64 <= op2_insn->val.u64 ? op1 : op2);
1084 }
1085 
1086 IR_FOLD(MIN(C_CHAR, C_CHAR))
1087 IR_FOLD(MIN(C_I8, C_I8))
1088 IR_FOLD(MIN(C_I16, C_I16))
1089 IR_FOLD(MIN(C_I32, C_I32))
1090 IR_FOLD(MIN(C_I64, C_I64))
1091 {
1092 	IR_FOLD_COPY(op1_insn->val.i64 <= op2_insn->val.i64 ? op1 : op2);
1093 }
1094 
1095 IR_FOLD(MIN(C_DOUBLE, C_DOUBLE))
1096 {
1097 	IR_FOLD_COPY(op1_insn->val.d <= op2_insn->val.d ? op1 : op2);
1098 }
1099 
1100 IR_FOLD(MIN(C_FLOAT, C_FLOAT))
1101 {
1102 	IR_FOLD_COPY(op1_insn->val.f <= op2_insn->val.f ? op1 : op2);
1103 }
1104 
1105 IR_FOLD(MAX(C_BOOL, C_BOOL))
1106 IR_FOLD(MAX(C_U8, C_U8))
1107 IR_FOLD(MAX(C_U16, C_U16))
1108 IR_FOLD(MAX(C_U32, C_U32))
1109 IR_FOLD(MAX(C_U64, C_U64))
1110 IR_FOLD(MAX(C_ADDR, C_ADDR))
1111 {
1112 	IR_FOLD_COPY(op1_insn->val.u64 >= op2_insn->val.u64 ? op1 : op2);
1113 }
1114 
1115 IR_FOLD(MAX(C_CHAR, C_CHAR))
1116 IR_FOLD(MAX(C_I8, C_I8))
1117 IR_FOLD(MAX(C_I16, C_I16))
1118 IR_FOLD(MAX(C_I32, C_I32))
1119 IR_FOLD(MAX(C_I64, C_I64))
1120 {
1121 	IR_FOLD_COPY(op1_insn->val.i64 >= op2_insn->val.i64 ? op1 : op2);
1122 }
1123 
1124 IR_FOLD(MAX(C_DOUBLE, C_DOUBLE))
1125 {
1126 	IR_FOLD_COPY(op1_insn->val.d >= op2_insn->val.d ? op1 : op2);
1127 }
1128 
1129 IR_FOLD(MAX(C_FLOAT, C_FLOAT))
1130 {
1131 	IR_FOLD_COPY(op1_insn->val.f >= op2_insn->val.f ? op1 : op2);
1132 }
1133 
1134 IR_FOLD(SEXT(C_I8))
1135 IR_FOLD(SEXT(C_U8))
1136 IR_FOLD(SEXT(C_BOOL))
1137 {
1138 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1139 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1140 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i8);
1141 }
1142 
1143 IR_FOLD(SEXT(C_I16))
1144 IR_FOLD(SEXT(C_U16))
1145 {
1146 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1147 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1148 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i16);
1149 }
1150 
1151 IR_FOLD(SEXT(C_I32))
1152 IR_FOLD(SEXT(C_U32))
1153 {
1154 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1155 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1156 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i32);
1157 }
1158 
1159 IR_FOLD(ZEXT(C_I8))
1160 IR_FOLD(ZEXT(C_U8))
1161 IR_FOLD(ZEXT(C_BOOL))
1162 {
1163 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1164 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1165 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u8);
1166 }
1167 
1168 IR_FOLD(ZEXT(C_I16))
1169 IR_FOLD(ZEXT(C_U16))
1170 {
1171 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1172 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1173 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u16);
1174 }
1175 
1176 IR_FOLD(ZEXT(C_I32))
1177 IR_FOLD(ZEXT(C_U32))
1178 {
1179 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1180 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1181 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u32);
1182 }
1183 
1184 IR_FOLD(TRUNC(C_I16))
1185 IR_FOLD(TRUNC(C_I32))
1186 IR_FOLD(TRUNC(C_I64))
1187 IR_FOLD(TRUNC(C_U16))
1188 IR_FOLD(TRUNC(C_U32))
1189 IR_FOLD(TRUNC(C_U64))
1190 {
1191 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1192 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] < ir_type_size[op1_insn->type]);
1193 	switch (IR_OPT_TYPE(opt)) {
1194 		default:
1195 			IR_ASSERT(0);
1196 		case IR_I8:
1197 			IR_FOLD_CONST_I(op1_insn->val.i8);
1198 		case IR_I16:
1199 			IR_FOLD_CONST_I(op1_insn->val.i16);
1200 		case IR_I32:
1201 			IR_FOLD_CONST_I(op1_insn->val.i32);
1202 		case IR_U8:
1203 			IR_FOLD_CONST_U(op1_insn->val.u8);
1204 		case IR_U16:
1205 			IR_FOLD_CONST_U(op1_insn->val.u16);
1206 		case IR_U32:
1207 			IR_FOLD_CONST_U(op1_insn->val.u32);
1208 	}
1209 }
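/* Reader note (editorial): no break statements are needed in this switch (or the
 * similar ones below): each IR_FOLD_CONST_* / IR_FOLD_BOOL invocation appears to
 * terminate the fold rule. */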
1210 
1211 
1212 IR_FOLD(BITCAST(C_I8))
1213 IR_FOLD(BITCAST(C_I16))
1214 IR_FOLD(BITCAST(C_I32))
1215 IR_FOLD(BITCAST(C_I64))
1216 IR_FOLD(BITCAST(C_U8))
1217 IR_FOLD(BITCAST(C_U16))
1218 IR_FOLD(BITCAST(C_U32))
1219 IR_FOLD(BITCAST(C_U64))
1220 IR_FOLD(BITCAST(C_FLOAT))
1221 IR_FOLD(BITCAST(C_DOUBLE))
1222 IR_FOLD(BITCAST(C_BOOL))
1223 IR_FOLD(BITCAST(C_CHAR))
1224 IR_FOLD(BITCAST(C_ADDR))
1225 {
1226 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] == ir_type_size[op1_insn->type]);
1227 	switch (IR_OPT_TYPE(opt)) {
1228 		default:
1229 			IR_ASSERT(0);
1230 		case IR_BOOL:
1231 			IR_FOLD_BOOL(op1_insn->val.i8 != 0);
1232 		case IR_I8:
1233 			IR_FOLD_CONST_I(op1_insn->val.i8);
1234 		case IR_I16:
1235 			IR_FOLD_CONST_I(op1_insn->val.i16);
1236 		case IR_I32:
1237 			IR_FOLD_CONST_I(op1_insn->val.i32);
1238 		case IR_I64:
1239 			IR_FOLD_CONST_I(op1_insn->val.i64);
1240 		case IR_U8:
1241 			IR_FOLD_CONST_U(op1_insn->val.u8);
1242 		case IR_U16:
1243 			IR_FOLD_CONST_U(op1_insn->val.u16);
1244 		case IR_U32:
1245 			IR_FOLD_CONST_U(op1_insn->val.u32);
1246 		case IR_U64:
1247 			IR_FOLD_CONST_U(op1_insn->val.u64);
1248 		case IR_FLOAT:
1249 			IR_FOLD_CONST_F(op1_insn->val.f);
1250 		case IR_DOUBLE:
1251 			IR_FOLD_CONST_D(op1_insn->val.d);
1252 		case IR_CHAR:
1253 			IR_FOLD_CONST_I(op1_insn->val.c);
1254 		case IR_ADDR:
1255 			IR_FOLD_CONST_U(op1_insn->val.addr);
1256 	}
1257 }
1258 
1259 IR_FOLD(INT2FP(C_I8))
1260 IR_FOLD(INT2FP(C_I16))
1261 IR_FOLD(INT2FP(C_I32))
1262 IR_FOLD(INT2FP(C_I64))
1263 {
1264 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1265 		IR_FOLD_CONST_D((double)op1_insn->val.i64);
1266 	} else {
1267 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1268 		IR_FOLD_CONST_F((float)op1_insn->val.i64);
1269 	}
1270 }
1271 
1272 IR_FOLD(INT2FP(C_U8))
1273 IR_FOLD(INT2FP(C_U16))
1274 IR_FOLD(INT2FP(C_U32))
1275 IR_FOLD(INT2FP(C_U64))
1276 {
1277 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1278 		IR_FOLD_CONST_D((double)op1_insn->val.u64);
1279 	} else {
1280 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1281 		IR_FOLD_CONST_F((float)op1_insn->val.u64);
1282 	}
1283 }
1284 
1285 IR_FOLD(FP2INT(C_FLOAT))
1286 {
1287 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1288 	switch (IR_OPT_TYPE(opt)) {
1289 		default:
1290 			IR_ASSERT(0);
1291 		case IR_I8:
1292 			IR_FOLD_CONST_I((int8_t)op1_insn->val.f);
1293 		case IR_I16:
1294 			IR_FOLD_CONST_I((int16_t)op1_insn->val.f);
1295 		case IR_I32:
1296 			IR_FOLD_CONST_I((int32_t)op1_insn->val.f);
1297 		case IR_I64:
1298 			IR_FOLD_CONST_I((int64_t)op1_insn->val.f);
1299 		case IR_U8:
1300 			IR_FOLD_CONST_U((uint8_t)op1_insn->val.f);
1301 		case IR_U16:
1302 			IR_FOLD_CONST_U((uint16_t)op1_insn->val.f);
1303 		case IR_U32:
1304 			IR_FOLD_CONST_U((uint32_t)op1_insn->val.f);
1305 		case IR_U64:
1306 			IR_FOLD_CONST_U((uint64_t)op1_insn->val.f);
1307 	}
1308 }
1309 
1310 IR_FOLD(FP2INT(C_DOUBLE))
1311 {
1312 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1313 	switch (IR_OPT_TYPE(opt)) {
1314 		default:
1315 			IR_ASSERT(0);
1316 		case IR_I8:
1317 			IR_FOLD_CONST_I((int8_t)op1_insn->val.d);
1318 		case IR_I16:
1319 			IR_FOLD_CONST_I((int16_t)op1_insn->val.d);
1320 		case IR_I32:
1321 			IR_FOLD_CONST_I((int32_t)op1_insn->val.d);
1322 		case IR_I64:
1323 			IR_FOLD_CONST_I((int64_t)op1_insn->val.d);
1324 		case IR_U8:
1325 			IR_FOLD_CONST_U((uint8_t)op1_insn->val.d);
1326 		case IR_U16:
1327 			IR_FOLD_CONST_U((uint16_t)op1_insn->val.d);
1328 		case IR_U32:
1329 			IR_FOLD_CONST_U((uint32_t)op1_insn->val.d);
1330 		case IR_U64:
1331 			IR_FOLD_CONST_U((uint64_t)op1_insn->val.d);
1332 	}
1333 }
1334 
1335 IR_FOLD(FP2FP(C_FLOAT))
1336 {
1337 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1338 		IR_FOLD_CONST_D((double)op1_insn->val.f);
1339 	} else {
1340 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1341 		IR_FOLD_COPY(op1);
1342 	}
1343 }
1344 
1345 IR_FOLD(FP2FP(C_DOUBLE))
1346 {
1347 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1348 		IR_FOLD_COPY(op1);
1349 	} else {
1350 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1351 		IR_FOLD_CONST_F((float)op1_insn->val.d);
1352 	}
1353 }
1354 
1355 // TODO: constant functions (e.g.  sin, cos)
1356 
1357 /* Copy Propagation */
1358 IR_FOLD(COPY(_))
1359 {
1360 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1361 	if (!op2) {
1362 		IR_FOLD_COPY(op1);
1363 	}
1364 	/* skip CSE */
1365 	IR_FOLD_EMIT;
1366 }
1367 
1368 IR_FOLD(PHI(_, _)) // TODO: PHI(_, _, _)
1369 {
1370 	if (op2 == op3 && op3 != IR_UNUSED) {
1371 		IR_FOLD_COPY(op2);
1372 	}
1373 	/* skip CSE */
1374 	opt = opt | (3 << IR_OPT_INPUTS_SHIFT);
1375 	IR_FOLD_EMIT;
1376 }
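/* Reader note (editorial): a PHI whose two value operands are identical collapses to that
 * value; otherwise the inputs count (3, presumably including the merge operand) is packed
 * into opt and the node is emitted without CSE. */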
1377 
1378 IR_FOLD(COND(C_BOOL, _)) // TODO: COND(CONST, _, _)
1379 IR_FOLD(COND(C_U8, _))
1380 IR_FOLD(COND(C_U16, _))
1381 IR_FOLD(COND(C_U32, _))
1382 IR_FOLD(COND(C_U64, _))
1383 IR_FOLD(COND(C_ADDR, _))
1384 IR_FOLD(COND(C_CHAR, _))
1385 IR_FOLD(COND(C_I8, _))
1386 IR_FOLD(COND(C_I16, _))
1387 IR_FOLD(COND(C_I32, _))
1388 IR_FOLD(COND(C_I64, _))
1389 IR_FOLD(COND(C_DOUBLE, _))
1390 IR_FOLD(COND(C_FLOAT, _))
1391 {
1392 	if (ir_const_is_true(op1_insn)) {
1393 		IR_FOLD_COPY(op2);
1394 	} else {
1395 		IR_FOLD_COPY(op3);
1396 	}
1397 }
1398 
1399 IR_FOLD(BITCAST(_))
1400 {
1401 	if (IR_OPT_TYPE(opt) == op1_insn->type) {
1402 		IR_FOLD_COPY(op1);
1403 	}
1404 	IR_FOLD_NEXT;
1405 }
1406 
1407 /* Algebraic simplifications */
1408 IR_FOLD(ABS(ABS))
1409 {
1410 	/* abs(x = abs(y)) => x */
1411 	IR_FOLD_COPY(op1);
1412 }
1413 
1414 IR_FOLD(ABS(NEG))
1415 {
1416 	/* abs(neg(y)) => abs(y) */
1417 	op1 = op1_insn->op1;
1418 	IR_FOLD_RESTART;
1419 }
1420 
1421 IR_FOLD(NEG(NEG))
1422 IR_FOLD(NOT(NOT))
1423 IR_FOLD(BSWAP(BSWAP))
1424 {
1425 	/* f(f(y)) => y */
1426 	IR_FOLD_COPY(op1_insn->op1);
1427 }
1428 
1429 IR_FOLD(EQ(_, C_BOOL))
1430 {
1431 	if (op2 == IR_TRUE) {
1432 		IR_FOLD_COPY(op1);
1433 	} else {
1434 		opt = IR_OPT(IR_NOT, IR_BOOL);
1435 		op2 = IR_UNUSED;
1436 		IR_FOLD_RESTART;
1437 	}
1438 }
1439 
1440 IR_FOLD(NE(_, C_BOOL))
1441 {
1442 	if (op2 != IR_TRUE) {
1443 		IR_FOLD_COPY(op1);
1444 	} else {
1445 		opt = IR_OPT(IR_NOT, IR_BOOL);
1446 		op2 = IR_UNUSED;
1447 		IR_FOLD_RESTART;
1448 	}
1449 }
1450 
1451 IR_FOLD(EQ(ZEXT, C_U16))
1452 IR_FOLD(EQ(ZEXT, C_U32))
1453 IR_FOLD(EQ(ZEXT, C_U64))
1454 IR_FOLD(EQ(ZEXT, C_I16))
1455 IR_FOLD(EQ(ZEXT, C_I32))
1456 IR_FOLD(EQ(ZEXT, C_I64))
1457 IR_FOLD(EQ(ZEXT, C_ADDR))
1458 IR_FOLD(EQ(SEXT, C_U16))
1459 IR_FOLD(EQ(SEXT, C_U32))
1460 IR_FOLD(EQ(SEXT, C_U64))
1461 IR_FOLD(EQ(SEXT, C_I16))
1462 IR_FOLD(EQ(SEXT, C_I32))
1463 IR_FOLD(EQ(SEXT, C_I64))
1464 IR_FOLD(EQ(SEXT, C_ADDR))
1465 {
1466 	if (op2_insn->val.u64 == 0 && ctx->ir_base[op1_insn->op1].type == IR_BOOL) {
1467 		opt = IR_OPT(IR_NOT, IR_BOOL);
1468 		op1 = op1_insn->op1;
1469 		op2 = IR_UNUSED;
1470 		IR_FOLD_RESTART;
1471 	}
1472 	IR_FOLD_NEXT;
1473 }
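/* Reader note (editorial): comparing a zero- or sign-extended IR_BOOL against the constant 0
 * reduces EQ to NOT of the original boolean; the matching NE rules below reduce to the
 * boolean itself. */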
1474 
1475 IR_FOLD(NE(ZEXT, C_U16))
1476 IR_FOLD(NE(ZEXT, C_U32))
1477 IR_FOLD(NE(ZEXT, C_U64))
1478 IR_FOLD(NE(ZEXT, C_I16))
1479 IR_FOLD(NE(ZEXT, C_I32))
1480 IR_FOLD(NE(ZEXT, C_I64))
1481 IR_FOLD(NE(ZEXT, C_ADDR))
1482 IR_FOLD(NE(SEXT, C_U16))
1483 IR_FOLD(NE(SEXT, C_U32))
1484 IR_FOLD(NE(SEXT, C_U64))
1485 IR_FOLD(NE(SEXT, C_I16))
1486 IR_FOLD(NE(SEXT, C_I32))
1487 IR_FOLD(NE(SEXT, C_I64))
1488 IR_FOLD(NE(SEXT, C_ADDR))
1489 {
1490 	if (op2_insn->val.u64 == 0 && ctx->ir_base[op1_insn->op1].type == IR_BOOL) {
1491 		IR_FOLD_COPY(op1_insn->op1);
1492 	}
1493 	IR_FOLD_NEXT;
1494 }
1495 
1496 IR_FOLD(NOT(EQ))
1497 IR_FOLD(NOT(NE))
1498 IR_FOLD(NOT(LT))
1499 IR_FOLD(NOT(GE))
1500 IR_FOLD(NOT(LE))
1501 IR_FOLD(NOT(GT))
1502 IR_FOLD(NOT(ULT))
1503 IR_FOLD(NOT(UGE))
1504 IR_FOLD(NOT(ULE))
1505 IR_FOLD(NOT(UGT))
1506 {
1507 	if (IR_IS_TYPE_INT(ctx->ir_base[op1_insn->op1].type)) {
1508 		opt = op1_insn->opt ^ 1;
1509 		op1 = op1_insn->op1;
1510 		op2 = op1_insn->op2;
1511 		IR_FOLD_RESTART;
1512 	}
1513 	IR_FOLD_NEXT;
1514 }
1515 
1516 IR_FOLD(ADD(_, C_U8))
1517 IR_FOLD(ADD(_, C_U16))
1518 IR_FOLD(ADD(_, C_U32))
1519 IR_FOLD(ADD(_, C_U64))
1520 IR_FOLD(ADD(_, C_I8))
1521 IR_FOLD(ADD(_, C_I16))
1522 IR_FOLD(ADD(_, C_I32))
1523 IR_FOLD(ADD(_, C_I64))
1524 IR_FOLD(ADD(_, C_ADDR))
1525 IR_FOLD(SUB(_, C_U8))
1526 IR_FOLD(SUB(_, C_U16))
1527 IR_FOLD(SUB(_, C_U32))
1528 IR_FOLD(SUB(_, C_U64))
1529 IR_FOLD(SUB(_, C_I8))
1530 IR_FOLD(SUB(_, C_I16))
1531 IR_FOLD(SUB(_, C_I32))
1532 IR_FOLD(SUB(_, C_I64))
1533 IR_FOLD(SUB(_, C_ADDR))
1534 IR_FOLD(ADD_OV(_, C_U8))
1535 IR_FOLD(ADD_OV(_, C_U16))
1536 IR_FOLD(ADD_OV(_, C_U32))
1537 IR_FOLD(ADD_OV(_, C_U64))
1538 IR_FOLD(ADD_OV(_, C_I8))
1539 IR_FOLD(ADD_OV(_, C_I16))
1540 IR_FOLD(ADD_OV(_, C_I32))
1541 IR_FOLD(ADD_OV(_, C_I64))
1542 IR_FOLD(ADD_OV(_, C_ADDR))
1543 IR_FOLD(SUB_OV(_, C_U8))
1544 IR_FOLD(SUB_OV(_, C_U16))
1545 IR_FOLD(SUB_OV(_, C_U32))
1546 IR_FOLD(SUB_OV(_, C_U64))
1547 IR_FOLD(SUB_OV(_, C_I8))
1548 IR_FOLD(SUB_OV(_, C_I16))
1549 IR_FOLD(SUB_OV(_, C_I32))
1550 IR_FOLD(SUB_OV(_, C_I64))
1551 IR_FOLD(SUB_OV(_, C_ADDR))
1552 {
1553 	if (op2_insn->val.u64 == 0) {
1554 		/* a +/- 0 => a */
1555 		IR_FOLD_COPY(op1);
1556 	}
1557 	IR_FOLD_NEXT;
1558 }
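/* Reader note (editorial): the a +/- 0 => a identity is applied to integer and address
 * constants only; it would not be safe for floats, where e.g. -0.0 + 0.0 yields +0.0. */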
1559 
1560 IR_FOLD(SUB(C_I8, _))
1561 IR_FOLD(SUB(C_I16, _))
1562 IR_FOLD(SUB(C_I32, _))
1563 IR_FOLD(SUB(C_I64, _))
1564 {
1565 	if (op1_insn->val.u64 == 0) {
1566 		/* 0 - a => -a (invalid for +0.0) */
1567 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1568 		op1 = op2;
1569 		op2 = IR_UNUSED;
1570 		IR_FOLD_RESTART;
1571 	}
1572 	IR_FOLD_NEXT;
1573 }
1574 
1575 IR_FOLD(UGE(_, C_U8))
1576 IR_FOLD(UGE(_, C_U16))
1577 IR_FOLD(UGE(_, C_U32))
1578 IR_FOLD(UGE(_, C_U64))
1579 IR_FOLD(UGE(_, C_I8))
1580 IR_FOLD(UGE(_, C_I16))
1581 IR_FOLD(UGE(_, C_I32))
1582 IR_FOLD(UGE(_, C_I64))
1583 IR_FOLD(UGE(_, C_ADDR))
1584 {
1585 	if (op2_insn->val.u64 == 0) {
1586 		IR_FOLD_COPY(IR_TRUE);
1587 	}
1588 	IR_FOLD_NEXT;
1589 }
1590 
1591 IR_FOLD(UGT(_, C_U8))
1592 IR_FOLD(UGT(_, C_U16))
1593 IR_FOLD(UGT(_, C_U32))
1594 IR_FOLD(UGT(_, C_U64))
1595 IR_FOLD(UGT(_, C_I8))
1596 IR_FOLD(UGT(_, C_I16))
1597 IR_FOLD(UGT(_, C_I32))
1598 IR_FOLD(UGT(_, C_I64))
1599 IR_FOLD(UGT(_, C_ADDR))
1600 {
1601 	if (op2_insn->val.u64 == 0) {
1602 		opt = IR_OPT(IR_NE, IR_BOOL);
1603 		IR_FOLD_RESTART;
1604 	}
1605 	IR_FOLD_NEXT;
1606 }
1607 
1608 IR_FOLD(ULT(_, C_U8))
1609 IR_FOLD(ULT(_, C_U16))
1610 IR_FOLD(ULT(_, C_U32))
1611 IR_FOLD(ULT(_, C_U64))
1612 IR_FOLD(ULT(_, C_I8))
1613 IR_FOLD(ULT(_, C_I16))
1614 IR_FOLD(ULT(_, C_I32))
1615 IR_FOLD(ULT(_, C_I64))
1616 IR_FOLD(ULT(_, C_ADDR))
1617 {
1618 	if (op2_insn->val.u64 == 0) {
1619 		IR_FOLD_COPY(IR_FALSE);
1620 	}
1621 	IR_FOLD_NEXT;
1622 }
1623 
1624 IR_FOLD(ULE(_, C_U8))
1625 IR_FOLD(ULE(_, C_U16))
1626 IR_FOLD(ULE(_, C_U32))
1627 IR_FOLD(ULE(_, C_U64))
1628 IR_FOLD(ULE(_, C_I8))
1629 IR_FOLD(ULE(_, C_I16))
1630 IR_FOLD(ULE(_, C_I32))
1631 IR_FOLD(ULE(_, C_I64))
1632 IR_FOLD(ULE(_, C_ADDR))
1633 {
1634 	if (op2_insn->val.u64 == 0) {
1635 		opt = IR_OPT(IR_EQ, IR_BOOL);
1636 		IR_FOLD_RESTART;
1637 	}
1638 	IR_FOLD_NEXT;
1639 }
1640 
1641 IR_FOLD(ADD(NEG, _))
1642 {
1643 	/* (-a) + b => b - a */
1644 	opt++; /* ADD -> SUB */
1645 	op1 = op2;
1646 	op2 = op1_insn->op1;
1647 	IR_FOLD_RESTART;
1648 }
1649 
1650 IR_FOLD(ADD(_, NEG))
1651 IR_FOLD(SUB(_, NEG))
1652 {
1653 	/* a + (-b) => a - b */
1654 	opt ^= 1; /* ADD <-> SUB */
1655 	op2 = op2_insn->op1;
1656 	IR_FOLD_RESTART;
1657 }
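/* Reader note (editorial): the opt++ / opt ^= 1 rewrites above rely on IR_SUB being
 * IR_ADD + 1 in the opcode numbering (with IR_ADD's low bit clear), so flipping the low
 * bit of opt switches between the two operations while keeping the type bits intact. */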
1658 
1659 IR_FOLD(ADD(SUB, _))
1660 {
1661 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1662 		if (op1_insn->op2 == op2) {
1663 			/* (a - b) + b => a */
1664 			IR_FOLD_COPY(op1_insn->op1);
1665 		}
1666 	}
1667 	IR_FOLD_NEXT;
1668 }
1669 
1670 IR_FOLD(ADD(_, SUB))
1671 {
1672 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1673 		if (op2_insn->op2 == op1) {
1674 			/* a + (b - a) => b */
1675 			IR_FOLD_COPY(op2_insn->op1);
1676 		}
1677 	}
1678 	IR_FOLD_NEXT;
1679 }
1680 
1681 IR_FOLD(SUB(ADD, _))
1682 {
1683 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1684 		if (op1_insn->op1 == op2) {
1685 			/* (a + b) - a => b */
1686 			IR_FOLD_COPY(op1_insn->op2);
1687 		} else if (op1_insn->op2 == op2) {
1688 			/* (a + b) - b => a */
1689 			IR_FOLD_COPY(op1_insn->op1);
1690 		}
1691 	}
1692 	IR_FOLD_NEXT;
1693 }
1694 
1695 IR_FOLD(SUB(_, ADD))
1696 {
1697 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1698 		if (op2_insn->op1 == op1) {
1699 			/* a - (a + b) => -b */
1700 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1701 			op1 = op2_insn->op2;
1702 			op2 = IR_UNUSED;
1703 			IR_FOLD_RESTART;
1704 		} else if (op2_insn->op2 == op1) {
1705 			/* b - (a + b) => -a */
1706 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1707 			op1 = op2_insn->op1;
1708 			op2 = IR_UNUSED;
1709 			IR_FOLD_RESTART;
1710 		}
1711 	}
1712 	IR_FOLD_NEXT;
1713 }
1714 
1715 IR_FOLD(SUB(SUB, _))
1716 {
1717 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1718 		if (op1_insn->op1 == op2) {
1719 			/* (a - b) - a => -b */
1720 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1721 			op1 = op1_insn->op2;
1722 			op2 = IR_UNUSED;
1723 			IR_FOLD_RESTART;
1724 		}
1725 	}
1726 	IR_FOLD_NEXT;
1727 }
1728 
1729 IR_FOLD(SUB(_, SUB))
1730 {
1731 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1732 		if (op2_insn->op1 == op1) {
1733 			/* a - (a - b) => b */
1734 			IR_FOLD_COPY(op2_insn->op2);
1735 		}
1736 	}
1737 	IR_FOLD_NEXT;
1738 }
1739 
1740 IR_FOLD(SUB(ADD, ADD))
1741 {
1742 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1743 		if (op1_insn->op1 == op2_insn->op1) {
1744 			/* (a + b) - (a + c) => b - c */
1745 			op1 = op1_insn->op2;
1746 			op2 = op2_insn->op2;
1747 			IR_FOLD_RESTART;
1748 		} else if (op1_insn->op1 == op2_insn->op2) {
1749 			/* (a + b) - (c + a) => b - c */
1750 			op1 = op1_insn->op2;
1751 			op2 = op2_insn->op1;
1752 			IR_FOLD_RESTART;
1753 		} else if (op1_insn->op2 == op2_insn->op1) {
1754 			/* (a + b) - (b + c) => a - c */
1755 			op1 = op1_insn->op1;
1756 			op2 = op2_insn->op2;
1757 			IR_FOLD_RESTART;
1758 		} else if (op1_insn->op2 == op2_insn->op2) {
1759 			/* (a + b) - (c + b) => a - c */
1760 			op1 = op1_insn->op1;
1761 			op2 = op2_insn->op1;
1762 			IR_FOLD_RESTART;
1763 		}
1764 	}
1765 	IR_FOLD_NEXT;
1766 }
1767 
1768 // IR_FOLD(SUB(NEG, CONST))  TODO: -a - b => -b - a
1769 // IR_FOLD(MUL(NEG, CONST))  TODO: -a * b => a * -b
1770 // IR_FOLD(DIV(NEG, CONST))  TODO: -a / b => a / -b
1771 
1772 IR_FOLD(MUL(_, C_U8))
1773 IR_FOLD(MUL(_, C_U16))
1774 IR_FOLD(MUL(_, C_U32))
1775 IR_FOLD(MUL(_, C_U64))
1776 IR_FOLD(MUL(_, C_ADDR))
1777 {
1778 	if (op2_insn->val.u64 == 0) {
1779 		/* a * 0 => 0 */
1780 		IR_FOLD_COPY(op2);
1781 	} else if (op2_insn->val.u64 == 1) {
1782 		IR_FOLD_COPY(op1);
1783 	} else if (op2_insn->val.u64 == 2 && IR_OPT_TYPE(opt) != IR_ADDR) {
1784 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1785 		op2 = op1;
1786 		IR_FOLD_RESTART;
1787 	}
1788 	IR_FOLD_NEXT;
1789 }
1790 
1791 IR_FOLD(MUL(_, C_I8))
1792 IR_FOLD(MUL(_, C_I16))
1793 IR_FOLD(MUL(_, C_I32))
1794 IR_FOLD(MUL(_, C_I64))
1795 {
1796 	if (op2_insn->val.i64 == 0) {
1797 		/* a * 0 => 0 */
1798 		IR_FOLD_COPY(op2);
1799 	} else if (op2_insn->val.i64 == 1) {
1800 		/* a * 1 => a */
1801 		IR_FOLD_COPY(op1);
1802 	} else if (op2_insn->val.i64 == 2) {
1803 		/* a * 2 => a + a */
1804 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1805 		op2 = op1;
1806 		IR_FOLD_RESTART;
1807 	} else if (op2_insn->val.i64 == -1) {
1808 		/* a * -1 => -a */
1809 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1810 		op2 = IR_UNUSED;
1811 		IR_FOLD_RESTART;
1812 	}
1813 	IR_FOLD_NEXT;
1814 }
1815 
1816 IR_FOLD(MUL(_, C_DOUBLE))
1817 {
1818 	if (op2_insn->val.d == 1.0) {
1819 		/* a * 1.0 => a */
1820 		IR_FOLD_COPY(op1);
1821 	} else if (op2_insn->val.d == 2.0) {
1822 		/* a * 2.0 => a + a */
1823 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1824 		op2 = op1;
1825 		IR_FOLD_RESTART;
1826 	} else if (op2_insn->val.d == -1.0) {
1827 		/* a * -1.0 => -a */
1828 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1829 		op2 = IR_UNUSED;
1830 		IR_FOLD_RESTART;
1831 	}
1832 	IR_FOLD_NEXT;
1833 }
1834 
1835 IR_FOLD(MUL(_, C_FLOAT))
1836 {
1837 	if (op2_insn->val.f == 1.0) {
1838 		/* a * 1.0 => a */
1839 		IR_FOLD_COPY(op1);
1840 	} else if (op2_insn->val.f == 2.0) {
1841 		/* a * 2.0 => a + a */
1842 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1843 		op2 = op1;
1844 		IR_FOLD_RESTART;
1845 	} else if (op2_insn->val.f == -1.0) {
1846 		/* a * -1.0 => -a */
1847 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1848 		op2 = IR_UNUSED;
1849 		IR_FOLD_RESTART;
1850 	}
1851 	IR_FOLD_NEXT;
1852 }
1853 
1854 IR_FOLD(DIV(_, C_U8))
1855 IR_FOLD(DIV(_, C_U16))
1856 IR_FOLD(DIV(_, C_U32))
1857 IR_FOLD(DIV(_, C_U64))
1858 {
1859 	if (op2_insn->val.u64 == 1) {
1860 		IR_FOLD_COPY(op1);
1861 	}
1862 	IR_FOLD_NEXT;
1863 }
1864 
1865 IR_FOLD(DIV(_, C_I8))
1866 IR_FOLD(DIV(_, C_I16))
1867 IR_FOLD(DIV(_, C_I32))
1868 IR_FOLD(DIV(_, C_I64))
1869 {
1870 	if (op2_insn->val.i64 == 1) {
1871 		/* a / 1 => a */
1872 		IR_FOLD_COPY(op1);
1873 	} else if (op2_insn->val.i64 == -1) {
1874 		/* a / -1 => -a */
1875 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1876 		op2 = IR_UNUSED;
1877 		IR_FOLD_RESTART;
1878 	}
1879 	IR_FOLD_NEXT;
1880 }
1881 
1882 IR_FOLD(MOD(_, C_U8))
1883 IR_FOLD(MOD(_, C_U16))
1884 IR_FOLD(MOD(_, C_U32))
1885 IR_FOLD(MOD(_, C_U64))
1886 IR_FOLD(MOD(_, C_I8))
1887 IR_FOLD(MOD(_, C_I16))
1888 IR_FOLD(MOD(_, C_I32))
1889 IR_FOLD(MOD(_, C_I64))
1890 {
1891 	if (op2_insn->val.i64 == 1) {
1892 		/* a % 1 => 0 */
1893 		IR_FOLD_CONST_U(0);
1894 	}
1895 	IR_FOLD_NEXT;
1896 }
1897 
1898 IR_FOLD(DIV(_, C_DOUBLE))
1899 {
1900 	if (op2_insn->val.d == 1.0) {
1901 		/* a / 1.0 => a */
1902 		IR_FOLD_COPY(op1);
1903 	} else if (op2_insn->val.d == -1.0) {
1904 		/* a / -1.0 => -a */
1905 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1906 		op2 = IR_UNUSED;
1907 		IR_FOLD_RESTART;
1908 	}
1909 	IR_FOLD_NEXT;
1910 }
1911 
1912 IR_FOLD(DIV(_, C_FLOAT))
1913 {
1914 	if (op2_insn->val.f == 1.0) {
1915 		/* a / 1.0 => a */
1916 		IR_FOLD_COPY(op1);
1917 	} else if (op2_insn->val.f == -1.0) {
1918 		/* a / -1.0 => -a */
1919 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1920 		op2 = IR_UNUSED;
1921 		IR_FOLD_RESTART;
1922 	}
1923 	IR_FOLD_NEXT;
1924 }
1925 
1926 IR_FOLD(MUL(NEG, NEG))
1927 IR_FOLD(DIV(NEG, NEG))
1928 {
1929 	op1 = op1_insn->op1;
1930 	op2 = op2_insn->op1;
1931 	IR_FOLD_RESTART;
1932 }
1933 
1934 IR_FOLD(AND(_, C_BOOL))
1935 {
1936 	IR_FOLD_COPY(op2_insn->val.b ? op1 : op2);
1937 }
1938 
1939 IR_FOLD(AND(_, C_U8))
1940 IR_FOLD(AND(_, C_I8))
1941 IR_FOLD(AND(_, C_CHAR))
1942 {
1943 	if (op2_insn->val.i8 == 0) {
1944 		/* a & 0 => 0 */
1945 		IR_FOLD_COPY(op2);
1946 	} else if (op2_insn->val.i8 == -1) {
1947 		IR_FOLD_COPY(op1);
1948 	}
1949 	IR_FOLD_NEXT;
1950 }
1951 
1952 IR_FOLD(AND(_, C_U16))
1953 IR_FOLD(AND(_, C_I16))
1954 {
1955 	if (op2_insn->val.i16 == 0) {
1956 		/* a & 0 => 0 */
1957 		IR_FOLD_COPY(op2);
1958 	} else if (op2_insn->val.i16 == -1) {
1959 		IR_FOLD_COPY(op1);
1960 	}
1961 	IR_FOLD_NEXT;
1962 }
1963 
1964 IR_FOLD(AND(_, C_U32))
1965 IR_FOLD(AND(_, C_I32))
1966 {
1967 	if (op2_insn->val.i32 == 0) {
1968 		/* a & 0 => 0 */
1969 		IR_FOLD_COPY(op2);
1970 	} else if (op2_insn->val.i32 == -1) {
1971 		IR_FOLD_COPY(op1);
1972 	}
1973 	IR_FOLD_NEXT;
1974 }
1975 
1976 IR_FOLD(AND(_, C_U64))
1977 IR_FOLD(AND(_, C_I64))
1978 {
1979 	if (op2_insn->val.i64 == 0) {
1980 		/* a & 0 => 0 */
1981 		IR_FOLD_COPY(op2);
1982 	} else if (op2_insn->val.i64 == -1) {
1983 		IR_FOLD_COPY(op1);
1984 	}
1985 	IR_FOLD_NEXT;
1986 }
1987 
1988 IR_FOLD(OR(_, C_BOOL))
1989 {
1990 	IR_FOLD_COPY(op2_insn->val.b ? op2 : op1);
1991 }
1992 
1993 IR_FOLD(OR(_, C_U8))
1994 IR_FOLD(OR(_, C_I8))
1995 IR_FOLD(OR(_, C_CHAR))
1996 {
1997 	if (op2_insn->val.i8 == -1) {
1998 		/* a | -1 => -1 */
1999 		IR_FOLD_COPY(op2);
2000 	} else if (op2_insn->val.i8 == 0) {
2001 		IR_FOLD_COPY(op1);
2002 	}
2003 	IR_FOLD_NEXT;
2004 }
2005 
2006 IR_FOLD(OR(_, C_U16))
2007 IR_FOLD(OR(_, C_I16))
2008 {
2009 	if (op2_insn->val.i16 == -1) {
2010 		/* a | -1 => -1 */
2011 		IR_FOLD_COPY(op2);
2012 	} else if (op2_insn->val.i16 == 0) {
2013 		IR_FOLD_COPY(op1);
2014 	}
2015 	IR_FOLD_NEXT;
2016 }
2017 
2018 IR_FOLD(OR(_, C_U32))
2019 IR_FOLD(OR(_, C_I32))
2020 {
2021 	if (op2_insn->val.i32 == -1) {
2022 		/* a | -1 => -1 */
2023 		IR_FOLD_COPY(op2);
2024 	} else if (op2_insn->val.i32 == 0) {
2025 		IR_FOLD_COPY(op1);
2026 	}
2027 	IR_FOLD_NEXT;
2028 }
2029 
2030 IR_FOLD(OR(_, C_U64))
2031 IR_FOLD(OR(_, C_I64))
2032 {
2033 	if (op2_insn->val.i64 == -1) {
2034 		/* a | -1 => -1 */
2035 		IR_FOLD_COPY(op2);
2036 	} else if (op2_insn->val.i64 == 0) {
2037 		IR_FOLD_COPY(op1);
2038 	}
2039 	IR_FOLD_NEXT;
2040 }
2041 
2042 IR_FOLD(XOR(_, C_BOOL))
2043 {
2044 	if (!op2_insn->val.b) {
2045 		/* a ^ 0 => a */
2046 		IR_FOLD_COPY(op1);
2047 	} else {
2048 		/* a ^ 1 => !a */
2049 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2050 		op2 = IR_UNUSED;
2051 		IR_FOLD_RESTART;
2052 	}
2053 }
2054 
2055 IR_FOLD(XOR(_, C_U8))
2056 IR_FOLD(XOR(_, C_I8))
2057 IR_FOLD(XOR(_, C_CHAR))
2058 {
2059 	if (op2_insn->val.i8 == 0) {
2060 		/* a ^ 0 => a */
2061 		IR_FOLD_COPY(op1);
2062 	} else if (op2_insn->val.i8 == -1) {
2063 		/* a ^ -1 => ~a */
2064 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2065 		op2 = IR_UNUSED;
2066 		IR_FOLD_RESTART;
2067 	}
2068 	IR_FOLD_NEXT;
2069 }
2070 
2071 IR_FOLD(XOR(_, C_U16))
2072 IR_FOLD(XOR(_, C_I16))
2073 {
2074 	if (op2_insn->val.i16 == 0) {
2075 		/* a ^ 0 => a */
2076 		IR_FOLD_COPY(op1);
2077 	} else if (op2_insn->val.i16 == -1) {
2078 		/* a ^ -1 => ~a */
2079 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2080 		op2 = IR_UNUSED;
2081 		IR_FOLD_RESTART;
2082 	}
2083 	IR_FOLD_NEXT;
2084 }
2085 
2086 IR_FOLD(XOR(_, C_U32))
2087 IR_FOLD(XOR(_, C_I32))
2088 {
2089 	if (op2_insn->val.i32 == 0) {
2090 		/* a ^ 0 => a */
2091 		IR_FOLD_COPY(op1);
2092 	} else if (op2_insn->val.i32 == -1) {
2093 		/* a ^ -1 => ~a */
2094 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2095 		op2 = IR_UNUSED;
2096 		IR_FOLD_RESTART;
2097 	}
2098 	IR_FOLD_NEXT;
2099 }
2100 
2101 IR_FOLD(XOR(_, C_U64))
2102 IR_FOLD(XOR(_, C_I64))
2103 {
2104 	if (op2_insn->val.i64 == 0) {
2105 		/* a ^ 0 => a */
2106 		IR_FOLD_COPY(op1);
2107 	} else if (op2_insn->val.i64 == -1) {
2108 		/* a ^ -1 => ~a */
2109 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2110 		op2 = IR_UNUSED;
2111 		IR_FOLD_RESTART;
2112 	}
2113 	IR_FOLD_NEXT;
2114 }
2115 
2116 IR_FOLD(SHL(_, C_U8))
2117 IR_FOLD(SHL(_, C_U16))
2118 IR_FOLD(SHL(_, C_U32))
2119 IR_FOLD(SHL(_, C_U64))
2120 IR_FOLD(SHL(_, C_I8))
2121 IR_FOLD(SHL(_, C_I16))
2122 IR_FOLD(SHL(_, C_I32))
2123 IR_FOLD(SHL(_, C_I64))
2124 {
2125 	if (op2_insn->val.u64 == 0) {
2126 		/* a << 0 => a */
2127 		IR_FOLD_COPY(op1);
2128 	} else if (op2_insn->val.u64 == 1) {
2129 		/* a << 1 => a + a */
2130 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
2131 		op2 = op1;
2132 		IR_FOLD_RESTART;
2133 	}
2134 	IR_FOLD_NEXT;
2135 }
2136 
2137 IR_FOLD(SHR(_, C_U8))
2138 IR_FOLD(SHR(_, C_U16))
2139 IR_FOLD(SHR(_, C_U32))
2140 IR_FOLD(SHR(_, C_U64))
2141 IR_FOLD(SHR(_, C_I8))
2142 IR_FOLD(SHR(_, C_I16))
2143 IR_FOLD(SHR(_, C_I32))
2144 IR_FOLD(SHR(_, C_I64))
2145 IR_FOLD(SAR(_, C_U8))
2146 IR_FOLD(SAR(_, C_U16))
2147 IR_FOLD(SAR(_, C_U32))
2148 IR_FOLD(SAR(_, C_U64))
2149 IR_FOLD(SAR(_, C_I8))
2150 IR_FOLD(SAR(_, C_I16))
2151 IR_FOLD(SAR(_, C_I32))
2152 IR_FOLD(SAR(_, C_I64))
2153 IR_FOLD(ROL(_, C_U8))
2154 IR_FOLD(ROL(_, C_U16))
2155 IR_FOLD(ROL(_, C_U32))
2156 IR_FOLD(ROL(_, C_U64))
2157 IR_FOLD(ROL(_, C_I8))
2158 IR_FOLD(ROL(_, C_I16))
2159 IR_FOLD(ROL(_, C_I32))
2160 IR_FOLD(ROL(_, C_I64))
2161 IR_FOLD(ROR(_, C_U8))
2162 IR_FOLD(ROR(_, C_U16))
2163 IR_FOLD(ROR(_, C_U32))
2164 IR_FOLD(ROR(_, C_U64))
2165 IR_FOLD(ROR(_, C_I8))
2166 IR_FOLD(ROR(_, C_I16))
2167 IR_FOLD(ROR(_, C_I32))
2168 IR_FOLD(ROR(_, C_I64))
2169 {
2170 	if (op2_insn->val.u64 == 0) {
2171 		/* a >> 0 => a (likewise for SAR/ROL/ROR by 0) */
2172 		IR_FOLD_COPY(op1);
2173 	}
2174 	IR_FOLD_NEXT;
2175 }
2176 
2177 IR_FOLD(SHL(C_U8, _))
2178 IR_FOLD(SHL(C_U16, _))
2179 IR_FOLD(SHL(C_U32, _))
2180 IR_FOLD(SHL(C_U64, _))
2181 IR_FOLD(SHL(C_I8, _))
2182 IR_FOLD(SHL(C_I16, _))
2183 IR_FOLD(SHL(C_I32, _))
2184 IR_FOLD(SHL(C_I64, _))
2185 IR_FOLD(SHR(C_U8, _))
2186 IR_FOLD(SHR(C_U16, _))
2187 IR_FOLD(SHR(C_U32, _))
2188 IR_FOLD(SHR(C_U64, _))
2189 IR_FOLD(SHR(C_I8, _))
2190 IR_FOLD(SHR(C_I16, _))
2191 IR_FOLD(SHR(C_I32, _))
2192 IR_FOLD(SHR(C_I64, _))
2193 {
2194 	if (op1_insn->val.u64 == 0) {
2195 		/* 0 << a => 0, 0 >> a => 0 */
2196 		IR_FOLD_COPY(op1);
2197 	}
2198 	IR_FOLD_NEXT;
2199 }
2200 
2201 IR_FOLD(SAR(C_U8, _))
2202 IR_FOLD(SAR(C_I8, _))
2203 IR_FOLD(ROL(C_U8, _))
2204 IR_FOLD(ROL(C_I8, _))
2205 IR_FOLD(ROR(C_U8, _))
2206 IR_FOLD(ROR(C_I8, _))
2207 {
2208 	if (op1_insn->val.i8 == 0 || op1_insn->val.i8 == -1) {
2209 		IR_FOLD_COPY(op1);
2210 	}
2211 	IR_FOLD_NEXT;
2212 }
2213 
2214 IR_FOLD(SAR(C_U16, _))
2215 IR_FOLD(SAR(C_I16, _))
2216 IR_FOLD(ROL(C_U16, _))
2217 IR_FOLD(ROL(C_I16, _))
2218 IR_FOLD(ROR(C_U16, _))
2219 IR_FOLD(ROR(C_I16, _))
2220 {
2221 	if (op1_insn->val.i16 == 0 || op1_insn->val.i16 == -1) {
2222 		IR_FOLD_COPY(op1);
2223 	}
2224 	IR_FOLD_NEXT;
2225 }
2226 
2227 IR_FOLD(SAR(C_U32, _))
2228 IR_FOLD(SAR(C_I32, _))
2229 IR_FOLD(ROL(C_U32, _))
2230 IR_FOLD(ROL(C_I32, _))
2231 IR_FOLD(ROR(C_U32, _))
2232 IR_FOLD(ROR(C_I32, _))
2233 {
2234 	if (op1_insn->val.i32 == 0 || op1_insn->val.i32 == -1) {
2235 		IR_FOLD_COPY(op1);
2236 	}
2237 	IR_FOLD_NEXT;
2238 }
2239 
2240 IR_FOLD(SAR(C_U64, _))
2241 IR_FOLD(SAR(C_I64, _))
2242 IR_FOLD(ROL(C_U64, _))
2243 IR_FOLD(ROL(C_I64, _))
2244 IR_FOLD(ROR(C_U64, _))
2245 IR_FOLD(ROR(C_I64, _))
2246 {
2247 	if (op1_insn->val.i64 == 0 || op1_insn->val.i64 == -1) {
2248 		IR_FOLD_COPY(op1);
2249 	}
2250 	IR_FOLD_NEXT;
2251 }
2252 
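/*
 * Editor's note: the four groups above fold shifts/rotates whose first
 * operand is a constant with all bits equal, e.g. (32-bit operands):
 *     SAR(0, n)  => 0        ROL(0, n)  => 0
 *     SAR(-1, n) => -1       ROR(-1, n) => -1
 * An arithmetic shift or a rotate of an all-zero/all-one pattern can never
 * change it, whatever the (possibly non-constant) shift count is.
 */
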
2253 IR_FOLD(LT(ABS, C_I8))
2254 IR_FOLD(LT(ABS, C_I16))
2255 IR_FOLD(LT(ABS, C_I32))
2256 IR_FOLD(LT(ABS, C_I64))
2257 IR_FOLD(LT(ABS, C_FLOAT))
2258 IR_FOLD(LT(ABS, C_DOUBLE))
2259 {
2260 	if (op2_insn->val.u64 == 0) {
2261 		/* abs() < 0 => false */
2262 		IR_FOLD_COPY(IR_FALSE);
2263 	}
2264 	IR_FOLD_NEXT;
2265 }
2266 
2267 IR_FOLD(GE(ABS, C_I8))
2268 IR_FOLD(GE(ABS, C_I16))
2269 IR_FOLD(GE(ABS, C_I32))
2270 IR_FOLD(GE(ABS, C_I64))
2271 IR_FOLD(GE(ABS, C_FLOAT))
2272 IR_FOLD(GE(ABS, C_DOUBLE))
2273 {
2274 	if (op2_insn->val.u64 == 0) {
2275 		/* abs() >= 0 => true */
2276 		IR_FOLD_COPY(IR_TRUE);
2277 	}
2278 	IR_FOLD_NEXT;
2279 }
2280 
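/*
 * Editor's note: only a zero constant is special-cased above, because
 * ABS(x) compared against 0 has a data-independent answer (|x| is taken to
 * be non-negative), while a comparison against any other constant still
 * depends on x.  Corner cases such as a NaN operand or the most negative
 * integer are not distinguished here.
 */
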
2281 // TODO: conversions
2282 IR_FOLD(FP2FP(FP2FP))
2283 {
2284 	if (IR_OPT_TYPE(opt) == IR_FLOAT) {
2285 		/* (float)(double)f => f */
2286 		IR_ASSERT(op1_insn->type == IR_DOUBLE);
2287 		IR_ASSERT(ctx->ir_base[op1_insn->op1].type == IR_FLOAT);
2288 		IR_FOLD_COPY(op1_insn->op1);
2289 	}
2290 	IR_FOLD_NEXT;
2291 }
2292 
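/*
 * Editor's note: the fold above removes a widen-then-narrow pair, e.g.
 *     FP2FP.float(FP2FP.double(f))  =>  f        (f of type float)
 * which is exact because every float is representable as a double.  The
 * opposite pair, double -> float -> double, is not removed, since the inner
 * narrowing may already have rounded the value.
 */
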
2293 IR_FOLD(FP2INT(INT2FP))
2294 {
2295 	ir_type dst_type = IR_OPT_TYPE(opt);
2296 	ir_type src_type = ctx->ir_base[op1_insn->op1].type;
2297 
2298 	if (ir_type_size[src_type] >= ir_type_size[op1_insn->type]) {
2299 		/* the source integer type may not be exactly representable in the intermediate floating-point type */
2300 		IR_FOLD_NEXT;
2301 	}
2302 	/* (int)(double)i => i */
2303 	if (src_type == dst_type) {
2304 		IR_FOLD_COPY(op1_insn->op1);
2305 	}
2306 	IR_FOLD_NEXT;
2307 }
2308 
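/*
 * Editor's note: FP2INT(INT2FP(i)) can only be replaced by i when the
 * intermediate floating-point type is wide enough to represent every value
 * of the source integer type exactly.  The byte-size test above is a simple
 * conservative proxy for that, e.g. (with the usual sizes):
 *     int32 -> double -> int32   is folded   (4 < 8),
 *     int64 -> double -> int64   is not      (8 >= 8; a double has only a
 *                                             53-bit significand).
 * The result type must also match the original integer type.
 */
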
2309 IR_FOLD(TRUNC(ZEXT))
2310 IR_FOLD(TRUNC(SEXT))
2311 {
2312 	ir_type dst_type = IR_OPT_TYPE(opt);
2313 	ir_type src_type = ctx->ir_base[op1_insn->op1].type;
2314 
2315 	/* (int32_t)(int64_t)i => i */
2316 	if (src_type == dst_type) {
2317 		IR_FOLD_COPY(op1_insn->op1);
2318 	} else if (ir_type_size[src_type] == ir_type_size[dst_type]) {
2319 		opt = IR_OPT(IR_BITCAST, dst_type);
2320 		op1 = op1_insn->op1;
2321 		IR_FOLD_RESTART;
2322 	} else if (ir_type_size[src_type] > ir_type_size[dst_type]) {
2323 		opt = IR_OPT(IR_TRUNC, dst_type);
2324 		op1 = op1_insn->op1;
2325 		IR_FOLD_RESTART;
2326 	} else {
2327 		opt = IR_OPT(op1_insn->op, dst_type);
2328 		op1 = op1_insn->op1;
2329 		IR_FOLD_RESTART;
2330 	}
2331 	IR_FOLD_NEXT;
2332 }
2333 
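/*
 * Editor's note, using an informal "op.type" notation: with x of type i16,
 * the rule above picks the cheapest equivalent of truncating an extended
 * value, depending on how the target width relates to x:
 *     TRUNC.i16(SEXT.i64(x)) => x                (same type)
 *     TRUNC.u16(SEXT.i64(x)) => BITCAST.u16(x)   (same size, other type)
 *     TRUNC.i8 (SEXT.i64(x)) => TRUNC.i8(x)      (narrower than x)
 *     TRUNC.i32(SEXT.i64(x)) => SEXT.i32(x)      (wider than x: re-extend
 *                                                 with the original op)
 */
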
2334 IR_FOLD(TRUNC(BITCAST))
2335 IR_FOLD(ZEXT(BITCAST))
2336 IR_FOLD(SEXT(BITCAST))
2337 {
2338 	if (IR_IS_TYPE_INT(ctx->ir_base[op1_insn->op1].type)) {
2339 		op1 = op1_insn->op1;
2340 		IR_FOLD_RESTART;
2341 	}
2342 	IR_FOLD_NEXT;
2343 }
2344 
2345 IR_FOLD(BITCAST(BITCAST))
2346 {
2347 	ir_type dst_type = IR_OPT_TYPE(opt);
2348 	ir_type src_type = ctx->ir_base[op1_insn->op1].type;
2349 
2350 	if (src_type == dst_type) {
2351 		IR_FOLD_COPY(op1_insn->op1);
2352 	} else if (IR_IS_TYPE_INT(src_type) == IR_IS_TYPE_INT(dst_type)) {
2353 		op1 = op1_insn->op1;
2354 		IR_FOLD_RESTART;
2355 	}
2356 	IR_FOLD_NEXT;
2357 }
2358 
2359 IR_FOLD(TRUNC(TRUNC))
2360 IR_FOLD(ZEXT(ZEXT))
2361 IR_FOLD(SEXT(SEXT))
2362 {
2363 	op1 = op1_insn->op1;
2364 	IR_FOLD_RESTART;
2365 }
2366 
2367 IR_FOLD(SEXT(ZEXT))
2368 {
2369 	op1 = op1_insn->op1;
2370 	opt = IR_OPT(IR_ZEXT, IR_OPT_TYPE(opt));
2371 	IR_FOLD_RESTART;
2372 }
2373 
2374 IR_FOLD(SEXT(AND))
2375 {
2376 	if (IR_IS_CONST_REF(op1_insn->op2)
2377 	 && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)
2378 	 && !(ctx->ir_base[op1_insn->op2].val.u64
2379 			& (1ULL << ((ir_type_size[op1_insn->type] * 8) - 1)))) {
2380 		/* SEXT(AND(_, 0b0*)) -> ZEXT(AND(_, 0b0*)) */
2381 		opt = IR_OPT(IR_ZEXT, IR_OPT_TYPE(opt));
2382 		IR_FOLD_RESTART;
2383 	}
2384 	IR_FOLD_NEXT;
2385 }
2386 
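/*
 * Editor's note: if the AND mask has its sign bit clear (in the operand's
 * own width), the masked value is always non-negative, so sign- and zero-
 * extension produce the same result and the rule canonicalizes to ZEXT, e.g.
 *     SEXT.i32(AND.i8(x, 0x7f))  =>  ZEXT.i32(AND.i8(x, 0x7f))
 * The bit test above checks exactly that top bit of the 8/16/32-bit mask.
 */
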
2387 IR_FOLD(TRUNC(AND))
2388 {
2389 	if (IR_IS_CONST_REF(op1_insn->op2)) {
2390 		size_t size = ir_type_size[IR_OPT_TYPE(opt)];
2391 		uint64_t mask = ctx->ir_base[op1_insn->op2].val.u64;
2392 
2393 		if (size == 1) {
2394 			if (mask == 0xff) {
2395 				op1 = op1_insn->op1;
2396 				IR_FOLD_RESTART;
2397 			}
2398 		} else if (size == 2) {
2399 			if (mask == 0xffff) {
2400 				op1 = op1_insn->op1;
2401 				IR_FOLD_RESTART;
2402 			}
2403 		} else if (size == 4) {
2404 			if (mask == 0xffffffff) {
2405 				op1 = op1_insn->op1;
2406 				IR_FOLD_RESTART;
2407 			}
2408 		}
2409 	}
2410 	IR_FOLD_NEXT;
2411 }
2412 
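/*
 * Editor's note: when the AND mask keeps exactly the bytes that survive the
 * truncation anyway, the AND is redundant, e.g.
 *     TRUNC.i8(AND.i32(x, 0xff))  =>  TRUNC.i8(x)
 * Only the full-width masks 0xff / 0xffff / 0xffffffff are recognized here;
 * a narrower mask would still change the truncated value.
 */
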
2413 IR_FOLD(AND(SHR, C_I8))
2414 IR_FOLD(AND(SHR, C_U8))
2415 {
2416 	if (IR_IS_CONST_REF(op1_insn->op2)) {
2417 		if (((uint8_t)-1) >> ctx->ir_base[op1_insn->op2].val.u8 == op2_insn->val.u8) {
2418 			IR_FOLD_COPY(op1);
2419 		}
2420 	}
2421 	IR_FOLD_NEXT;
2422 }
2423 
2424 IR_FOLD(AND(SHR, C_I16))
2425 IR_FOLD(AND(SHR, C_U16))
2426 {
2427 	if (IR_IS_CONST_REF(op1_insn->op2)) {
2428 		if (((uint16_t)-1) >> ctx->ir_base[op1_insn->op2].val.u16 == op2_insn->val.u16) {
2429 			IR_FOLD_COPY(op1);
2430 		}
2431 	}
2432 	IR_FOLD_NEXT;
2433 }
2434 
2435 IR_FOLD(AND(SHR, C_I32))
2436 IR_FOLD(AND(SHR, C_U32))
2437 {
2438 	if (IR_IS_CONST_REF(op1_insn->op2)) {
2439 		if (((uint32_t)-1) >> ctx->ir_base[op1_insn->op2].val.u32 == op2_insn->val.u32) {
2440 			IR_FOLD_COPY(op1);
2441 		}
2442 	}
2443 	IR_FOLD_NEXT;
2444 }
2445 
2446 IR_FOLD(AND(SHR, C_I64))
2447 IR_FOLD(AND(SHR, C_U64))
2448 {
2449 	if (IR_IS_CONST_REF(op1_insn->op2)) {
2450 		if (((uint64_t)-1) >> ctx->ir_base[op1_insn->op2].val.u64 == op2_insn->val.u64) {
2451 			IR_FOLD_COPY(op1);
2452 		}
2453 	}
2454 	IR_FOLD_NEXT;
2455 }
2456 
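/*
 * Editor's note: a logical right shift by c already clears the top c bits,
 * so AND-ing with the matching all-ones mask is a no-op, e.g. for u32:
 *     AND(SHR(x, 4), 0x0fffffff)  =>  SHR(x, 4)
 * since 0xffffffff >> 4 == 0x0fffffff, which is exactly the comparison each
 * of the four groups above performs at its own width.
 */
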
2457 IR_FOLD(EQ(FP2FP, C_DOUBLE))
2458 IR_FOLD(NE(FP2FP, C_DOUBLE))
2459 IR_FOLD(LT(FP2FP, C_DOUBLE))
2460 IR_FOLD(GE(FP2FP, C_DOUBLE))
2461 IR_FOLD(LE(FP2FP, C_DOUBLE))
2462 IR_FOLD(GT(FP2FP, C_DOUBLE))
2463 IR_FOLD(ULT(FP2FP, C_DOUBLE))
2464 IR_FOLD(UGE(FP2FP, C_DOUBLE))
2465 IR_FOLD(ULE(FP2FP, C_DOUBLE))
2466 IR_FOLD(UGT(FP2FP, C_DOUBLE))
2467 {
2468 	IR_ASSERT(op1_insn->type == IR_DOUBLE);
2469 	IR_ASSERT(ctx->ir_base[op1_insn->op1].type == IR_FLOAT);
2470 	if (op2_insn->val.d == (double)(float)op2_insn->val.d) {
2471 		op1 = op1_insn->op1;
2472 		op2 = ir_const_float(ctx, (float)op2_insn->val.d);
2473 		IR_FOLD_RESTART;
2474 	}
2475 	IR_FOLD_NEXT;
2476 }
2477 
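/*
 * Editor's note: a float widened to double compares against a double
 * constant exactly as the original float compares against that constant
 * narrowed to float, provided the constant itself round-trips through
 * float, e.g.
 *     LT(FP2FP.double(f), 1.5)  =>  LT(f, 1.5f)    (1.5 is exact in float)
 * A constant such as 0.1 fails the round-trip check above, and the
 * comparison is then left in double precision.
 */
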
2478 // TODO: Reassociation
2479 IR_FOLD(ADD(ADD, C_U8))
2480 IR_FOLD(ADD(ADD, C_U16))
2481 IR_FOLD(ADD(ADD, C_U32))
2482 IR_FOLD(ADD(ADD, C_U64))
2483 IR_FOLD(ADD(ADD, C_ADDR))
2484 {
2485 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2486 		/* (x + c1) + c2  => x + (c1 + c2) */
2487 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
2488 		op1 = op1_insn->op1;
2489 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2490 		IR_FOLD_RESTART;
2491 	}
2492 	IR_FOLD_NEXT;
2493 }
2494 
2495 IR_FOLD(ADD(ADD, C_I8))
2496 IR_FOLD(ADD(ADD, C_I16))
2497 IR_FOLD(ADD(ADD, C_I32))
2498 IR_FOLD(ADD(ADD, C_I64))
2499 {
2500 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2501 		/* (x + c1) + c2  => x + (c1 + c2) */
2502 		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 + op2_insn->val.i64;
2503 		op1 = op1_insn->op1;
2504 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2505 		IR_FOLD_RESTART;
2506 	}
2507 	IR_FOLD_NEXT;
2508 }
2509 
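/*
 * Editor's note: the reassociation groups combine two constants into one at
 * compile time, e.g.
 *     ADD(ADD(x, 3), 5)  =>  ADD(x, 8)
 * Symbolic constants are excluded (IR_IS_SYM_CONST), because their numeric
 * value is not known at this point.
 */
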
2510 IR_FOLD(ADD(SUB, C_U8))
2511 IR_FOLD(ADD(SUB, C_U16))
2512 IR_FOLD(ADD(SUB, C_U32))
2513 IR_FOLD(ADD(SUB, C_U64))
2514 IR_FOLD(ADD(SUB, C_ADDR))
2515 {
2516 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2517 		/* (x - c1) + c2  => x + (c2 - c1) */
2518 		val.u64 = op2_insn->val.u64 - ctx->ir_base[op1_insn->op2].val.u64;
2519 		op1 = op1_insn->op1;
2520 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2521 		IR_FOLD_RESTART;
2522 	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2523 		/* (c1 - x) + c2  => (c1 + c2) - x */
2524 		val.u64 = ctx->ir_base[op1_insn->op1].val.u64 + op2_insn->val.u64;
2525 		opt++; /* ADD -> SUB */
2526 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2527 		op2 = op1_insn->op2;
2528 		IR_FOLD_RESTART;
2529 	}
2530 	IR_FOLD_NEXT;
2531 }
2532 
2533 IR_FOLD(ADD(SUB, C_I8))
2534 IR_FOLD(ADD(SUB, C_I16))
2535 IR_FOLD(ADD(SUB, C_I32))
2536 IR_FOLD(ADD(SUB, C_I64))
2537 {
2538 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2539 		/* (x - c1) + c2  => x + (c2 - c1) */
2540 		val.i64 = op2_insn->val.i64 - ctx->ir_base[op1_insn->op2].val.i64;
2541 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2542 			val.i64 = -val.i64;
2543 			opt++; /* ADD -> SUB */
2544 		}
2545 		op1 = op1_insn->op1;
2546 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2547 		IR_FOLD_RESTART;
2548 	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2549 		/* (c1 - x) + c2  => (c1 + c2) - x */
2550 		val.i64 = ctx->ir_base[op1_insn->op1].val.i64 + op2_insn->val.i64;
2551 		opt++; /* ADD -> SUB */
2552 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2553 		op2 = op1_insn->op2;
2554 		IR_FOLD_RESTART;
2555 	}
2556 	IR_FOLD_NEXT;
2557 }
2558 
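/*
 * Editor's note: in the signed variants the combined constant is negated
 * and the opcode flipped (ADD <-> SUB) so that the constant stays
 * non-negative; the `opt++` / `opt--` adjustments rely on ADD and SUB being
 * adjacent opcodes, as the inline comments state.  The extra
 * `val.i64 - 1 < 0` test appears intended to skip the negation when the
 * combined constant is the most negative value, whose negation is not
 * representable.
 */
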
2559 IR_FOLD(SUB(ADD, C_U8))
2560 IR_FOLD(SUB(ADD, C_U16))
2561 IR_FOLD(SUB(ADD, C_U32))
2562 IR_FOLD(SUB(ADD, C_U64))
2563 IR_FOLD(SUB(ADD, C_ADDR))
2564 {
2565 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2566 		/* (x + c1) - c2  => x + (c1 - c2) */
2567 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 - op2_insn->val.u64;
2568 		opt--; /* SUB -> ADD */
2569 		op1 = op1_insn->op1;
2570 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2571 		IR_FOLD_RESTART;
2572 	}
2573 	IR_FOLD_NEXT;
2574 }
2575 
2576 IR_FOLD(SUB(ADD, C_I8))
2577 IR_FOLD(SUB(ADD, C_I16))
2578 IR_FOLD(SUB(ADD, C_I32))
2579 IR_FOLD(SUB(ADD, C_I64))
2580 {
2581 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2582 		/* (x + c1) - c2  => x + (c1 - c2) */
2583 		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 - op2_insn->val.i64;
2584 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2585 			val.i64 = -val.i64;
2586 		} else {
2587 			opt--; /* SUB -> ADD */
2588 		}
2589 		op1 = op1_insn->op1;
2590 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2591 		IR_FOLD_RESTART;
2592 	}
2593 	IR_FOLD_NEXT;
2594 }
2595 
2596 IR_FOLD(SUB(C_U8, ADD))
2597 IR_FOLD(SUB(C_U16, ADD))
2598 IR_FOLD(SUB(C_U32, ADD))
2599 IR_FOLD(SUB(C_U64, ADD))
2600 IR_FOLD(SUB(C_ADDR, ADD))
2601 {
2602 	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2603 		/* c1 - (x + c2) => (c1 - c2) - x */
2604 		val.u64 = op1_insn->val.u64 - ctx->ir_base[op2_insn->op2].val.u64;
2605 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2606 		op2 = op2_insn->op1;
2607 		IR_FOLD_RESTART;
2608 	}
2609 	IR_FOLD_NEXT;
2610 }
2611 
2612 IR_FOLD(SUB(C_I8, ADD))
2613 IR_FOLD(SUB(C_I16, ADD))
2614 IR_FOLD(SUB(C_I32, ADD))
2615 IR_FOLD(SUB(C_I64, ADD))
2616 {
2617 	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2618 		/* c1 - (x + c2) => (c1 - c2) - x */
2619 		val.i64 = op1_insn->val.i64 - ctx->ir_base[op2_insn->op2].val.i64;
2620 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2621 		op2 = op2_insn->op1;
2622 		IR_FOLD_RESTART;
2623 	}
2624 	IR_FOLD_NEXT;
2625 }
2626 
2627 IR_FOLD(SUB(SUB, C_U8))
2628 IR_FOLD(SUB(SUB, C_U16))
2629 IR_FOLD(SUB(SUB, C_U32))
2630 IR_FOLD(SUB(SUB, C_U64))
2631 IR_FOLD(SUB(SUB, C_ADDR))
2632 {
2633 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2634 		/* (x - c1) - c2  => x - (c1 + c2) */
2635 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
2636 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2637 			val.i64 = -val.i64;
2638 			opt--; /* SUB -> ADD */
2639 		}
2640 		op1 = op1_insn->op1;
2641 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2642 		IR_FOLD_RESTART;
2643 	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2644 		/* (c1 - x) - c2  => (c1 - c2) - x */
2645 		val.u64 = ctx->ir_base[op1_insn->op1].val.u64 - op2_insn->val.u64;
2646 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2647 		op2 = op1_insn->op2;
2648 		IR_FOLD_RESTART;
2649 	}
2650 	IR_FOLD_NEXT;
2651 }
2652 
2653 IR_FOLD(SUB(SUB, C_I8))
2654 IR_FOLD(SUB(SUB, C_I16))
2655 IR_FOLD(SUB(SUB, C_I32))
2656 IR_FOLD(SUB(SUB, C_I64))
2657 {
2658 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2659 		/* (x - c1) - c2  => x - (c1 + c2) */
2660 		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 + op2_insn->val.i64;
2661 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2662 			val.i64 = -val.i64;
2663 			opt--; /* SUB -> ADD */
2664 		}
2665 		op1 = op1_insn->op1;
2666 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2667 		IR_FOLD_RESTART;
2668 	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2669 		/* (c1 - x) - c2  => (c1 - c2) - x */
2670 		val.i64 = ctx->ir_base[op1_insn->op1].val.i64 - op2_insn->val.i64;
2671 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2672 		op2 = op1_insn->op2;
2673 		IR_FOLD_RESTART;
2674 	}
2675 	IR_FOLD_NEXT;
2676 }
2677 
2678 IR_FOLD(SUB(C_U8, SUB))
2679 IR_FOLD(SUB(C_U16, SUB))
2680 IR_FOLD(SUB(C_U32, SUB))
2681 IR_FOLD(SUB(C_U64, SUB))
2682 IR_FOLD(SUB(C_ADDR, SUB))
2683 {
2684 	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2685 		/* c1 - (x - c2) => (c1 + c2) - x */
2686 		val.u64 = op1_insn->val.u64 + ctx->ir_base[op2_insn->op2].val.u64;
2687 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2688 		op2 = op2_insn->op1;
2689 		IR_FOLD_RESTART;
2690 	} else if (IR_IS_CONST_REF(op2_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op1].op)) {
2691 		/* c1 - (c2 - x) => x + (c1 - c2) */
2692 		val.u64 = op1_insn->val.u64 - ctx->ir_base[op2_insn->op1].val.u64;
2693 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2694 			val.i64 = -val.i64;
2695 			opt++; /* ADD -> SUB */
2696 		}
2697 		op1 = op2_insn->op2;
2698 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2699 		IR_FOLD_RESTART;
2700 	}
2701 	IR_FOLD_NEXT;
2702 }
2703 
2704 IR_FOLD(SUB(C_I8, SUB))
2705 IR_FOLD(SUB(C_I16, SUB))
2706 IR_FOLD(SUB(C_I32, SUB))
2707 IR_FOLD(SUB(C_I64, SUB))
2708 {
2709 	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2710 		/* c1 - (x - c2) => (c1 + c2) - x */
2711 		val.i64 = op1_insn->val.i64 + ctx->ir_base[op2_insn->op2].val.i64;
2712 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2713 		op2 = op2_insn->op1;
2714 		IR_FOLD_RESTART;
2715 	} else if (IR_IS_CONST_REF(op2_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op1].op)) {
2716 		/* c1 - (c2 - x) => x + (c1 - c2) */
2717 		val.i64 = op1_insn->val.i64 - ctx->ir_base[op2_insn->op1].val.i64;
2718 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2719 			val.i64 = -val.i64;
2720 			opt++; /* ADD -> SUB */
2721 		}
2722 		op1 = op2_insn->op2;
2723 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2724 		IR_FOLD_RESTART;
2725 	}
2726 	IR_FOLD_NEXT;
2727 }
2728 
2729 IR_FOLD(MUL(MUL, C_U8))
2730 IR_FOLD(MUL(MUL, C_U16))
2731 IR_FOLD(MUL(MUL, C_U32))
2732 IR_FOLD(MUL(MUL, C_U64))
2733 IR_FOLD(MUL(MUL, C_ADDR))
2734 {
2735 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2736 		/* (x * c1) * c2  => x * (c1 * c2) */
2737 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 * op2_insn->val.u64;
2738 		op1 = op1_insn->op1;
2739 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2740 		IR_FOLD_RESTART;
2741 	}
2742 	IR_FOLD_NEXT;
2743 }
2744 
2745 IR_FOLD(MUL(MUL, C_I8))
2746 IR_FOLD(MUL(MUL, C_I16))
2747 IR_FOLD(MUL(MUL, C_I32))
2748 IR_FOLD(MUL(MUL, C_I64))
2749 {
2750 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2751 		/* (x * c1) * c2  => x * (c1 * c2) */
2752 		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 * op2_insn->val.i64;
2753 		op1 = op1_insn->op1;
2754 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2755 		IR_FOLD_RESTART;
2756 	}
2757 	IR_FOLD_NEXT;
2758 }
2759 
2760 IR_FOLD(AND(AND, C_U8))
2761 IR_FOLD(AND(AND, C_U16))
2762 IR_FOLD(AND(AND, C_U32))
2763 IR_FOLD(AND(AND, C_U64))
2764 IR_FOLD(AND(AND, C_I8))
2765 IR_FOLD(AND(AND, C_I16))
2766 IR_FOLD(AND(AND, C_I32))
2767 IR_FOLD(AND(AND, C_I64))
2768 IR_FOLD(AND(AND, C_ADDR))
2769 {
2770 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2771 		/* (x & c1) & c2  => x & (c1 & c2) */
2772 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 & op2_insn->val.u64;
2773 		op1 = op1_insn->op1;
2774 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2775 		IR_FOLD_RESTART;
2776 	}
2777 	IR_FOLD_NEXT;
2778 }
2779 
2780 IR_FOLD(OR(OR, C_U8))
2781 IR_FOLD(OR(OR, C_U16))
2782 IR_FOLD(OR(OR, C_U32))
2783 IR_FOLD(OR(OR, C_U64))
2784 IR_FOLD(OR(OR, C_I8))
2785 IR_FOLD(OR(OR, C_I16))
2786 IR_FOLD(OR(OR, C_I32))
2787 IR_FOLD(OR(OR, C_I64))
2788 IR_FOLD(OR(OR, C_ADDR))
2789 {
2790 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2791 		/* (x | c1) | c2  => x | (c1 | c2) */
2792 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 | op2_insn->val.u64;
2793 		op1 = op1_insn->op1;
2794 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2795 		IR_FOLD_RESTART;
2796 	}
2797 	IR_FOLD_NEXT;
2798 }
2799 
2800 IR_FOLD(XOR(XOR, C_U8))
2801 IR_FOLD(XOR(XOR, C_U16))
2802 IR_FOLD(XOR(XOR, C_U32))
2803 IR_FOLD(XOR(XOR, C_U64))
2804 IR_FOLD(XOR(XOR, C_I8))
2805 IR_FOLD(XOR(XOR, C_I16))
2806 IR_FOLD(XOR(XOR, C_I32))
2807 IR_FOLD(XOR(XOR, C_I64))
2808 IR_FOLD(XOR(XOR, C_ADDR))
2809 {
2810 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2811 		/* (x ^ c1) ^ c2  => x ^ (c1 ^ c2) */
2812 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 ^ op2_insn->val.u64;
2813 		op1 = op1_insn->op1;
2814 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2815 		IR_FOLD_RESTART;
2816 	}
2817 	IR_FOLD_NEXT;
2818 }
2819 
2820 IR_FOLD(AND(AND, _))
2821 IR_FOLD(OR(OR, _))
2822 IR_FOLD(MIN(MIN, _))
2823 IR_FOLD(MAX(MAX, _))
2824 {
2825 	if (op1_insn->op1 == op2 || op1_insn->op2 == op2) {
2826 		IR_FOLD_COPY(op2);
2827 	}
2828 	IR_FOLD_NEXT;
2829 }
2830 
2831 IR_FOLD(XOR(XOR, _))
2832 {
2833 	if (op1_insn->op1 == op2) {
2834 		IR_FOLD_COPY(op1_insn->op2);
2835 	} else if (op1_insn->op2 == op2) {
2836 		IR_FOLD_COPY(op1_insn->op1);
2837 	}
2838 	IR_FOLD_NEXT;
2839 }
2840 
2841 /* ROL/ROR */
2842 IR_FOLD(OR(SHL, SHR))
2843 IR_FOLD(OR(SHR, SHL))
2844 IR_FOLD(ADD(SHL, SHR))
2845 IR_FOLD(ADD(SHR, SHL))
2846 {
2847 	if (op1_insn->op1 == op2_insn->op1) {
2848 		if (IR_IS_CONST_REF(op1_insn->op2) && IR_IS_CONST_REF(op2_insn->op2)) {
2849 			if (ctx->ir_base[op1_insn->op2].val.u64 + ctx->ir_base[op2_insn->op2].val.u64 ==
2850 					ir_type_size[IR_OPT_TYPE(opt)] * 8) {
2851 				/* (x << c) | (x >> (32 - c)) -> ROL(x, c) */
2852 				op1 = op1_insn->op1;
2853 				op2 = op1_insn->op2;
2854 				opt = op1_insn->opt + 3; /* SHL -> ROL, SHR -> ROR */
2855 				IR_FOLD_RESTART;
2856 			}
2857 		} else if (ctx->ir_base[op2_insn->op2].op == IR_SUB
2858 				&& IR_IS_CONST_REF(ctx->ir_base[op2_insn->op2].op1)
2859 				&& ctx->ir_base[op2_insn->op2].op2 == op1_insn->op2
2860 				&& ctx->ir_base[ctx->ir_base[op2_insn->op2].op1].val.u64 == ir_type_size[IR_OPT_TYPE(opt)] * 8) {
2861 			/* (x << y) | (x >> (32 - y)) -> ROL(x, y) */
2862 			op1 = op1_insn->op1;
2863 			op2 = op1_insn->op2;
2864 			opt = op1_insn->opt + 3; /* SHL -> ROL, SHR -> ROR */
2865 			IR_FOLD_RESTART;
2866 		} else if (ctx->ir_base[op1_insn->op2].op == IR_SUB
2867 				&& IR_IS_CONST_REF(ctx->ir_base[op1_insn->op2].op1)
2868 				&& ctx->ir_base[op1_insn->op2].op2 == op2_insn->op2
2869 				&& ctx->ir_base[ctx->ir_base[op1_insn->op2].op1].val.u64 == ir_type_size[IR_OPT_TYPE(opt)] * 8) {
2870 			/* (x << (32 - y)) | (x >> y) -> ROR(x, y) */
2871 			op1 = op2_insn->op1;
2872 			op2 = op2_insn->op2;
2873 			opt = op2_insn->opt + 3; /* SHL -> ROL, SHR -> ROR */
2874 			IR_FOLD_RESTART;
2875 		}
2876 	}
2877 	IR_FOLD_NEXT;
2878 }
2879 
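/*
 * Editor's note: the group above recognizes the classic rotate idiom, e.g.
 * for a 32-bit x and a constant c:
 *     OR(SHL(x, c), SHR(x, 32 - c))  =>  ROL(x, c)
 * as well as the variable-count forms built with SUB(32, y).  The ADD forms
 * are accepted too, since the two shifted halves have no overlapping bits.
 * The `opt + 3` adjustment assumes the opcode numbering places ROL three
 * slots after SHL and ROR three slots after SHR, as the inline comments
 * indicate.
 */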
2880 
2881 /* Swap operands (move lower ref to op2) for better CSE */
2882 IR_FOLD(ADD(_, _))
2883 IR_FOLD(MUL(_, _))
2884 IR_FOLD_NAMED(swap_ops)
2885 {
2886 	if (op1 < op2) {  /* move lower ref to op2 */
2887 		SWAP_REFS(op1, op2);
2888 		IR_FOLD_RESTART;
2889 	}
2890 	IR_FOLD_NEXT;
2891 }
2892 
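/*
 * Editor's note: commutative operations are canonicalized so that the
 * operand with the lower reference number ends up in op2; this makes
 * ADD(a, b) and ADD(b, a) the same node, which is what "for better CSE"
 * above refers to.  The ADD_OV/MUL_OV variants below do the same swap but
 * finish with IR_FOLD_EMIT, which appears to bypass the usual CSE lookup
 * (cf. their "skip CSE ???" comment).
 */
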
2893 IR_FOLD(ADD_OV(_, _))
2894 IR_FOLD(MUL_OV(_, _))
2895 {
2896 	if (op1 < op2) {  /* move lower ref to op2 */
2897 		SWAP_REFS(op1, op2);
2898 		IR_FOLD_RESTART;
2899 	}
2900 	/* skip CSE ??? */
2901 	IR_FOLD_EMIT;
2902 }
2903 
2904 IR_FOLD(SUB(_, _))
2905 {
2906 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt)) && op1 == op2) {
2907 		IR_FOLD_CONST_U(0);
2908 	}
2909 	IR_FOLD_NEXT;
2910 }
2911 
2912 IR_FOLD(SUB_OV(_, _))
2913 {
2914 	if (op1 == op2) {
2915 		IR_FOLD_CONST_U(0);
2916 	}
2917 	/* skip CSE ??? */
2918 	IR_FOLD_EMIT;
2919 }
2920 
2921 /* Binary operations with op1 == op2 */
2922 IR_FOLD(AND(_,_))
2923 IR_FOLD(OR(_,_))
2924 IR_FOLD(MIN(_, _))
2925 IR_FOLD(MAX(_, _))
2926 {
2927 	/* a & a => a */
2928 	if (op1 == op2) {
2929 		IR_FOLD_COPY(op1);
2930 	}
2931 	IR_FOLD_DO_NAMED(swap_ops);
2932 }
2933 
2934 IR_FOLD(XOR(_,_))
2935 {
2936 	/* a xor a => 0 */
2937 	if (op1 == op2) {
2938 		IR_FOLD_CONST_U(0);
2939 	}
2940 	IR_FOLD_DO_NAMED(swap_ops);
2941 }
2942 
2943 IR_FOLD(EQ(_, _))
2944 IR_FOLD(NE(_, _))
2945 {
2946 	if (op1 != op2) {
2947 		IR_FOLD_DO_NAMED(swap_ops);
2948 	} else if (IR_IS_TYPE_INT(op1_insn->type)) {
2949 		/* a == a => true, a != a => false */
2950 		IR_FOLD_BOOL((opt & IR_OPT_OP_MASK) == IR_EQ);
2951 	}
2952 	IR_FOLD_NEXT;
2953 }
2954 
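/*
 * Editor's note: EQ(a, a) / NE(a, a) are only folded for integer operands.
 * For floating-point types the operand may be a NaN, and NaN == NaN is
 * false under IEEE-754, so the comparison is kept.  The boolean chosen
 * above is simply "is this opcode IR_EQ?", i.e. true for EQ, false for NE.
 */
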
2955 IR_FOLD(LT(_, _))
2956 IR_FOLD(GE(_, _))
2957 IR_FOLD(LE(_, _))
2958 IR_FOLD(GT(_, _))
2959 {
2960 	if (op1 == op2) {
2961 		if (IR_IS_TYPE_INT(op1_insn->type)) {
2962 			/* a >= a => true (the two low opcode bits differ) */
2963 			IR_FOLD_BOOL((opt ^ (opt >> 1)) & 1);
2964 		}
2965 	} else if (op1 < op2) {  /* move lower ref to op2 */
2966 		SWAP_REFS(op1, op2);
2967 		opt ^= 3; /* [U]LT <-> [U]GT, [U]LE <-> [U]GE */
2968 		IR_FOLD_RESTART;
2969 	}
2970 	IR_FOLD_NEXT;
2971 }
2972 
2973 IR_FOLD(ULT(_, _))
2974 IR_FOLD(UGE(_, _))
2975 IR_FOLD(ULE(_, _))
2976 IR_FOLD(UGT(_, _))
2977 {
2978 	if (op1 == op2) {
2979 		/* a >= a => true (the two low opcode bits differ) */
2980 		IR_FOLD_BOOL((opt ^ (opt >> 1)) & 1);
2981 	} else if (op1 < op2) {  /* move lower ref to op2 */
2982 		SWAP_REFS(op1, op2);
2983 		opt ^= 3; /* [U]LT <-> [U]GT, [U]LE <-> [U]GE */
2984 	}
2985 	IR_FOLD_NEXT;
2986 }
2987 
2988 IR_FOLD(COND(_, _)) // TODO: COND(_, _, _)
2989 {
2990 	if (op2 == op3) {
2991 		IR_FOLD_COPY(op2);
2992 	}
2993 	IR_FOLD_NEXT;
2994 }
2995