xref: /php-src/ext/opcache/jit/ir/ir_fold.h (revision bad5d2c7)
1 /*
2  * IR - Lightweight JIT Compilation Framework
3  * (Folding engine rules)
4  * Copyright (C) 2022 Zend by Perforce.
5  * Authors: Dmitry Stogov <dmitry@php.net>
6  *
7  * Based on Mike Pall's implementation for LuaJIT.
8  */
9 
10 /* Constant Folding */
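/*
 * Rule format (a rough guide; the IR_FOLD_* helpers are macros supplied by the
 * file that includes these rules):
 *   IR_FOLD(OP(C_T1, C_T2)) matches an OP whose operands are constants of the
 *   listed types; `_` matches any operand, and several patterns may share one
 *   body. Inside a body, op1_insn/op2_insn point at the operand instructions.
 *   A body reports its result through one of:
 *     IR_FOLD_CONST_U/I/D/F(x) - replace the instruction with a constant;
 *     IR_FOLD_BOOL(cond)       - produce IR_TRUE or IR_FALSE;
 *     IR_FOLD_COPY(ref)        - reuse an existing reference;
 *     IR_FOLD_NEXT             - this rule does not apply, try the next one;
 *     IR_FOLD_RESTART          - re-run folding on the rewritten opt/op1/op2;
 *     IR_FOLD_EMIT             - emit the instruction as is (no more folding).
 */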
11 IR_FOLD(EQ(C_BOOL, C_BOOL))
12 IR_FOLD(EQ(C_U8, C_U8))
13 IR_FOLD(EQ(C_U16, C_U16))
14 IR_FOLD(EQ(C_U32, C_U32))
15 IR_FOLD(EQ(C_U64, C_U64))
16 IR_FOLD(EQ(C_ADDR, C_ADDR))
17 IR_FOLD(EQ(C_CHAR, C_CHAR))
18 IR_FOLD(EQ(C_I8, C_I8))
19 IR_FOLD(EQ(C_I16, C_I16))
20 IR_FOLD(EQ(C_I32, C_I32))
21 IR_FOLD(EQ(C_I64, C_I64))
22 {
23 	IR_FOLD_BOOL(op1_insn->val.u64 == op2_insn->val.u64);
24 }
25 
26 IR_FOLD(EQ(C_DOUBLE, C_DOUBLE))
27 {
28 	IR_FOLD_BOOL(op1_insn->val.d == op2_insn->val.d);
29 }
30 
31 IR_FOLD(EQ(C_FLOAT, C_FLOAT))
32 {
33 	IR_FOLD_BOOL(op1_insn->val.f == op2_insn->val.f);
34 }
35 
36 IR_FOLD(NE(C_BOOL, C_BOOL))
37 IR_FOLD(NE(C_U8, C_U8))
38 IR_FOLD(NE(C_U16, C_U16))
39 IR_FOLD(NE(C_U32, C_U32))
40 IR_FOLD(NE(C_U64, C_U64))
41 IR_FOLD(NE(C_ADDR, C_ADDR))
42 IR_FOLD(NE(C_CHAR, C_CHAR))
43 IR_FOLD(NE(C_I8, C_I8))
44 IR_FOLD(NE(C_I16, C_I16))
45 IR_FOLD(NE(C_I32, C_I32))
46 IR_FOLD(NE(C_I64, C_I64))
47 {
48 	IR_FOLD_BOOL(op1_insn->val.u64 != op2_insn->val.u64);
49 }
50 
51 IR_FOLD(NE(C_DOUBLE, C_DOUBLE))
52 {
53 	IR_FOLD_BOOL(op1_insn->val.d != op2_insn->val.d);
54 }
55 
56 IR_FOLD(NE(C_FLOAT, C_FLOAT))
57 {
58 	IR_FOLD_BOOL(op1_insn->val.f != op2_insn->val.f);
59 }
60 
61 IR_FOLD(LT(C_BOOL, C_BOOL))
62 IR_FOLD(LT(C_U8, C_U8))
63 IR_FOLD(LT(C_U16, C_U16))
64 IR_FOLD(LT(C_U32, C_U32))
65 IR_FOLD(LT(C_U64, C_U64))
66 IR_FOLD(LT(C_ADDR, C_ADDR))
67 {
68 	IR_FOLD_BOOL(op1_insn->val.u64 < op2_insn->val.u64);
69 }
70 
71 IR_FOLD(LT(C_CHAR, C_CHAR))
72 IR_FOLD(LT(C_I8, C_I8))
73 IR_FOLD(LT(C_I16, C_I16))
74 IR_FOLD(LT(C_I32, C_I32))
75 IR_FOLD(LT(C_I64, C_I64))
76 {
77 	IR_FOLD_BOOL(op1_insn->val.i64 < op2_insn->val.i64);
78 }
79 
80 IR_FOLD(LT(C_DOUBLE, C_DOUBLE))
81 {
82 	IR_FOLD_BOOL(op1_insn->val.d < op2_insn->val.d);
83 }
84 
85 IR_FOLD(LT(C_FLOAT, C_FLOAT))
86 {
87 	IR_FOLD_BOOL(op1_insn->val.f < op2_insn->val.f);
88 }
89 
90 IR_FOLD(GE(C_BOOL, C_BOOL))
91 IR_FOLD(GE(C_U8, C_U8))
92 IR_FOLD(GE(C_U16, C_U16))
93 IR_FOLD(GE(C_U32, C_U32))
94 IR_FOLD(GE(C_U64, C_U64))
95 IR_FOLD(GE(C_ADDR, C_ADDR))
96 {
97 	IR_FOLD_BOOL(op1_insn->val.u64 >= op2_insn->val.u64);
98 }
99 
100 IR_FOLD(GE(C_CHAR, C_CHAR))
101 IR_FOLD(GE(C_I8, C_I8))
102 IR_FOLD(GE(C_I16, C_I16))
103 IR_FOLD(GE(C_I32, C_I32))
104 IR_FOLD(GE(C_I64, C_I64))
105 {
106 	IR_FOLD_BOOL(op1_insn->val.i64 >= op2_insn->val.i64);
107 }
108 
109 IR_FOLD(GE(C_DOUBLE, C_DOUBLE))
110 {
111 	IR_FOLD_BOOL(op1_insn->val.d >= op2_insn->val.d);
112 }
113 
114 IR_FOLD(GE(C_FLOAT, C_FLOAT))
115 {
116 	IR_FOLD_BOOL(op1_insn->val.f >= op2_insn->val.f);
117 }
118 
119 IR_FOLD(LE(C_BOOL, C_BOOL))
120 IR_FOLD(LE(C_U8, C_U8))
121 IR_FOLD(LE(C_U16, C_U16))
122 IR_FOLD(LE(C_U32, C_U32))
123 IR_FOLD(LE(C_U64, C_U64))
124 IR_FOLD(LE(C_ADDR, C_ADDR))
125 {
126 	IR_FOLD_BOOL(op1_insn->val.u64 <= op2_insn->val.u64);
127 }
128 
129 IR_FOLD(LE(C_CHAR, C_CHAR))
130 IR_FOLD(LE(C_I8, C_I8))
131 IR_FOLD(LE(C_I16, C_I16))
132 IR_FOLD(LE(C_I32, C_I32))
133 IR_FOLD(LE(C_I64, C_I64))
134 {
135 	IR_FOLD_BOOL(op1_insn->val.i64 <= op2_insn->val.i64);
136 }
137 
138 IR_FOLD(LE(C_DOUBLE, C_DOUBLE))
139 {
140 	IR_FOLD_BOOL(op1_insn->val.d <= op2_insn->val.d);
141 }
142 
143 IR_FOLD(LE(C_FLOAT, C_FLOAT))
144 {
145 	IR_FOLD_BOOL(op1_insn->val.f <= op2_insn->val.f);
146 }
147 
148 IR_FOLD(GT(C_BOOL, C_BOOL))
149 IR_FOLD(GT(C_U8, C_U8))
150 IR_FOLD(GT(C_U16, C_U16))
151 IR_FOLD(GT(C_U32, C_U32))
152 IR_FOLD(GT(C_U64, C_U64))
153 IR_FOLD(GT(C_ADDR, C_ADDR))
154 {
155 	IR_FOLD_BOOL(op1_insn->val.u64 > op2_insn->val.u64);
156 }
157 
158 IR_FOLD(GT(C_CHAR, C_CHAR))
159 IR_FOLD(GT(C_I8, C_I8))
160 IR_FOLD(GT(C_I16, C_I16))
161 IR_FOLD(GT(C_I32, C_I32))
162 IR_FOLD(GT(C_I64, C_I64))
163 {
164 	IR_FOLD_BOOL(op1_insn->val.i64 > op2_insn->val.i64);
165 }
166 
167 IR_FOLD(GT(C_DOUBLE, C_DOUBLE))
168 {
169 	IR_FOLD_BOOL(op1_insn->val.d > op2_insn->val.d);
170 }
171 
172 IR_FOLD(GT(C_FLOAT, C_FLOAT))
173 {
174 	IR_FOLD_BOOL(op1_insn->val.f > op2_insn->val.f);
175 }
176 
177 IR_FOLD(ULT(C_BOOL, C_BOOL))
178 IR_FOLD(ULT(C_U8, C_U8))
179 IR_FOLD(ULT(C_U16, C_U16))
180 IR_FOLD(ULT(C_U32, C_U32))
181 IR_FOLD(ULT(C_U64, C_U64))
182 IR_FOLD(ULT(C_ADDR, C_ADDR))
183 IR_FOLD(ULT(C_CHAR, C_CHAR))
184 IR_FOLD(ULT(C_I8, C_I8))
185 IR_FOLD(ULT(C_I16, C_I16))
186 IR_FOLD(ULT(C_I32, C_I32))
187 IR_FOLD(ULT(C_I64, C_I64))
188 {
189 	IR_FOLD_BOOL(op1_insn->val.u64 < op2_insn->val.u64);
190 }
191 
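/*
 * Note: the FP variants of ULT/UGE/ULE/UGT below are folded as negated ordered
 * comparisons, so a NaN operand makes them evaluate to true (unordered
 * comparison semantics).
 */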
192 IR_FOLD(ULT(C_DOUBLE, C_DOUBLE))
193 {
194 	IR_FOLD_BOOL(!(op1_insn->val.d >= op2_insn->val.d));
195 }
196 
197 IR_FOLD(ULT(C_FLOAT, C_FLOAT))
198 {
199 	IR_FOLD_BOOL(!(op1_insn->val.f >= op2_insn->val.f));
200 }
201 
202 IR_FOLD(UGE(C_BOOL, C_BOOL))
203 IR_FOLD(UGE(C_U8, C_U8))
204 IR_FOLD(UGE(C_U16, C_U16))
205 IR_FOLD(UGE(C_U32, C_U32))
206 IR_FOLD(UGE(C_U64, C_U64))
207 IR_FOLD(UGE(C_ADDR, C_ADDR))
208 IR_FOLD(UGE(C_CHAR, C_CHAR))
209 IR_FOLD(UGE(C_I8, C_I8))
210 IR_FOLD(UGE(C_I16, C_I16))
211 IR_FOLD(UGE(C_I32, C_I32))
212 IR_FOLD(UGE(C_I64, C_I64))
213 {
214 	IR_FOLD_BOOL(op1_insn->val.u64 >= op2_insn->val.u64);
215 }
216 
217 IR_FOLD(UGE(C_DOUBLE, C_DOUBLE))
218 {
219 	IR_FOLD_BOOL(!(op1_insn->val.d < op2_insn->val.d));
220 }
221 
222 IR_FOLD(UGE(C_FLOAT, C_FLOAT))
223 {
224 	IR_FOLD_BOOL(!(op1_insn->val.f < op2_insn->val.f));
225 }
226 
227 IR_FOLD(ULE(C_BOOL, C_BOOL))
228 IR_FOLD(ULE(C_U8, C_U8))
229 IR_FOLD(ULE(C_U16, C_U16))
230 IR_FOLD(ULE(C_U32, C_U32))
231 IR_FOLD(ULE(C_U64, C_U64))
232 IR_FOLD(ULE(C_ADDR, C_ADDR))
233 IR_FOLD(ULE(C_CHAR, C_CHAR))
234 IR_FOLD(ULE(C_I8, C_I8))
235 IR_FOLD(ULE(C_I16, C_I16))
236 IR_FOLD(ULE(C_I32, C_I32))
237 IR_FOLD(ULE(C_I64, C_I64))
238 {
239 	IR_FOLD_BOOL(op1_insn->val.u64 <= op2_insn->val.u64);
240 }
241 
242 IR_FOLD(ULE(C_DOUBLE, C_DOUBLE))
243 {
244 	IR_FOLD_BOOL(!(op1_insn->val.d > op2_insn->val.d));
245 }
246 
247 IR_FOLD(ULE(C_FLOAT, C_FLOAT))
248 {
249 	IR_FOLD_BOOL(!(op1_insn->val.f > op2_insn->val.f));
250 }
251 
252 IR_FOLD(UGT(C_BOOL, C_BOOL))
253 IR_FOLD(UGT(C_U8, C_U8))
254 IR_FOLD(UGT(C_U16, C_U16))
255 IR_FOLD(UGT(C_U32, C_U32))
256 IR_FOLD(UGT(C_U64, C_U64))
257 IR_FOLD(UGT(C_ADDR, C_ADDR))
258 IR_FOLD(UGT(C_CHAR, C_CHAR))
259 IR_FOLD(UGT(C_I8, C_I8))
260 IR_FOLD(UGT(C_I16, C_I16))
261 IR_FOLD(UGT(C_I32, C_I32))
262 IR_FOLD(UGT(C_I64, C_I64))
263 {
264 	IR_FOLD_BOOL(op1_insn->val.u64 > op2_insn->val.u64);
265 }
266 
267 IR_FOLD(UGT(C_DOUBLE, C_DOUBLE))
268 {
269 	IR_FOLD_BOOL(!(op1_insn->val.d <= op2_insn->val.d));
270 }
271 
272 IR_FOLD(UGT(C_FLOAT, C_FLOAT))
273 {
274 	IR_FOLD_BOOL(!(op1_insn->val.f <= op2_insn->val.f));
275 }
276 
277 IR_FOLD(ADD(C_U8, C_U8))
278 {
279 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
280 	IR_FOLD_CONST_U(op1_insn->val.u8 + op2_insn->val.u8);
281 }
282 
283 IR_FOLD(ADD(C_U16, C_U16))
284 {
285 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
286 	IR_FOLD_CONST_U(op1_insn->val.u16 + op2_insn->val.u16);
287 }
288 
289 IR_FOLD(ADD(C_U32, C_U32))
290 {
291 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 4 && IR_OPT_TYPE(opt) == IR_ADDR));
292 	IR_FOLD_CONST_U(op1_insn->val.u32 + op2_insn->val.u32);
293 }
294 
295 IR_FOLD(ADD(C_U64, C_U64))
296 {
297 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 8 && IR_OPT_TYPE(opt) == IR_ADDR));
298 	IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
299 }
300 
301 IR_FOLD(ADD(C_ADDR, C_ADDR))
302 IR_FOLD(ADD(C_ADDR, C_INTPTR))
303 IR_FOLD(ADD(C_ADDR, C_UINTPTR))
304 IR_FOLD(ADD(C_INTPTR, C_ADDR))
305 IR_FOLD(ADD(C_UINTPTR, C_ADDR))
306 {
307 //	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
308 	IR_FOLD_CONST_U(op1_insn->val.addr + op2_insn->val.addr);
309 }
310 
311 IR_FOLD(ADD(C_I8, C_I8))
312 {
313 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
314 	IR_FOLD_CONST_I(op1_insn->val.i8 + op2_insn->val.i8);
315 }
316 
317 IR_FOLD(ADD(C_I16, C_I16))
318 {
319 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
320 	IR_FOLD_CONST_I(op1_insn->val.i16 + op2_insn->val.i16);
321 }
322 
323 IR_FOLD(ADD(C_I32, C_I32))
324 {
325 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 4 && IR_OPT_TYPE(opt) == IR_ADDR));
326 	IR_FOLD_CONST_I(op1_insn->val.i32 + op2_insn->val.i32);
327 }
328 
329 IR_FOLD(ADD(C_I64, C_I64))
330 {
331 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type || (sizeof(void*) == 8 && IR_OPT_TYPE(opt) == IR_ADDR));
332 	IR_FOLD_CONST_I(op1_insn->val.i64 + op2_insn->val.i64);
333 }
334 
335 IR_FOLD(ADD(C_DOUBLE, C_DOUBLE))
336 {
337 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
338 	IR_FOLD_CONST_D(op1_insn->val.d + op2_insn->val.d);
339 }
340 
341 IR_FOLD(ADD(C_FLOAT, C_FLOAT))
342 {
343 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
344 	IR_FOLD_CONST_F(op1_insn->val.f + op2_insn->val.f);
345 }
346 
347 IR_FOLD(SUB(C_U8, C_U8))
348 {
349 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
350 	IR_FOLD_CONST_U(op1_insn->val.u8 - op2_insn->val.u8);
351 }
352 
353 IR_FOLD(SUB(C_U16, C_U16))
354 {
355 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
356 	IR_FOLD_CONST_U(op1_insn->val.u16 - op2_insn->val.u16);
357 }
358 
359 IR_FOLD(SUB(C_U32, C_U32))
360 {
361 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
362 	IR_FOLD_CONST_U(op1_insn->val.u32 - op2_insn->val.u32);
363 }
364 
365 IR_FOLD(SUB(C_U64, C_U64))
366 {
367 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
368 	IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
369 }
370 
371 IR_FOLD(SUB(C_ADDR, C_ADDR))
372 IR_FOLD(SUB(C_ADDR, C_INTPTR))
373 IR_FOLD(SUB(C_ADDR, C_UINTPTR))
374 IR_FOLD(SUB(C_INTPTR, C_ADDR))
375 IR_FOLD(SUB(C_UINTPTR, C_ADDR))
376 {
377 //	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
378 	IR_FOLD_CONST_U(op1_insn->val.addr - op2_insn->val.addr);
379 }
380 
381 IR_FOLD(SUB(C_I8, C_I8))
382 {
383 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
384 	IR_FOLD_CONST_I(op1_insn->val.i8 - op2_insn->val.i8);
385 }
386 
387 IR_FOLD(SUB(C_I16, C_I16))
388 {
389 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
390 	IR_FOLD_CONST_I(op1_insn->val.i16 - op2_insn->val.i16);
391 }
392 
393 IR_FOLD(SUB(C_I32, C_I32))
394 {
395 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
396 	IR_FOLD_CONST_I(op1_insn->val.i32 - op2_insn->val.i32);
397 }
398 
399 IR_FOLD(SUB(C_I64, C_I64))
400 {
401 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
402 	IR_FOLD_CONST_I(op1_insn->val.i64 - op2_insn->val.i64);
403 }
404 
405 IR_FOLD(SUB(C_DOUBLE, C_DOUBLE))
406 {
407 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
408 	IR_FOLD_CONST_D(op1_insn->val.d - op2_insn->val.d);
409 }
410 
411 IR_FOLD(SUB(C_FLOAT, C_FLOAT))
412 {
413 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
414 	IR_FOLD_CONST_F(op1_insn->val.f - op2_insn->val.f);
415 }
416 
417 IR_FOLD(MUL(C_U8, C_U8))
418 {
419 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
420 	IR_FOLD_CONST_U(op1_insn->val.u8 * op2_insn->val.u8);
421 }
422 
423 IR_FOLD(MUL(C_U16, C_U16))
424 {
425 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
426 	IR_FOLD_CONST_U(op1_insn->val.u16 * op2_insn->val.u16);
427 }
428 
429 IR_FOLD(MUL(C_U32, C_U32))
430 {
431 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
432 	IR_FOLD_CONST_U(op1_insn->val.u32 * op2_insn->val.u32);
433 }
434 
435 IR_FOLD(MUL(C_U64, C_U64))
436 {
437 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
438 	IR_FOLD_CONST_U(op1_insn->val.u64 * op2_insn->val.u64);
439 }
440 
441 IR_FOLD(MUL(C_ADDR, C_ADDR))
442 IR_FOLD(MUL(C_ADDR, C_INTPTR))
443 IR_FOLD(MUL(C_ADDR, C_UINTPTR))
444 IR_FOLD(MUL(C_INTPTR, C_ADDR))
445 IR_FOLD(MUL(C_UINTPTR, C_ADDR))
446 {
447 //	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
448 	IR_FOLD_CONST_U(op1_insn->val.addr * op2_insn->val.addr);
449 }
450 
451 IR_FOLD(MUL(C_I8, C_I8))
452 {
453 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
454 	IR_FOLD_CONST_I(op1_insn->val.i8 * op2_insn->val.i8);
455 }
456 
457 IR_FOLD(MUL(C_I16, C_I16))
458 {
459 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
460 	IR_FOLD_CONST_I(op1_insn->val.i16 * op2_insn->val.i16);
461 }
462 
463 IR_FOLD(MUL(C_I32, C_I32))
464 {
465 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
466 	IR_FOLD_CONST_I(op1_insn->val.i32 * op2_insn->val.i32);
467 }
468 
469 IR_FOLD(MUL(C_I64, C_I64))
470 {
471 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
472 	IR_FOLD_CONST_I(op1_insn->val.i64 * op2_insn->val.i64);
473 }
474 
475 IR_FOLD(MUL(C_DOUBLE, C_DOUBLE))
476 {
477 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
478 	IR_FOLD_CONST_D(op1_insn->val.d * op2_insn->val.d);
479 }
480 
481 IR_FOLD(MUL(C_FLOAT, C_FLOAT))
482 {
483 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
484 	IR_FOLD_CONST_F(op1_insn->val.f * op2_insn->val.f);
485 }
486 
487 IR_FOLD(DIV(C_U8, C_U8))
488 IR_FOLD(DIV(C_U16, C_U16))
489 IR_FOLD(DIV(C_U32, C_U32))
490 IR_FOLD(DIV(C_U64, C_U64))
491 IR_FOLD(DIV(C_ADDR, C_ADDR))
492 {
493 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
494 	if (op2_insn->val.u64 == 0) {
495 		/* division by zero */
496 		IR_FOLD_EMIT;
497 	}
498 	IR_FOLD_CONST_U(op1_insn->val.u64 / op2_insn->val.u64);
499 }
500 
501 IR_FOLD(DIV(C_I8, C_I8))
502 IR_FOLD(DIV(C_I16, C_I16))
503 IR_FOLD(DIV(C_I32, C_I32))
504 IR_FOLD(DIV(C_I64, C_I64))
505 {
506 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
507 	if (op2_insn->val.i64 == 0) {
508 		/* division by zero */
509 		IR_FOLD_EMIT;
510 	}
511 	IR_FOLD_CONST_I(op1_insn->val.i64 / op2_insn->val.i64);
512 }
513 
514 IR_FOLD(DIV(C_DOUBLE, C_DOUBLE))
515 {
516 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
517 	IR_FOLD_CONST_D(op1_insn->val.d / op2_insn->val.d);
518 }
519 
520 IR_FOLD(DIV(C_FLOAT, C_FLOAT))
521 {
522 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
523 	IR_FOLD_CONST_F(op1_insn->val.f / op2_insn->val.f);
524 }
525 
526 IR_FOLD(MOD(C_U8, C_U8))
527 IR_FOLD(MOD(C_U16, C_U16))
528 IR_FOLD(MOD(C_U32, C_U32))
529 IR_FOLD(MOD(C_U64, C_U64))
530 IR_FOLD(MOD(C_ADDR, C_ADDR))
531 {
532 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
533 	if (op2_insn->val.u64 == 0) {
534 		/* division by zero */
535 		IR_FOLD_EMIT;
536 	}
537 	IR_FOLD_CONST_U(op1_insn->val.u64 % op2_insn->val.u64);
538 }
539 
540 IR_FOLD(MOD(C_I8, C_I8))
541 IR_FOLD(MOD(C_I16, C_I16))
542 IR_FOLD(MOD(C_I32, C_I32))
543 IR_FOLD(MOD(C_I64, C_I64))
544 {
545 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
546 	if (op2_insn->val.i64 == 0) {
547 		/* division by zero */
548 		IR_FOLD_EMIT;
549 	}
550 	IR_FOLD_CONST_I(op1_insn->val.i64 % op2_insn->val.i64);
551 }
552 
553 IR_FOLD(NEG(C_I8))
554 IR_FOLD(NEG(C_I16))
555 IR_FOLD(NEG(C_I32))
556 IR_FOLD(NEG(C_I64))
557 {
558 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
559 	IR_FOLD_CONST_I(-op1_insn->val.i64);
560 }
561 
562 IR_FOLD(NEG(C_DOUBLE))
563 {
564 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
565 	IR_FOLD_CONST_D(-op1_insn->val.d);
566 }
567 
568 IR_FOLD(NEG(C_FLOAT))
569 {
570 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
571 	IR_FOLD_CONST_F(-op1_insn->val.f);
572 }
573 
574 IR_FOLD(ABS(C_I8))
575 IR_FOLD(ABS(C_I16))
576 IR_FOLD(ABS(C_I32))
577 IR_FOLD(ABS(C_I64))
578 {
579 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
580 	if (op1_insn->val.i64 >= 0) {
581 		IR_FOLD_COPY(op1);
582 	} else {
583 		IR_FOLD_CONST_I(-op1_insn->val.i64);
584 	}
585 }
586 
587 IR_FOLD(ABS(C_DOUBLE))
588 {
589 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
590 	IR_FOLD_CONST_D(fabs(op1_insn->val.d));
591 }
592 
593 IR_FOLD(ABS(C_FLOAT))
594 {
595 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
596 	IR_FOLD_CONST_F(fabsf(op1_insn->val.f));
597 }
598 
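/*
 * The *_OV rules below fold only when the result is representable in the
 * operand type; otherwise they fall through (IR_FOLD_NEXT) so the
 * overflow-checked instruction is kept. E.g. for ADD_OV(C_U8, C_U8) max is
 * 0xff, so 200 + 100 is left unfolded because 200 > 0xff - 100.
 */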
599 IR_FOLD(ADD_OV(C_U8, C_U8))
600 IR_FOLD(ADD_OV(C_U16, C_U16))
601 IR_FOLD(ADD_OV(C_U32, C_U32))
602 IR_FOLD(ADD_OV(C_U64, C_U64))
603 {
604 	ir_type type = IR_OPT_TYPE(opt);
605 	uint64_t max = ((uint64_t)0xffffffffffffffff) >> (64 - ir_type_size[type] * 8);
606 	IR_ASSERT(type == op1_insn->type);
607 	if (op1_insn->val.u64 > max - op2_insn->val.u64) {
608 		IR_FOLD_NEXT;
609 	}
610 	IR_FOLD_CONST_U(op1_insn->val.u64 + op2_insn->val.u64);
611 }
612 
613 IR_FOLD(ADD_OV(C_I8, C_I8))
614 IR_FOLD(ADD_OV(C_I16, C_I16))
615 IR_FOLD(ADD_OV(C_I32, C_I32))
616 IR_FOLD(ADD_OV(C_I64, C_I64))
617 {
618 	ir_type type = IR_OPT_TYPE(opt);
619 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
620 	int64_t min = - max - 1;
621 	IR_ASSERT(type == op1_insn->type);
622 	if ((op2_insn->val.i64 > 0 && op1_insn->val.i64 > max - op2_insn->val.i64)
623 	 || (op2_insn->val.i64 < 0 && op1_insn->val.i64 < min - op2_insn->val.i64)) {
624 		IR_FOLD_NEXT;
625 	}
626 	IR_FOLD_CONST_I(op1_insn->val.i64 + op2_insn->val.i64);
627 }
628 
629 IR_FOLD(SUB_OV(C_U8, C_U8))
630 IR_FOLD(SUB_OV(C_U16, C_U16))
631 IR_FOLD(SUB_OV(C_U32, C_U32))
632 IR_FOLD(SUB_OV(C_U64, C_U64))
633 {
634 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
635 	if (op2_insn->val.u64 > op1_insn->val.u64) {
636 		IR_FOLD_NEXT;
637 	}
638 	IR_FOLD_CONST_U(op1_insn->val.u64 - op2_insn->val.u64);
639 }
640 
641 IR_FOLD(SUB_OV(C_I8, C_I8))
642 IR_FOLD(SUB_OV(C_I16, C_I16))
643 IR_FOLD(SUB_OV(C_I32, C_I32))
644 IR_FOLD(SUB_OV(C_I64, C_I64))
645 {
646 	ir_type type = IR_OPT_TYPE(opt);
647 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
648 	int64_t min = - max - 1;
649 	IR_ASSERT(type == op1_insn->type);
650 	if ((op2_insn->val.i64 > 0 && op1_insn->val.i64 < min + op2_insn->val.i64)
651 	 || (op2_insn->val.i64 < 0 && op1_insn->val.i64 > max + op2_insn->val.i64)) {
652 		IR_FOLD_NEXT;
653 	}
654 	IR_FOLD_CONST_I(op1_insn->val.i64 - op2_insn->val.i64);
655 }
656 
657 IR_FOLD(MUL_OV(C_U8, C_U8))
658 IR_FOLD(MUL_OV(C_U16, C_U16))
659 IR_FOLD(MUL_OV(C_U32, C_U32))
660 IR_FOLD(MUL_OV(C_U64, C_U64))
661 {
662 	ir_type type = IR_OPT_TYPE(opt);
663 	uint64_t max = ((uint64_t)0xffffffffffffffff) >> (64 - ir_type_size[type] * 8);
664 	uint64_t res;
665 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
666 	res = op1_insn->val.u64 * op2_insn->val.u64;
667 	if (op1_insn->val.u64 != 0 && res / op1_insn->val.u64 != op2_insn->val.u64 && res <= max) {
668 		IR_FOLD_NEXT;
669 	}
670 	IR_FOLD_CONST_U(res);
671 }
672 
673 IR_FOLD(MUL_OV(C_I8, C_I8))
674 IR_FOLD(MUL_OV(C_I16, C_I16))
675 IR_FOLD(MUL_OV(C_I32, C_I32))
676 IR_FOLD(MUL_OV(C_I64, C_I64))
677 {
678 	ir_type type = IR_OPT_TYPE(opt);
679 	int64_t max = ((uint64_t)0x7fffffffffffffff) >> (64 - ir_type_size[type] * 8);
680 	int64_t min = - max - 1;
681 	int64_t res;
682 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
683 	res = op1_insn->val.i64 * op2_insn->val.i64;
684 	if (op1_insn->val.i64 != 0 && res / op1_insn->val.i64 != op2_insn->val.i64 && res >= min && res <= max) {
685 		IR_FOLD_NEXT;
686 	}
687 	IR_FOLD_CONST_U(res);
688 }
689 
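/* OVERFLOW(x) folds to IR_FALSE below unless x is produced by ADD_OV/SUB_OV/MUL_OV. */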
690 IR_FOLD(OVERFLOW(_))
691 {
692 	if (op1_insn->op != IR_ADD_OV && op1_insn->op != IR_SUB_OV && op1_insn->op != IR_MUL_OV) {
693 		IR_FOLD_COPY(IR_FALSE);
694 	}
695 	IR_FOLD_NEXT;
696 }
697 
698 IR_FOLD(NOT(C_BOOL))
699 {
700 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
701 	IR_FOLD_BOOL(!op1_insn->val.u64);
702 }
703 
704 IR_FOLD(NOT(C_U8))
705 IR_FOLD(NOT(C_CHAR))
706 {
707 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
708 	IR_FOLD_CONST_U(~op1_insn->val.u8);
709 }
710 
711 IR_FOLD(NOT(C_I8))
712 {
713 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
714 	IR_FOLD_CONST_I(~op1_insn->val.i8);
715 }
716 
717 IR_FOLD(NOT(C_U16))
718 {
719 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
720 	IR_FOLD_CONST_U(~op1_insn->val.u16);
721 }
722 
723 IR_FOLD(NOT(C_I16))
724 {
725 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
726 	IR_FOLD_CONST_I(~op1_insn->val.i16);
727 }
728 
729 IR_FOLD(NOT(C_U32))
730 {
731 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
732 	IR_FOLD_CONST_U(~op1_insn->val.u32);
733 }
734 
735 IR_FOLD(NOT(C_I32))
736 {
737 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
738 	IR_FOLD_CONST_I(~op1_insn->val.i32);
739 }
740 
741 IR_FOLD(NOT(C_U64))
742 IR_FOLD(NOT(C_I64))
743 {
744 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
745 	IR_FOLD_CONST_U(~op1_insn->val.u64);
746 }
747 
748 IR_FOLD(OR(C_BOOL, C_BOOL))
749 {
750 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
751 	IR_FOLD_BOOL(op1_insn->val.b || op2_insn->val.b);
752 }
753 
754 IR_FOLD(OR(C_CHAR, C_CHAR))
755 IR_FOLD(OR(C_U8, C_U8))
756 IR_FOLD(OR(C_U16, C_U16))
757 IR_FOLD(OR(C_U32, C_U32))
758 IR_FOLD(OR(C_U64, C_U64))
759 {
760 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
761 	IR_FOLD_CONST_U(op1_insn->val.u64 | op2_insn->val.u64);
762 }
763 
764 IR_FOLD(OR(C_I8, C_I8))
765 IR_FOLD(OR(C_I16, C_I16))
766 IR_FOLD(OR(C_I32, C_I32))
767 IR_FOLD(OR(C_I64, C_I64))
768 {
769 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
770 	IR_FOLD_CONST_I(op1_insn->val.i64 | op2_insn->val.i64);
771 }
772 
773 IR_FOLD(AND(C_BOOL, C_BOOL))
774 {
775 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
776 	IR_FOLD_BOOL(op1_insn->val.b && op2_insn->val.b);
777 }
778 
779 IR_FOLD(AND(C_CHAR, C_CHAR))
780 IR_FOLD(AND(C_U8, C_U8))
781 IR_FOLD(AND(C_U16, C_U16))
782 IR_FOLD(AND(C_U32, C_U32))
783 IR_FOLD(AND(C_U64, C_U64))
784 {
785 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
786 	IR_FOLD_CONST_U(op1_insn->val.u64 & op2_insn->val.u64);
787 }
788 
789 IR_FOLD(AND(C_I8, C_I8))
790 IR_FOLD(AND(C_I16, C_I16))
791 IR_FOLD(AND(C_I32, C_I32))
792 IR_FOLD(AND(C_I64, C_I64))
793 {
794 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
795 	IR_FOLD_CONST_I(op1_insn->val.i64 & op2_insn->val.i64);
796 }
797 
798 IR_FOLD(XOR(C_BOOL, C_BOOL))
799 {
800 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
801 	IR_FOLD_BOOL(op1_insn->val.b != op2_insn->val.b);
802 }
803 
804 IR_FOLD(XOR(C_U8, C_U8))
805 IR_FOLD(XOR(C_CHAR, C_CHAR))
806 {
807 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
808 	IR_FOLD_CONST_U(op1_insn->val.u8 ^ op2_insn->val.u8);
809 }
810 
811 IR_FOLD(XOR(C_I8, C_I8))
812 {
813 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
814 	IR_FOLD_CONST_I(op1_insn->val.i8 ^ op2_insn->val.i8);
815 }
816 
817 IR_FOLD(XOR(C_U16, C_U16))
818 {
819 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
820 	IR_FOLD_CONST_U(op1_insn->val.u16 ^ op2_insn->val.u16);
821 }
822 
823 IR_FOLD(XOR(C_I16, C_I16))
824 {
825 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
826 	IR_FOLD_CONST_I(op1_insn->val.i16 ^ op2_insn->val.i16);
827 }
828 
829 IR_FOLD(XOR(C_U32, C_U32))
830 {
831 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
832 	IR_FOLD_CONST_U(op1_insn->val.u32 ^ op2_insn->val.u32);
833 }
834 
835 IR_FOLD(XOR(C_I32, C_I32))
836 {
837 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
838 	IR_FOLD_CONST_I(op1_insn->val.i32 ^ op2_insn->val.i32);
839 }
840 
841 IR_FOLD(XOR(C_U64, C_U64))
842 {
843 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
844 	IR_FOLD_CONST_U(op1_insn->val.u64 ^ op2_insn->val.u64);
845 }
846 
847 IR_FOLD(XOR(C_I64, C_I64))
848 {
849 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
850 	IR_FOLD_CONST_I(op1_insn->val.i64 ^ op2_insn->val.i64);
851 }
852 
853 IR_FOLD(SHL(C_U8, C_U8))
854 IR_FOLD(SHL(C_CHAR, C_CHAR))
855 {
856 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
857 	IR_FOLD_CONST_U(op1_insn->val.u8 << op2_insn->val.u8);
858 }
859 
860 IR_FOLD(SHL(C_I8, C_I8))
861 {
862 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
863 	IR_FOLD_CONST_I(op1_insn->val.i8 << op2_insn->val.i8);
864 }
865 
866 IR_FOLD(SHL(C_U16, C_U16))
867 {
868 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
869 	IR_FOLD_CONST_U(op1_insn->val.u16 << op2_insn->val.u16);
870 }
871 
872 IR_FOLD(SHL(C_I16, C_I16))
873 {
874 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
875 	IR_FOLD_CONST_I(op1_insn->val.i16 << op2_insn->val.i16);
876 }
877 
878 IR_FOLD(SHL(C_U32, C_U32))
879 {
880 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
881 	IR_FOLD_CONST_U(op1_insn->val.u32 << op2_insn->val.u32);
882 }
883 
884 IR_FOLD(SHL(C_I32, C_I32))
885 {
886 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
887 	IR_FOLD_CONST_I(op1_insn->val.i32 << op2_insn->val.i32);
888 }
889 
890 IR_FOLD(SHL(C_U64, C_U64))
891 IR_FOLD(SHL(C_I64, C_I64))
892 {
893 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
894 	IR_FOLD_CONST_U(op1_insn->val.u64 << op2_insn->val.u64);
895 }
896 
897 IR_FOLD(SHR(C_U8, C_U8))
898 IR_FOLD(SHR(C_CHAR, C_CHAR))
899 {
900 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
901 	IR_FOLD_CONST_U(op1_insn->val.u8 >> op2_insn->val.u8);
902 }
903 
904 IR_FOLD(SHR(C_I8, C_I8))
905 {
906 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
907 	IR_FOLD_CONST_I((int8_t)(op1_insn->val.u8 >> op2_insn->val.u8));
908 }
909 
910 IR_FOLD(SHR(C_U16, C_U16))
911 {
912 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
913 	IR_FOLD_CONST_U(op1_insn->val.u16 >> op2_insn->val.u16);
914 }
915 
916 IR_FOLD(SHR(C_I16, C_I16))
917 {
918 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
919 	IR_FOLD_CONST_U((int16_t)(op1_insn->val.u16 >> op2_insn->val.u16));
920 }
921 
922 IR_FOLD(SHR(C_U32, C_U32))
923 {
924 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
925 	IR_FOLD_CONST_U(op1_insn->val.u32 >> op2_insn->val.u32);
926 }
927 
928 IR_FOLD(SHR(C_I32, C_I32))
929 {
930 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
931 	IR_FOLD_CONST_U((int32_t)(op1_insn->val.u32 >> op2_insn->val.u32));
932 }
933 
934 IR_FOLD(SHR(C_U64, C_U64))
935 IR_FOLD(SHR(C_I64, C_I64))
936 {
937 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
938 	IR_FOLD_CONST_U(op1_insn->val.u64 >> op2_insn->val.u64);
939 }
940 
941 IR_FOLD(SAR(C_U8, C_U8))
942 IR_FOLD(SAR(C_CHAR, C_CHAR))
943 {
944 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
945 	IR_FOLD_CONST_U((uint8_t)(op1_insn->val.i8 >> op2_insn->val.i8));
946 }
947 
948 IR_FOLD(SAR(C_I8, C_I8))
949 {
950 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
951 	IR_FOLD_CONST_I(op1_insn->val.i8 >> op2_insn->val.i8);
952 }
953 
954 IR_FOLD(SAR(C_U16, C_U16))
955 {
956 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
957 	IR_FOLD_CONST_U((uint16_t)(op1_insn->val.i16 >> op2_insn->val.i16));
958 }
959 
960 IR_FOLD(SAR(C_I16, C_I16))
961 {
962 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
963 	IR_FOLD_CONST_I(op1_insn->val.i16 >> op2_insn->val.i16);
964 }
965 
966 IR_FOLD(SAR(C_U32, C_U32))
967 {
968 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
969 	IR_FOLD_CONST_U((uint32_t)(op1_insn->val.i32 >> op2_insn->val.i32));
970 }
971 
972 IR_FOLD(SAR(C_I32, C_I32))
973 {
974 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
975 	IR_FOLD_CONST_I(op1_insn->val.i32 >> op2_insn->val.i32);
976 }
977 
978 IR_FOLD(SAR(C_U64, C_U64))
979 IR_FOLD(SAR(C_I64, C_I64))
980 {
981 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
982 	IR_FOLD_CONST_I(op1_insn->val.i64 >> op2_insn->val.i64);
983 }
984 
985 IR_FOLD(ROL(C_U8, C_U8))
986 IR_FOLD(ROL(C_CHAR, C_CHAR))
987 {
988 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
989 	IR_FOLD_CONST_U(ir_rol8(op1_insn->val.u8, op2_insn->val.u8));
990 }
991 
992 IR_FOLD(ROL(C_I8, C_I8))
993 {
994 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
995 	IR_FOLD_CONST_I((int8_t)ir_rol8(op1_insn->val.u8, op2_insn->val.u8));
996 }
997 
998 IR_FOLD(ROL(C_U16, C_U16))
999 {
1000 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1001 	IR_FOLD_CONST_U(ir_rol16(op1_insn->val.u16, op2_insn->val.u16));
1002 }
1003 
1004 IR_FOLD(ROL(C_I16, C_I16))
1005 {
1006 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1007 	IR_FOLD_CONST_I((int16_t)ir_rol16(op1_insn->val.u16, op2_insn->val.u16));
1008 }
1009 
1010 IR_FOLD(ROL(C_U32, C_U32))
1011 {
1012 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1013 	IR_FOLD_CONST_U(ir_rol32(op1_insn->val.u32, op2_insn->val.u32));
1014 }
1015 
1016 IR_FOLD(ROL(C_I32, C_I32))
1017 {
1018 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1019 	IR_FOLD_CONST_I((int32_t)ir_rol32(op1_insn->val.u32, op2_insn->val.u32));
1020 }
1021 
1022 IR_FOLD(ROL(C_U64, C_U64))
1023 IR_FOLD(ROL(C_I64, C_I64))
1024 {
1025 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1026 	IR_FOLD_CONST_U(ir_rol64(op1_insn->val.u64, op2_insn->val.u64));
1027 }
1028 
1029 IR_FOLD(ROR(C_U8, C_U8))
1030 IR_FOLD(ROR(C_CHAR, C_CHAR))
1031 {
1032 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1033 	IR_FOLD_CONST_U(ir_ror8(op1_insn->val.u8, op2_insn->val.u8));
1034 }
1035 
1036 IR_FOLD(ROR(C_I8, C_I8))
1037 {
1038 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1039 	IR_FOLD_CONST_I((int8_t)ir_ror8(op1_insn->val.u8, op2_insn->val.u8));
1040 }
1041 
1042 IR_FOLD(ROR(C_U16, C_U16))
1043 {
1044 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1045 	IR_FOLD_CONST_U(ir_ror16(op1_insn->val.u16, op2_insn->val.u16));
1046 }
1047 
1048 IR_FOLD(ROR(C_I16, C_I16))
1049 {
1050 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1051 	IR_FOLD_CONST_I((int16_t)ir_ror16(op1_insn->val.u16, op2_insn->val.u16));
1052 }
1053 
1054 IR_FOLD(ROR(C_U32, C_U32))
1055 {
1056 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1057 	IR_FOLD_CONST_U(ir_ror32(op1_insn->val.u32, op2_insn->val.u32));
1058 }
1059 
1060 IR_FOLD(ROR(C_I32, C_I32))
1061 {
1062 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1063 	IR_FOLD_CONST_I((int32_t)ir_ror32(op1_insn->val.u32, op2_insn->val.u32));
1064 }
1065 
1066 IR_FOLD(ROR(C_U64, C_U64))
1067 IR_FOLD(ROR(C_I64, C_I64))
1068 {
1069 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1070 	IR_FOLD_CONST_U(ir_ror64(op1_insn->val.u64, op2_insn->val.u64));
1071 }
1072 
1073 //IR_FOLD(BSWAP(CONST))
1074 //TODO: bswap
1075 
1076 IR_FOLD(MIN(C_BOOL, C_BOOL))
1077 IR_FOLD(MIN(C_U8, C_U8))
1078 IR_FOLD(MIN(C_U16, C_U16))
1079 IR_FOLD(MIN(C_U32, C_U32))
1080 IR_FOLD(MIN(C_U64, C_U64))
1081 IR_FOLD(MIN(C_ADDR, C_ADDR))
1082 {
1083 	IR_FOLD_COPY(op1_insn->val.u64 <= op2_insn->val.u64 ? op1 : op2);
1084 }
1085 
1086 IR_FOLD(MIN(C_CHAR, C_CHAR))
1087 IR_FOLD(MIN(C_I8, C_U8))
1088 IR_FOLD(MIN(C_I16, C_U16))
1089 IR_FOLD(MIN(C_I32, C_U32))
1090 IR_FOLD(MIN(C_I64, C_U64))
1091 {
1092 	IR_FOLD_COPY(op1_insn->val.i64 <= op2_insn->val.i64 ? op1 : op2);
1093 }
1094 
1095 IR_FOLD(MIN(C_DOUBLE, C_DOUBLE))
1096 {
1097 	IR_FOLD_COPY(op1_insn->val.d <= op2_insn->val.d ? op1 : op2);
1098 }
1099 
1100 IR_FOLD(MIN(C_FLOAT, C_FLOAT))
1101 {
1102 	IR_FOLD_COPY(op1_insn->val.f <= op2_insn->val.f ? op1 : op2);
1103 }
1104 
1105 IR_FOLD(MAX(C_BOOL, C_BOOL))
1106 IR_FOLD(MAX(C_U8, C_U8))
1107 IR_FOLD(MAX(C_U16, C_U16))
1108 IR_FOLD(MAX(C_U32, C_U32))
1109 IR_FOLD(MAX(C_U64, C_U64))
1110 IR_FOLD(MAX(C_ADDR, C_ADDR))
1111 {
1112 	IR_FOLD_COPY(op1_insn->val.u64 >= op2_insn->val.u64 ? op1 : op2);
1113 }
1114 
1115 IR_FOLD(MAX(C_CHAR, C_CHAR))
1116 IR_FOLD(MAX(C_I8, C_U8))
1117 IR_FOLD(MAX(C_I16, C_U16))
1118 IR_FOLD(MAX(C_I32, C_U32))
1119 IR_FOLD(MAX(C_I64, C_U64))
1120 {
1121 	IR_FOLD_COPY(op1_insn->val.i64 >= op2_insn->val.i64 ? op1 : op2);
1122 }
1123 
1124 IR_FOLD(MAX(C_DOUBLE, C_DOUBLE))
1125 {
1126 	IR_FOLD_COPY(op1_insn->val.d >= op2_insn->val.d ? op1 : op2);
1127 }
1128 
1129 IR_FOLD(MAX(C_FLOAT, C_FLOAT))
1130 {
1131 	IR_FOLD_COPY(op1_insn->val.f >= op2_insn->val.f ? op1 : op2);
1132 }
1133 
1134 IR_FOLD(SEXT(C_I8))
1135 IR_FOLD(SEXT(C_U8))
1136 IR_FOLD(SEXT(C_BOOL))
1137 {
1138 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1139 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1140 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i8);
1141 }
1142 
1143 IR_FOLD(SEXT(C_I16))
1144 IR_FOLD(SEXT(C_U16))
1145 {
1146 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1147 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1148 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i16);
1149 }
1150 
1151 IR_FOLD(SEXT(C_I32))
1152 IR_FOLD(SEXT(C_U32))
1153 {
1154 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1155 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1156 	IR_FOLD_CONST_I((int64_t)op1_insn->val.i32);
1157 }
1158 
1159 IR_FOLD(ZEXT(C_I8))
1160 IR_FOLD(ZEXT(C_U8))
1161 IR_FOLD(ZEXT(C_BOOL))
1162 {
1163 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1164 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1165 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u8);
1166 }
1167 
1168 IR_FOLD(ZEXT(C_I16))
1169 IR_FOLD(ZEXT(C_U16))
1170 {
1171 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1172 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1173 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u16);
1174 }
1175 
1176 IR_FOLD(ZEXT(C_I32))
1177 IR_FOLD(ZEXT(C_U32))
1178 {
1179 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1180 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] > ir_type_size[op1_insn->type]);
1181 	IR_FOLD_CONST_U((uint64_t)op1_insn->val.u32);
1182 }
1183 
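/*
 * TRUNC of a constant keeps the low bits of the wider value by reading the
 * narrower member of the constant's value union.
 */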
1184 IR_FOLD(TRUNC(C_I16))
1185 IR_FOLD(TRUNC(C_I32))
1186 IR_FOLD(TRUNC(C_I64))
1187 IR_FOLD(TRUNC(C_U16))
1188 IR_FOLD(TRUNC(C_U32))
1189 IR_FOLD(TRUNC(C_U64))
1190 {
1191 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1192 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] < ir_type_size[op1_insn->type]);
1193 	switch (IR_OPT_TYPE(opt)) {
1194 		default:
1195 			IR_ASSERT(0);
1196 		case IR_I8:
1197 			IR_FOLD_CONST_I(op1_insn->val.i8);
1198 		case IR_I16:
1199 			IR_FOLD_CONST_I(op1_insn->val.i16);
1200 		case IR_I32:
1201 			IR_FOLD_CONST_I(op1_insn->val.i32);
1202 		case IR_U8:
1203 			IR_FOLD_CONST_U(op1_insn->val.u8);
1204 		case IR_U16:
1205 			IR_FOLD_CONST_U(op1_insn->val.u16);
1206 		case IR_U32:
1207 			IR_FOLD_CONST_U(op1_insn->val.u32);
1208 	}
1209 }
1210 
1211 
1212 IR_FOLD(BITCAST(C_I8))
1213 IR_FOLD(BITCAST(C_I16))
1214 IR_FOLD(BITCAST(C_I32))
1215 IR_FOLD(BITCAST(C_I64))
1216 IR_FOLD(BITCAST(C_U8))
1217 IR_FOLD(BITCAST(C_U16))
1218 IR_FOLD(BITCAST(C_U32))
1219 IR_FOLD(BITCAST(C_U64))
1220 IR_FOLD(BITCAST(C_FLOAT))
1221 IR_FOLD(BITCAST(C_DOUBLE))
1222 IR_FOLD(BITCAST(C_BOOL))
1223 IR_FOLD(BITCAST(C_CHAR))
1224 IR_FOLD(BITCAST(C_ADDR))
1225 {
1226 	IR_ASSERT(ir_type_size[IR_OPT_TYPE(opt)] == ir_type_size[op1_insn->type]);
1227 	switch (IR_OPT_TYPE(opt)) {
1228 		default:
1229 			IR_ASSERT(0);
1230 		case IR_BOOL:
1231 			IR_FOLD_BOOL(op1_insn->val.i8 != 0);
1232 		case IR_I8:
1233 			IR_FOLD_CONST_I(op1_insn->val.i8);
1234 		case IR_I16:
1235 			IR_FOLD_CONST_I(op1_insn->val.i16);
1236 		case IR_I32:
1237 			IR_FOLD_CONST_I(op1_insn->val.i32);
1238 		case IR_I64:
1239 			IR_FOLD_CONST_I(op1_insn->val.i64);
1240 		case IR_U8:
1241 			IR_FOLD_CONST_U(op1_insn->val.u8);
1242 		case IR_U16:
1243 			IR_FOLD_CONST_U(op1_insn->val.u16);
1244 		case IR_U32:
1245 			IR_FOLD_CONST_U(op1_insn->val.u32);
1246 		case IR_U64:
1247 			IR_FOLD_CONST_U(op1_insn->val.u64);
1248 		case IR_FLOAT:
1249 			IR_FOLD_CONST_F(op1_insn->val.f);
1250 		case IR_DOUBLE:
1251 			IR_FOLD_CONST_D(op1_insn->val.d);
1252 		case IR_CHAR:
1253 			IR_FOLD_CONST_I(op1_insn->val.c);
1254 		case IR_ADDR:
1255 			IR_FOLD_CONST_U(op1_insn->val.addr);
1256 	}
1257 }
1258 
1259 IR_FOLD(INT2FP(C_I8))
1260 IR_FOLD(INT2FP(C_I16))
1261 IR_FOLD(INT2FP(C_I32))
1262 IR_FOLD(INT2FP(C_I64))
1263 {
1264 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1265 		IR_FOLD_CONST_D((double)op1_insn->val.i64);
1266 	} else {
1267 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1268 		IR_FOLD_CONST_F((float)op1_insn->val.i64);
1269 	}
1270 }
1271 
1272 IR_FOLD(INT2FP(C_U8))
1273 IR_FOLD(INT2FP(C_U16))
1274 IR_FOLD(INT2FP(C_U32))
1275 IR_FOLD(INT2FP(C_U64))
1276 {
1277 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1278 		IR_FOLD_CONST_D((double)op1_insn->val.u64);
1279 	} else {
1280 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1281 		IR_FOLD_CONST_F((float)op1_insn->val.u64);
1282 	}
1283 }
1284 
1285 IR_FOLD(FP2INT(C_FLOAT))
1286 {
1287 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1288 	switch (IR_OPT_TYPE(opt)) {
1289 		default:
1290 			IR_ASSERT(0);
1291 		case IR_I8:
1292 			IR_FOLD_CONST_I((int8_t)op1_insn->val.f);
1293 		case IR_I16:
1294 			IR_FOLD_CONST_I((int16_t)op1_insn->val.f);
1295 		case IR_I32:
1296 			IR_FOLD_CONST_I((int32_t)op1_insn->val.f);
1297 		case IR_I64:
1298 			IR_FOLD_CONST_I((int64_t)op1_insn->val.f);
1299 		case IR_U8:
1300 			IR_FOLD_CONST_U((uint8_t)op1_insn->val.f);
1301 		case IR_U16:
1302 			IR_FOLD_CONST_U((uint16_t)op1_insn->val.f);
1303 		case IR_U32:
1304 			IR_FOLD_CONST_U((uint32_t)op1_insn->val.f);
1305 		case IR_U64:
1306 			IR_FOLD_CONST_U((uint64_t)op1_insn->val.f);
1307 	}
1308 }
1309 
1310 IR_FOLD(FP2INT(C_DOUBLE))
1311 {
1312 	IR_ASSERT(IR_IS_TYPE_INT(IR_OPT_TYPE(opt)));
1313 	switch (IR_OPT_TYPE(opt)) {
1314 		default:
1315 			IR_ASSERT(0);
1316 		case IR_I8:
1317 			IR_FOLD_CONST_I((int8_t)op1_insn->val.d);
1318 		case IR_I16:
1319 			IR_FOLD_CONST_I((int16_t)op1_insn->val.d);
1320 		case IR_I32:
1321 			IR_FOLD_CONST_I((int32_t)op1_insn->val.d);
1322 		case IR_I64:
1323 			IR_FOLD_CONST_I((int64_t)op1_insn->val.d);
1324 		case IR_U8:
1325 			IR_FOLD_CONST_U((uint8_t)op1_insn->val.d);
1326 		case IR_U16:
1327 			IR_FOLD_CONST_U((uint16_t)op1_insn->val.d);
1328 		case IR_U32:
1329 			IR_FOLD_CONST_U((uint32_t)op1_insn->val.d);
1330 		case IR_U64:
1331 			IR_FOLD_CONST_U((uint64_t)op1_insn->val.d);
1332 	}
1333 }
1334 
1335 IR_FOLD(FP2FP(C_FLOAT))
1336 {
1337 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1338 		IR_FOLD_CONST_D((double)op1_insn->val.f);
1339 	} else {
1340 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1341 		IR_FOLD_COPY(op1);
1342 	}
1343 }
1344 
1345 IR_FOLD(FP2FP(C_DOUBLE))
1346 {
1347 	if (IR_OPT_TYPE(opt) == IR_DOUBLE) {
1348 		IR_FOLD_COPY(op1);
1349 	} else {
1350 		IR_ASSERT(IR_OPT_TYPE(opt) == IR_FLOAT);
1351 		IR_FOLD_CONST_F((float)op1_insn->val.d);
1352 	}
1353 }
1354 
1355 // TODO: constant functions (e.g.  sin, cos)
1356 
1357 /* Copy Propagation */
1358 IR_FOLD(COPY(_))
1359 {
1360 	IR_ASSERT(IR_OPT_TYPE(opt) == op1_insn->type);
1361 	if (!op2) {
1362 		IR_FOLD_COPY(op1);
1363 	}
1364 	/* skip CSE */
1365 	IR_FOLD_EMIT;
1366 }
1367 
1368 IR_FOLD(PHI(_, _)) // TODO: PHI(_, _, _)
1369 {
1370 	if (op2 == op3 && op3 != IR_UNUSED) {
1371 		IR_FOLD_COPY(op2);
1372 	}
1373 	/* skip CSE */
1374 	opt = opt | (3 << IR_OPT_INPUTS_SHIFT);
1375 	IR_FOLD_EMIT;
1376 }
1377 
1378 IR_FOLD(COND(C_BOOL, _)) // TODO: COND(CONST, _, _)
1379 IR_FOLD(COND(C_U8, _))
1380 IR_FOLD(COND(C_U16, _))
1381 IR_FOLD(COND(C_U32, _))
1382 IR_FOLD(COND(C_U64, _))
1383 IR_FOLD(COND(C_ADDR, _))
1384 IR_FOLD(COND(C_CHAR, _))
1385 IR_FOLD(COND(C_I8, _))
1386 IR_FOLD(COND(C_I16, _))
1387 IR_FOLD(COND(C_I32, _))
1388 IR_FOLD(COND(C_I64, _))
1389 IR_FOLD(COND(C_DOUBLE, _))
1390 IR_FOLD(COND(C_FLOAT, _))
1391 {
1392 	if (ir_const_is_true(op1_insn)) {
1393 		IR_FOLD_COPY(op2);
1394 	} else {
1395 		IR_FOLD_COPY(op3);
1396 	}
1397 }
1398 
1399 IR_FOLD(BITCAST(_))
1400 {
1401 	if (IR_OPT_TYPE(opt) == op1_insn->type) {
1402 		IR_FOLD_COPY(op1);
1403 	}
1404 	IR_FOLD_NEXT;
1405 }
1406 
1407 /* Algebraic simplifications */
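/*
 * The rules below rewrite the instruction rather than compute a constant:
 * they either copy an existing operand (IR_FOLD_COPY) or adjust opt/op1/op2
 * and restart folding on the simplified form (IR_FOLD_RESTART).
 */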
1408 IR_FOLD(ABS(ABS))
1409 {
1410 	/* abs(x = abs(y)) => x */
1411 	IR_FOLD_COPY(op1);
1412 }
1413 
1414 IR_FOLD(ABS(NEG))
1415 {
1416 	/* abs(neg(y)) => abs(y) */
1417 	op1 = op1_insn->op1;
1418 	IR_FOLD_RESTART;
1419 }
1420 
1421 IR_FOLD(NEG(NEG))
1422 IR_FOLD(NOT(NOT))
1423 IR_FOLD(BSWAP(BSWAP))
1424 {
1425 	/* f(f(y)) => y */
1426 	IR_FOLD_COPY(op1_insn->op1);
1427 }
1428 
1429 IR_FOLD(EQ(_, C_BOOL))
1430 {
1431 	if (op2 == IR_TRUE) {
1432 		IR_FOLD_COPY(op1);
1433 	} else {
1434 		opt = IR_OPT(IR_NOT, IR_BOOL);
1435 		op2 = IR_UNUSED;
1436 		IR_FOLD_RESTART;
1437 	}
1438 }
1439 
1440 IR_FOLD(NE(_, C_BOOL))
1441 {
1442 	if (op2 != IR_TRUE) {
1443 		IR_FOLD_COPY(op1);
1444 	} else {
1445 		opt = IR_OPT(IR_NOT, IR_BOOL);
1446 		op2 = IR_UNUSED;
1447 		IR_FOLD_RESTART;
1448 	}
1449 }
1450 
1451 IR_FOLD(EQ(ZEXT, C_U16))
1452 IR_FOLD(EQ(ZEXT, C_U32))
1453 IR_FOLD(EQ(ZEXT, C_U64))
1454 IR_FOLD(EQ(ZEXT, C_I16))
1455 IR_FOLD(EQ(ZEXT, C_I32))
1456 IR_FOLD(EQ(ZEXT, C_I64))
1457 IR_FOLD(EQ(ZEXT, C_ADDR))
1458 IR_FOLD(EQ(SEXT, C_U16))
1459 IR_FOLD(EQ(SEXT, C_U32))
1460 IR_FOLD(EQ(SEXT, C_U64))
1461 IR_FOLD(EQ(SEXT, C_I16))
1462 IR_FOLD(EQ(SEXT, C_I32))
1463 IR_FOLD(EQ(SEXT, C_I64))
1464 IR_FOLD(EQ(SEXT, C_ADDR))
1465 {
1466 	if (op2_insn->val.u64 == 0 && ctx->ir_base[op1_insn->op1].type == IR_BOOL) {
1467 		opt = IR_OPT(IR_NOT, IR_BOOL);
1468 		op1 = op1_insn->op1;
1469 		op2 = IR_UNUSED;
1470 		IR_FOLD_RESTART;
1471 	}
1472 	IR_FOLD_NEXT;
1473 }
1474 
1475 IR_FOLD(NE(ZEXT, C_U16))
1476 IR_FOLD(NE(ZEXT, C_U32))
1477 IR_FOLD(NE(ZEXT, C_U64))
1478 IR_FOLD(NE(ZEXT, C_I16))
1479 IR_FOLD(NE(ZEXT, C_I32))
1480 IR_FOLD(NE(ZEXT, C_I64))
1481 IR_FOLD(NE(ZEXT, C_ADDR))
1482 IR_FOLD(NE(SEXT, C_U16))
1483 IR_FOLD(NE(SEXT, C_U32))
1484 IR_FOLD(NE(SEXT, C_U64))
1485 IR_FOLD(NE(SEXT, C_I16))
1486 IR_FOLD(NE(SEXT, C_I32))
1487 IR_FOLD(NE(SEXT, C_I64))
1488 IR_FOLD(NE(SEXT, C_ADDR))
1489 {
1490 	if (op2_insn->val.u64 == 0 && ctx->ir_base[op1_insn->op1].type == IR_BOOL) {
1491 		IR_FOLD_COPY(op1_insn->op1);
1492 	}
1493 	IR_FOLD_NEXT;
1494 }
1495 
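/*
 * NOT of an integer comparison is folded by flipping the predicate to its
 * complement (opt ^ 1, which presumably maps each predicate to its paired
 * opposite in the opcode encoding); floating-point comparisons are left
 * alone because the inversion would not hold for NaN operands.
 */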
1496 IR_FOLD(NOT(EQ))
1497 IR_FOLD(NOT(NE))
1498 IR_FOLD(NOT(LT))
1499 IR_FOLD(NOT(GE))
1500 IR_FOLD(NOT(LE))
1501 IR_FOLD(NOT(GT))
1502 IR_FOLD(NOT(ULT))
1503 IR_FOLD(NOT(UGE))
1504 IR_FOLD(NOT(ULE))
1505 IR_FOLD(NOT(UGT))
1506 {
1507 	if (IR_IS_TYPE_INT(ctx->ir_base[op1_insn->op1].type)) {
1508 		opt = op1_insn->opt ^ 1;
1509 		op1 = op1_insn->op1;
1510 		op2 = op1_insn->op2;
1511 		IR_FOLD_RESTART;
1512 	}
1513 	IR_FOLD_NEXT;
1514 }
1515 
1516 IR_FOLD(ADD(_, C_U8))
1517 IR_FOLD(ADD(_, C_U16))
1518 IR_FOLD(ADD(_, C_U32))
1519 IR_FOLD(ADD(_, C_U64))
1520 IR_FOLD(ADD(_, C_I8))
1521 IR_FOLD(ADD(_, C_I16))
1522 IR_FOLD(ADD(_, C_I32))
1523 IR_FOLD(ADD(_, C_I64))
1524 IR_FOLD(ADD(_, C_ADDR))
1525 IR_FOLD(SUB(_, C_U8))
1526 IR_FOLD(SUB(_, C_U16))
1527 IR_FOLD(SUB(_, C_U32))
1528 IR_FOLD(SUB(_, C_U64))
1529 IR_FOLD(SUB(_, C_I8))
1530 IR_FOLD(SUB(_, C_I16))
1531 IR_FOLD(SUB(_, C_I32))
1532 IR_FOLD(SUB(_, C_I64))
1533 IR_FOLD(SUB(_, C_ADDR))
1534 IR_FOLD(ADD_OV(_, C_U8))
1535 IR_FOLD(ADD_OV(_, C_U16))
1536 IR_FOLD(ADD_OV(_, C_U32))
1537 IR_FOLD(ADD_OV(_, C_U64))
1538 IR_FOLD(ADD_OV(_, C_I8))
1539 IR_FOLD(ADD_OV(_, C_I16))
1540 IR_FOLD(ADD_OV(_, C_I32))
1541 IR_FOLD(ADD_OV(_, C_I64))
1542 IR_FOLD(ADD_OV(_, C_ADDR))
1543 IR_FOLD(SUB_OV(_, C_U8))
1544 IR_FOLD(SUB_OV(_, C_U16))
1545 IR_FOLD(SUB_OV(_, C_U32))
1546 IR_FOLD(SUB_OV(_, C_U64))
1547 IR_FOLD(SUB_OV(_, C_I8))
1548 IR_FOLD(SUB_OV(_, C_I16))
1549 IR_FOLD(SUB_OV(_, C_I32))
1550 IR_FOLD(SUB_OV(_, C_I64))
1551 IR_FOLD(SUB_OV(_, C_ADDR))
1552 {
1553 	if (op2_insn->val.u64 == 0) {
1554 		/* a +/- 0 => a */
1555 		IR_FOLD_COPY(op1);
1556 	}
1557 	IR_FOLD_NEXT;
1558 }
1559 
1560 IR_FOLD(ADD(C_U8, _))
1561 IR_FOLD(ADD(C_U16, _))
1562 IR_FOLD(ADD(C_U32, _))
1563 IR_FOLD(ADD(C_U64, _))
1564 IR_FOLD(ADD(C_I8, _))
1565 IR_FOLD(ADD(C_I16, _))
1566 IR_FOLD(ADD(C_I32, _))
1567 IR_FOLD(ADD(C_I64, _))
1568 IR_FOLD(ADD(C_ADDR, _))
1569 IR_FOLD(ADD_OV(C_U8, _))
1570 IR_FOLD(ADD_OV(C_U16, _))
1571 IR_FOLD(ADD_OV(C_U32, _))
1572 IR_FOLD(ADD_OV(C_U64, _))
1573 IR_FOLD(ADD_OV(C_I8, _))
1574 IR_FOLD(ADD_OV(C_I16, _))
1575 IR_FOLD(ADD_OV(C_I32, _))
1576 IR_FOLD(ADD_OV(C_I64, _))
1577 IR_FOLD(ADD_OV(C_ADDR, _))
1578 {
1579 	if (op1_insn->val.u64 == 0) {
1580 		/* 0 + a => a */
1581 		IR_FOLD_COPY(op2);
1582 	}
1583 	IR_FOLD_NEXT;
1584 }
1585 
1586 IR_FOLD(SUB(C_I8, _))
1587 IR_FOLD(SUB(C_I16, _))
1588 IR_FOLD(SUB(C_I32, _))
1589 IR_FOLD(SUB(C_I64, _))
1590 {
1591 	if (op1_insn->val.u64 == 0) {
1592 		/* 0 - a => -a (invalid for +0.0) */
1593 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1594 		op1 = op2;
1595 		op2 = IR_UNUSED;
1596 		IR_FOLD_RESTART;
1597 	}
1598 	IR_FOLD_NEXT;
1599 }
1600 
1601 IR_FOLD(UGE(_, C_U8))
1602 IR_FOLD(UGE(_, C_U16))
1603 IR_FOLD(UGE(_, C_U32))
1604 IR_FOLD(UGE(_, C_U64))
1605 IR_FOLD(UGE(_, C_I8))
1606 IR_FOLD(UGE(_, C_I16))
1607 IR_FOLD(UGE(_, C_I32))
1608 IR_FOLD(UGE(_, C_I64))
1609 IR_FOLD(UGE(_, C_ADDR))
1610 {
1611 	if (op2_insn->val.u64 == 0) {
1612 		IR_FOLD_COPY(IR_TRUE);
1613 	}
1614 	IR_FOLD_NEXT;
1615 }
1616 
1617 IR_FOLD(UGT(_, C_U8))
1618 IR_FOLD(UGT(_, C_U16))
1619 IR_FOLD(UGT(_, C_U32))
1620 IR_FOLD(UGT(_, C_U64))
1621 IR_FOLD(UGT(_, C_I8))
1622 IR_FOLD(UGT(_, C_I16))
1623 IR_FOLD(UGT(_, C_I32))
1624 IR_FOLD(UGT(_, C_I64))
1625 IR_FOLD(UGT(_, C_ADDR))
1626 {
1627 	if (op2_insn->val.u64 == 0) {
1628 		opt = IR_OPT(IR_NE, IR_BOOL);
1629 		IR_FOLD_RESTART;
1630 	}
1631 	IR_FOLD_NEXT;
1632 }
1633 
1634 IR_FOLD(ULT(_, C_U8))
1635 IR_FOLD(ULT(_, C_U16))
1636 IR_FOLD(ULT(_, C_U32))
1637 IR_FOLD(ULT(_, C_U64))
1638 IR_FOLD(ULT(_, C_I8))
1639 IR_FOLD(ULT(_, C_I16))
1640 IR_FOLD(ULT(_, C_I32))
1641 IR_FOLD(ULT(_, C_I64))
1642 IR_FOLD(ULT(_, C_ADDR))
1643 {
1644 	if (op2_insn->val.u64 == 0) {
1645 		IR_FOLD_COPY(IR_FALSE);
1646 	}
1647 	IR_FOLD_NEXT;
1648 }
1649 
1650 IR_FOLD(ULE(_, C_U8))
1651 IR_FOLD(ULE(_, C_U16))
1652 IR_FOLD(ULE(_, C_U32))
1653 IR_FOLD(ULE(_, C_U64))
1654 IR_FOLD(ULE(_, C_I8))
1655 IR_FOLD(ULE(_, C_I16))
1656 IR_FOLD(ULE(_, C_I32))
1657 IR_FOLD(ULE(_, C_I64))
1658 IR_FOLD(ULE(_, C_ADDR))
1659 {
1660 	if (op2_insn->val.u64 == 0) {
1661 		opt = IR_OPT(IR_EQ, IR_BOOL);
1662 		IR_FOLD_RESTART;
1663 	}
1664 	IR_FOLD_NEXT;
1665 }
1666 
1667 IR_FOLD(ADD(NEG, _))
1668 {
1669 	/* (-a) + b => b - a */
1670 	opt++; /* ADD -> SUB */
1671 	op1 = op2;
1672 	op2 = op1_insn->op1;
1673 	IR_FOLD_RESTART;
1674 }
1675 
1676 IR_FOLD(ADD(_, NEG))
1677 IR_FOLD(SUB(_, NEG))
1678 {
1679 	/* a + (-b) => a - b */
1680 	opt ^= 1; /* ADD <-> SUB */
1681 	op2 = op2_insn->op1;
1682 	IR_FOLD_RESTART;
1683 }
1684 
1685 IR_FOLD(ADD(SUB, _))
1686 {
1687 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1688 		if (op1_insn->op2 == op2) {
1689 			/* (a - b) + b => a */
1690 			IR_FOLD_COPY(op1_insn->op1);
1691 		}
1692 	}
1693 	IR_FOLD_NEXT;
1694 }
1695 
1696 IR_FOLD(ADD(_, SUB))
1697 {
1698 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1699 		if (op2_insn->op2 == op1) {
1700 			/* a + (b - a) => b */
1701 			IR_FOLD_COPY(op2_insn->op1);
1702 		}
1703 	}
1704 	IR_FOLD_NEXT;
1705 }
1706 
1707 IR_FOLD(SUB(ADD, _))
1708 {
1709 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1710 		if (op1_insn->op1 == op2) {
1711 			/* (a + b) - a => b */
1712 			IR_FOLD_COPY(op1_insn->op2);
1713 		} else if (op1_insn->op2 == op2) {
1714 			/* (a + b) - b => a */
1715 			IR_FOLD_COPY(op1_insn->op1);
1716 		}
1717 	}
1718 	IR_FOLD_NEXT;
1719 }
1720 
1721 IR_FOLD(SUB(_, ADD))
1722 {
1723 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1724 		if (op2_insn->op1 == op1) {
1725 			/* a - (a + b) => -b */
1726 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1727 			op1 = op2_insn->op2;
1728 			op2 = IR_UNUSED;
1729 			IR_FOLD_RESTART;
1730 		} else if (op2_insn->op2 == op1) {
1731 			/* b - (a + b) => -a */
1732 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1733 			op1 = op2_insn->op1;
1734 			op2 = IR_UNUSED;
1735 			IR_FOLD_RESTART;
1736 		}
1737 	}
1738 	IR_FOLD_NEXT;
1739 }
1740 
1741 IR_FOLD(SUB(SUB, _))
1742 {
1743 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1744 		if (op1_insn->op1 == op2) {
1745 			/* (a - b) - a => -b */
1746 			opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1747 			op1 = op1_insn->op2;
1748 			op2 = IR_UNUSED;
1749 			IR_FOLD_RESTART;
1750 		}
1751 	}
1752 	IR_FOLD_NEXT;
1753 }
1754 
1755 IR_FOLD(SUB(_, SUB))
1756 {
1757 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1758 		if (op2_insn->op1 == op1) {
1759 			/* a - (a - b) => b */
1760 			IR_FOLD_COPY(op2_insn->op2);
1761 		}
1762 	}
1763 	IR_FOLD_NEXT;
1764 }
1765 
1766 IR_FOLD(SUB(ADD, ADD))
1767 {
1768 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt))) {
1769 		if (op1_insn->op1 == op2_insn->op1) {
1770 			/* (a + b) - (a + c) => b - c */
1771 			op1 = op1_insn->op2;
1772 			op2 = op2_insn->op2;
1773 			IR_FOLD_RESTART;
1774 		} else if (op1_insn->op1 == op2_insn->op2) {
1775 			/* (a + b) - (c + a) => b - c */
1776 			op1 = op1_insn->op2;
1777 			op2 = op2_insn->op1;
1778 			IR_FOLD_RESTART;
1779 		} else if (op1_insn->op2 == op2_insn->op1) {
1780 			/* (a + b) - (b + c) => a - c */
1781 			op1 = op1_insn->op1;
1782 			op2 = op2_insn->op2;
1783 			IR_FOLD_RESTART;
1784 		} else if (op1_insn->op2 == op2_insn->op2) {
1785 			/* (a + b) - (c + b) => a - c */
1786 			op1 = op1_insn->op1;
1787 			op2 = op2_insn->op1;
1788 			IR_FOLD_RESTART;
1789 		}
1790 	}
1791 	IR_FOLD_NEXT;
1792 }
1793 
1794 // IR_FOLD(SUB(NEG, CONST))  TODO: -a - b => -b - a
1795 // IR_FOLD(MUL(NEG, CONST))  TODO: -a * b => a * -b
1796 // IR_FOLD(DIV(NEG, CONST))  TODO: -a / b => a / -b
1797 
1798 IR_FOLD(MUL(_, C_U8))
1799 IR_FOLD(MUL(_, C_U16))
1800 IR_FOLD(MUL(_, C_U32))
1801 IR_FOLD(MUL(_, C_U64))
1802 IR_FOLD(MUL(_, C_ADDR))
1803 {
1804 	if (op2_insn->val.u64 == 0) {
1805 		/* a * 0 => 0 */
1806 		IR_FOLD_COPY(op2);
1807 	} else if (op2_insn->val.u64 == 1) {
1808 		IR_FOLD_COPY(op1);
1809 	} else if (op2_insn->val.u64 == 2 && IR_OPT_TYPE(opt) != IR_ADDR) {
1810 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1811 		op2 = op1;
1812 		IR_FOLD_RESTART;
1813 	}
1814 	IR_FOLD_NEXT;
1815 }
1816 
1817 IR_FOLD(MUL(_, C_I8))
1818 IR_FOLD(MUL(_, C_I16))
1819 IR_FOLD(MUL(_, C_I32))
1820 IR_FOLD(MUL(_, C_I64))
1821 {
1822 	if (op2_insn->val.i64 == 0) {
1823 		/* a * 0 => 0 */
1824 		IR_FOLD_COPY(op2);
1825 	} else if (op2_insn->val.i64 == 1) {
1826 		/* a * 1 => a */
1827 		IR_FOLD_COPY(op1);
1828 	} else if (op2_insn->val.i64 == 2) {
1829 		/* a * 2 => a + a */
1830 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1831 		op2 = op1;
1832 		IR_FOLD_RESTART;
1833 	} else if (op2_insn->val.i64 == -1) {
1834 		/* a * -1 => -a */
1835 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1836 		op2 = IR_UNUSED;
1837 		IR_FOLD_RESTART;
1838 	}
1839 	IR_FOLD_NEXT;
1840 }
1841 
1842 IR_FOLD(MUL(C_U8, _))
1843 IR_FOLD(MUL(C_U16, _))
1844 IR_FOLD(MUL(C_U32, _))
1845 IR_FOLD(MUL(C_U64, _))
1846 IR_FOLD(MUL(C_ADDR, _))
1847 {
1848 	if (op1_insn->val.u64 == 0) {
1849 		/* 0 * a => 0 */
1850 		IR_FOLD_COPY(op1);
1851 	} else if (op1_insn->val.u64 == 1) {
1852 		IR_FOLD_COPY(op2);
1853 	} else if (op1_insn->val.u64 == 2 && IR_OPT_TYPE(opt) != IR_ADDR) {
1854 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1855 		op1 = op2;
1856 		IR_FOLD_RESTART;
1857 	}
1858 	IR_FOLD_NEXT;
1859 }
1860 
1861 IR_FOLD(MUL(C_I8, _))
1862 IR_FOLD(MUL(C_I16, _))
1863 IR_FOLD(MUL(C_I32, _))
1864 IR_FOLD(MUL(C_I64, _))
1865 {
1866 	if (op1_insn->val.i64 == 0) {
1867 		/* 0 * a => 0 */
1868 		IR_FOLD_COPY(op1);
1869 	} else if (op1_insn->val.i64 == 1) {
1870 		/* 1 * a => a */
1871 		IR_FOLD_COPY(op2);
1872 	} else if (op1_insn->val.i64 == 2) {
1873 		/* 2 * a => a + a */
1874 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1875 		op1 = op2;
1876 		IR_FOLD_RESTART;
1877 	} else if (op1_insn->val.i64 == -1) {
1878 		/* -1 * a => -a */
1879 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1880 		op1 = op2;
1881 		op2 = IR_UNUSED;
1882 		IR_FOLD_RESTART;
1883 	}
1884 	IR_FOLD_NEXT;
1885 }
1886 
1887 IR_FOLD(MUL(_, C_DOUBLE))
1888 {
1889 	if (op2_insn->val.d == 1.0) {
1890 		/* a * 1.0 => a */
1891 		IR_FOLD_COPY(op1);
1892 	} else if (op2_insn->val.d == 2.0) {
1893 		/* a * 2.0 => a + a */
1894 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1895 		op2 = op1;
1896 		IR_FOLD_RESTART;
1897 	} else if (op2_insn->val.d == -1.0) {
1898 		/* a * -1.0 => -a */
1899 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1900 		op2 = IR_UNUSED;
1901 		IR_FOLD_RESTART;
1902 	}
1903 	IR_FOLD_NEXT;
1904 }
1905 
1906 IR_FOLD(MUL(_, C_FLOAT))
1907 {
1908 	if (op2_insn->val.f == 1.0) {
1909 		/* a * 1.0 => a */
1910 		IR_FOLD_COPY(op1);
1911 	} else if (op2_insn->val.f == 2.0) {
1912 		/* a * 2.0 => a + a */
1913 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
1914 		op2 = op1;
1915 		IR_FOLD_RESTART;
1916 	} else if (op2_insn->val.f == -1.0) {
1917 		/* a * -1.0 => -a */
1918 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1919 		op2 = IR_UNUSED;
1920 		IR_FOLD_RESTART;
1921 	}
1922 	IR_FOLD_NEXT;
1923 }
1924 
1925 IR_FOLD(DIV(_, C_U8))
1926 IR_FOLD(DIV(_, C_U16))
1927 IR_FOLD(DIV(_, C_U32))
1928 IR_FOLD(DIV(_, C_U64))
1929 {
1930 	if (op2_insn->val.u64 == 1) {
1931 		IR_FOLD_COPY(op1);
1932 	}
1933 	IR_FOLD_NEXT;
1934 }
1935 
1936 IR_FOLD(DIV(_, C_I8))
1937 IR_FOLD(DIV(_, C_I16))
1938 IR_FOLD(DIV(_, C_I32))
1939 IR_FOLD(DIV(_, C_I64))
1940 {
1941 	if (op2_insn->val.i64 == 1) {
1942 		/* a / 1 => a */
1943 		IR_FOLD_COPY(op1);
1944 	} else if (op2_insn->val.i64 == -1) {
1945 		/* a / -1 => -a */
1946 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1947 		op2 = IR_UNUSED;
1948 		IR_FOLD_RESTART;
1949 	}
1950 	IR_FOLD_NEXT;
1951 }
1952 
1953 IR_FOLD(MOD(_, C_U8))
1954 IR_FOLD(MOD(_, C_U16))
1955 IR_FOLD(MOD(_, C_U32))
1956 IR_FOLD(MOD(_, C_U64))
1957 IR_FOLD(MOD(_, C_I8))
1958 IR_FOLD(MOD(_, C_I16))
1959 IR_FOLD(MOD(_, C_I32))
1960 IR_FOLD(MOD(_, C_I64))
1961 {
1962 	if (op2_insn->val.i64 == 1) {
1963 		/* a % 1 => 0 */
1964 		IR_FOLD_CONST_U(0);
1965 	}
1966 	IR_FOLD_NEXT;
1967 }
1968 
1969 IR_FOLD(DIV(_, C_DOUBLE))
1970 {
1971 	if (op2_insn->val.d == 1.0) {
1972 		/* a / 1.0 => a */
1973 		IR_FOLD_COPY(op1);
1974 	} else if (op2_insn->val.d == -1.0) {
1975 		/* a / -1.0 => -a */
1976 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1977 		op2 = IR_UNUSED;
1978 		IR_FOLD_RESTART;
1979 	}
1980 	IR_FOLD_NEXT;
1981 }
1982 
1983 IR_FOLD(DIV(_, C_FLOAT))
1984 {
1985 	if (op2_insn->val.f == 1.0) {
1986 		/* a / 1.0 => a */
1987 		IR_FOLD_COPY(op1);
1988 	} else if (op2_insn->val.f == -1.0) {
1989 		/* a / -1.0 => -a */
1990 		opt = IR_NEG | (opt & IR_OPT_TYPE_MASK);
1991 		op2 = IR_UNUSED;
1992 		IR_FOLD_RESTART;
1993 	}
1994 	IR_FOLD_NEXT;
1995 }
1996 
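/* Double negation: (-a) * (-b) => a * b and (-a) / (-b) => a / b. */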
1997 IR_FOLD(MUL(NEG, NEG))
1998 IR_FOLD(DIV(NEG, NEG))
1999 {
2000 	op1 = op1_insn->op1;
2001 	op2 = op2_insn->op1;
2002 	IR_FOLD_RESTART;
2003 }
2004 
2005 IR_FOLD(AND(_, C_BOOL))
2006 {
2007 	IR_FOLD_COPY(op2_insn->val.b ? op1 : op2);
2008 }
2009 
2010 IR_FOLD(AND(_, C_U8))
2011 IR_FOLD(AND(_, C_I8))
2012 IR_FOLD(AND(_, C_CHAR))
2013 {
2014 	if (op2_insn->val.i8 == 0) {
2015 		/* a & 0 => 0 */
2016 		IR_FOLD_COPY(op2);
2017 	} else if (op2_insn->val.i8 == -1) {
2018 		IR_FOLD_COPY(op1);
2019 	}
2020 	IR_FOLD_NEXT;
2021 }
2022 
2023 IR_FOLD(AND(_, C_U16))
2024 IR_FOLD(AND(_, C_I16))
2025 {
2026 	if (op2_insn->val.i16 == 0) {
2027 		/* a & 0 => 0 */
2028 		IR_FOLD_COPY(op2);
2029 	} else if (op2_insn->val.i16 == -1) {
2030 		IR_FOLD_COPY(op1);
2031 	}
2032 	IR_FOLD_NEXT;
2033 }
2034 
2035 IR_FOLD(AND(_, C_U32))
2036 IR_FOLD(AND(_, C_I32))
2037 {
2038 	if (op2_insn->val.i32 == 0) {
2039 		/* a & 0 => 0 */
2040 		IR_FOLD_COPY(op2);
2041 	} else if (op2_insn->val.i32 == -1) {
2042 		IR_FOLD_COPY(op1);
2043 	}
2044 	IR_FOLD_NEXT;
2045 }
2046 
2047 IR_FOLD(AND(_, C_U64))
2048 IR_FOLD(AND(_, C_I64))
2049 {
2050 	if (op2_insn->val.i64 == 0) {
2051 		/* a & 0 => 0 */
2052 		IR_FOLD_COPY(op2);
2053 	} else if (op2_insn->val.i64 == -1) {
2054 		IR_FOLD_COPY(op1);
2055 	}
2056 	IR_FOLD_NEXT;
2057 }
2058 
2059 IR_FOLD(OR(_, C_BOOL))
2060 {
2061 	IR_FOLD_COPY(op2_insn->val.b ? op2 : op1);
2062 }
2063 
2064 IR_FOLD(OR(_, C_U8))
2065 IR_FOLD(OR(_, C_I8))
2066 IR_FOLD(OR(_, C_CHAR))
2067 {
2068 	if (op2_insn->val.i8 == -1) {
2069 		/* a | -1 => -1 */
2070 		IR_FOLD_COPY(op2);
2071 	} else if (op2_insn->val.i8 == 0) {
2072 		IR_FOLD_COPY(op1);
2073 	}
2074 	IR_FOLD_NEXT;
2075 }
2076 
2077 IR_FOLD(OR(_, C_U16))
2078 IR_FOLD(OR(_, C_I16))
2079 {
2080 	if (op2_insn->val.i16 == -1) {
2081 		/* a | -1 => -1 */
2082 		IR_FOLD_COPY(op2);
2083 	} else if (op2_insn->val.i16 == 0) {
2084 		IR_FOLD_COPY(op1);
2085 	}
2086 	IR_FOLD_NEXT;
2087 }
2088 
2089 IR_FOLD(OR(_, C_U32))
2090 IR_FOLD(OR(_, C_I32))
2091 {
2092 	if (op2_insn->val.i32 == -1) {
2093 		/* a | -1 => -1 */
2094 		IR_FOLD_COPY(op2);
2095 	} else if (op2_insn->val.i32 == 0) {
2096 		IR_FOLD_COPY(op1);
2097 	}
2098 	IR_FOLD_NEXT;
2099 }
2100 
2101 IR_FOLD(OR(_, C_U64))
2102 IR_FOLD(OR(_, C_I64))
2103 {
2104 	if (op2_insn->val.i64 == -1) {
2105 		/* a | -1 => -1 */
2106 		IR_FOLD_COPY(op2);
2107 	} else if (op2_insn->val.i64 == 0) {
2108 		IR_FOLD_COPY(op1);
2109 	}
2110 	IR_FOLD_NEXT;
2111 }
2112 
2113 IR_FOLD(XOR(_, C_BOOL))
2114 {
2115 	if (!op2_insn->val.b) {
2116 		/* a ^ 0 => a */
2117 		IR_FOLD_COPY(op1);
2118 	} else {
2119 		/* a ^ 1 => !a */
2120 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2121 		op2 = IR_UNUSED;
2122 		IR_FOLD_RESTART;
2123 	}
2124 }
2125 
2126 IR_FOLD(XOR(_, C_U8))
2127 IR_FOLD(XOR(_, C_I8))
2128 IR_FOLD(XOR(_, C_CHAR))
2129 {
2130 	if (op2_insn->val.i8 == 0) {
2131 		/* a ^ 0 => a */
2132 		IR_FOLD_COPY(op1);
2133 	} else if (op2_insn->val.i8 == -1) {
2134 		/* a ^ -1 => ~a */
2135 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2136 		op2 = IR_UNUSED;
2137 		IR_FOLD_RESTART;
2138 	}
2139 	IR_FOLD_NEXT;
2140 }
2141 
2142 IR_FOLD(XOR(_, C_U16))
2143 IR_FOLD(XOR(_, C_I16))
2144 {
2145 	if (op2_insn->val.i16 == 0) {
2146 		/* a ^ 0 => a */
2147 		IR_FOLD_COPY(op1);
2148 	} else if (op2_insn->val.i16 == -1) {
2149 		/* a ^ -1 => ~a */
2150 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2151 		op2 = IR_UNUSED;
2152 		IR_FOLD_RESTART;
2153 	}
2154 	IR_FOLD_NEXT;
2155 }
2156 
2157 IR_FOLD(XOR(_, C_U32))
2158 IR_FOLD(XOR(_, C_I32))
2159 {
2160 	if (op2_insn->val.i32 == 0) {
2161 		/* a ^ 0 => a */
2162 		IR_FOLD_COPY(op1);
2163 	} else if (op2_insn->val.i32 == -1) {
2164 		/* a ^ -1 => ~a */
2165 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2166 		op2 = IR_UNUSED;
2167 		IR_FOLD_RESTART;
2168 	}
2169 	IR_FOLD_NEXT;
2170 }
2171 
2172 IR_FOLD(XOR(_, C_U64))
2173 IR_FOLD(XOR(_, C_I64))
2174 {
2175 	if (op2_insn->val.i64 == 0) {
2176 		/* a ^ 0 => a */
2177 		IR_FOLD_COPY(op1);
2178 	} else if (op2_insn->val.i64 == -1) {
2179 		/* a ^ -1 => ~a */
2180 		opt = IR_NOT | (opt & IR_OPT_TYPE_MASK);
2181 		op2 = IR_UNUSED;
2182 		IR_FOLD_RESTART;
2183 	}
2184 	IR_FOLD_NEXT;
2185 }
2186 
2187 IR_FOLD(SHL(_, C_U8))
2188 IR_FOLD(SHL(_, C_U16))
2189 IR_FOLD(SHL(_, C_U32))
2190 IR_FOLD(SHL(_, C_U64))
2191 IR_FOLD(SHL(_, C_I8))
2192 IR_FOLD(SHL(_, C_I16))
2193 IR_FOLD(SHL(_, C_I32))
2194 IR_FOLD(SHL(_, C_I64))
2195 {
2196 	if (op2_insn->val.u64 == 0) {
2197 		/* a << 0 => a */
2198 		IR_FOLD_COPY(op1);
2199 	} else if (op2_insn->val.u64 == 1) {
2200 		/* a << 1 => a + a */
2201 		opt = IR_ADD | (opt & IR_OPT_TYPE_MASK);
2202 		op2 = op1;
2203 		IR_FOLD_RESTART;
2204 	}
2205 	IR_FOLD_NEXT;
2206 }
2207 
2208 IR_FOLD(SHR(_, C_U8))
2209 IR_FOLD(SHR(_, C_U16))
2210 IR_FOLD(SHR(_, C_U32))
2211 IR_FOLD(SHR(_, C_U64))
2212 IR_FOLD(SHR(_, C_I8))
2213 IR_FOLD(SHR(_, C_I16))
2214 IR_FOLD(SHR(_, C_I32))
2215 IR_FOLD(SHR(_, C_I64))
2216 IR_FOLD(SAR(_, C_U8))
2217 IR_FOLD(SAR(_, C_U16))
2218 IR_FOLD(SAR(_, C_U32))
2219 IR_FOLD(SAR(_, C_U64))
2220 IR_FOLD(SAR(_, C_I8))
2221 IR_FOLD(SAR(_, C_I16))
2222 IR_FOLD(SAR(_, C_I32))
2223 IR_FOLD(SAR(_, C_I64))
2224 IR_FOLD(ROL(_, C_U8))
2225 IR_FOLD(ROL(_, C_U16))
2226 IR_FOLD(ROL(_, C_U32))
2227 IR_FOLD(ROL(_, C_U64))
2228 IR_FOLD(ROL(_, C_I8))
2229 IR_FOLD(ROL(_, C_I16))
2230 IR_FOLD(ROL(_, C_I32))
2231 IR_FOLD(ROL(_, C_I64))
2232 IR_FOLD(ROR(_, C_U8))
2233 IR_FOLD(ROR(_, C_U16))
2234 IR_FOLD(ROR(_, C_U32))
2235 IR_FOLD(ROR(_, C_U64))
2236 IR_FOLD(ROR(_, C_I8))
2237 IR_FOLD(ROR(_, C_I16))
2238 IR_FOLD(ROR(_, C_I32))
2239 IR_FOLD(ROR(_, C_I64))
2240 {
2241 	if (op2_insn->val.u64 == 0) {
2242 		/* a >> 0 => a */
2243 		IR_FOLD_COPY(op1);
2244 	}
2245 	IR_FOLD_NEXT;
2246 }
2247 
2248 IR_FOLD(SHL(C_U8, _))
2249 IR_FOLD(SHL(C_U16, _))
2250 IR_FOLD(SHL(C_U32, _))
2251 IR_FOLD(SHL(C_U64, _))
2252 IR_FOLD(SHL(C_I8, _))
2253 IR_FOLD(SHL(C_I16, _))
2254 IR_FOLD(SHL(C_I32, _))
2255 IR_FOLD(SHL(C_I64, _))
2256 IR_FOLD(SHR(C_U8, _))
2257 IR_FOLD(SHR(C_U16, _))
2258 IR_FOLD(SHR(C_U32, _))
2259 IR_FOLD(SHR(C_U64, _))
2260 IR_FOLD(SHR(C_I8, _))
2261 IR_FOLD(SHR(C_I16, _))
2262 IR_FOLD(SHR(C_I32, _))
2263 IR_FOLD(SHR(C_I64, _))
2264 {
2265 	if (op1_insn->val.u64 == 0) {
2266 		/* 0 << a => 0 */
2267 		IR_FOLD_COPY(op1);
2268 	}
2269 	IR_FOLD_NEXT;
2270 }
2271 
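/*
 * An arithmetic shift or a rotate of 0 or of an all-ones constant yields the
 * constant itself for any shift count, so the first operand can be copied.
 */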
2272 IR_FOLD(SAR(C_U8, _))
2273 IR_FOLD(SAR(C_I8, _))
2274 IR_FOLD(ROL(C_U8, _))
2275 IR_FOLD(ROL(C_I8, _))
2276 IR_FOLD(ROR(C_U8, _))
2277 IR_FOLD(ROR(C_I8, _))
2278 {
2279 	if (op1_insn->val.i8 == 0 || op1_insn->val.i8 == -1) {
2280 		IR_FOLD_COPY(op1);
2281 	}
2282 	IR_FOLD_NEXT;
2283 }
2284 
2285 IR_FOLD(SAR(C_U16, _))
2286 IR_FOLD(SAR(C_I16, _))
2287 IR_FOLD(ROL(C_U16, _))
2288 IR_FOLD(ROL(C_I16, _))
2289 IR_FOLD(ROR(C_U16, _))
2290 IR_FOLD(ROR(C_I16, _))
2291 {
2292 	if (op1_insn->val.i16 == 0 || op1_insn->val.i16 == -1) {
2293 		IR_FOLD_COPY(op1);
2294 	}
2295 	IR_FOLD_NEXT;
2296 }
2297 
2298 IR_FOLD(SAR(C_U32, _))
2299 IR_FOLD(SAR(C_I32, _))
2300 IR_FOLD(ROL(C_U32, _))
2301 IR_FOLD(ROL(C_I32, _))
2302 IR_FOLD(ROR(C_U32, _))
2303 IR_FOLD(ROR(C_I32, _))
2304 {
2305 	if (op1_insn->val.i32 == 0 || op1_insn->val.i32 == -1) {
2306 		IR_FOLD_COPY(op1);
2307 	}
2308 	IR_FOLD_NEXT;
2309 }
2310 
2311 IR_FOLD(SAR(C_U64, _))
2312 IR_FOLD(SAR(C_I64, _))
2313 IR_FOLD(ROL(C_U64, _))
2314 IR_FOLD(ROL(C_I64, _))
2315 IR_FOLD(ROR(C_U64, _))
2316 IR_FOLD(ROR(C_I64, _))
2317 {
2318 	if (op1_insn->val.i64 == 0 || op1_insn->val.i64 == -1) {
2319 		IR_FOLD_COPY(op1);
2320 	}
2321 	IR_FOLD_NEXT;
2322 }
2323 
2324 IR_FOLD(LT(ABS, C_I8))
2325 IR_FOLD(LT(ABS, C_I16))
2326 IR_FOLD(LT(ABS, C_I32))
2327 IR_FOLD(LT(ABS, C_I64))
2328 IR_FOLD(LT(ABS, C_FLOAT))
2329 IR_FOLD(LT(ABS, C_DOUBLE))
2330 {
2331 	if (op2_insn->val.u64 == 0) {
2332 		/* abs() < 0 => false */
2333 		IR_FOLD_COPY(IR_FALSE);
2334 	}
2335 	IR_FOLD_NEXT;
2336 }
2337 
2338 IR_FOLD(GE(ABS, C_I8))
2339 IR_FOLD(GE(ABS, C_I16))
2340 IR_FOLD(GE(ABS, C_I32))
2341 IR_FOLD(GE(ABS, C_I64))
2342 IR_FOLD(GE(ABS, C_FLOAT))
2343 IR_FOLD(GE(ABS, C_DOUBLE))
2344 {
2345 	if (op2_insn->val.u64 == 0) {
2346 		/* abs() >= 0 => true */
2347 		IR_FOLD_COPY(IR_TRUE);
2348 	}
2349 	IR_FOLD_NEXT;
2350 }
2351 
2352 // TODO: conversions
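/*
 * The conversion rules below collapse chains of casts (FP<->FP, INT<->FP,
 * TRUNC/ZEXT/SEXT/BITCAST) into a single conversion, or drop them entirely,
 * when the intermediate step is known to be lossless.
 */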
2353 IR_FOLD(FP2FP(FP2FP))
2354 {
2355 	if (IR_OPT_TYPE(opt) == IR_FLOAT) {
2356 		/* (float)(double)f => f */
2357 		IR_ASSERT(op1_insn->type == IR_DOUBLE);
2358 		IR_ASSERT(ctx->ir_base[op1_insn->op1].type == IR_FLOAT);
2359 		IR_FOLD_COPY(op1_insn->op1);
2360 	}
2361 	IR_FOLD_NEXT;
2362 }
2363 
2364 IR_FOLD(FP2INT(INT2FP))
2365 {
2366 	ir_type dst_type = IR_OPT_TYPE(opt);
2367 	ir_type src_type = ctx->ir_base[op1_insn->op1].type;
2368 
2369 	if (ir_type_size[src_type] >= ir_type_size[op1_insn->type]) {
2370 		/* the source integer type cannot be represented exactly by the intermediate floating-point type */
2371 		IR_FOLD_NEXT;
2372 	}
2373 	/* (int)(double)i => i */
2374 	if (src_type == dst_type) {
2375 		IR_FOLD_COPY(op1_insn->op1);
2376 	}
2377 	IR_FOLD_NEXT;
2378 }
2379 
2380 IR_FOLD(TRUNC(ZEXT))
2381 IR_FOLD(TRUNC(SEXT))
2382 {
2383 	ir_type dst_type = IR_OPT_TYPE(opt);
2384 	ir_type src_type = ctx->ir_base[op1_insn->op1].type;
2385 
2386 	/* (int32_t)(int64_t)i => i */
2387 	if (src_type == dst_type) {
2388 		IR_FOLD_COPY(op1_insn->op1);
2389 	} else if (ir_type_size[src_type] == ir_type_size[dst_type]) {
2390 		opt = IR_OPT(IR_BITCAST, dst_type);
2391 		op1 = op1_insn->op1;
2392 		IR_FOLD_RESTART;
2393 	} else if (ir_type_size[src_type] > ir_type_size[dst_type]) {
2394 		opt = IR_OPT(IR_TRUNC, dst_type);
2395 		op1 = op1_insn->op1;
2396 		IR_FOLD_RESTART;
2397 	} else {
2398 		opt = IR_OPT(op1_insn->op, dst_type);
2399 		op1 = op1_insn->op1;
2400 		IR_FOLD_RESTART;
2401 	}
2402 	IR_FOLD_NEXT;
2403 }
2404 
2405 IR_FOLD(TRUNC(BITCAST))
2406 IR_FOLD(ZEXT(BITCAST))
2407 IR_FOLD(SEXT(BITCAST))
2408 {
2409 	if (IR_IS_TYPE_INT(ctx->ir_base[op1_insn->op1].type)) {
2410 		op1 = op1_insn->op1;
2411 		IR_FOLD_RESTART;
2412 	}
2413 	IR_FOLD_NEXT;
2414 }
2415 
2416 IR_FOLD(BITCAST(BITCAST))
2417 {
2418 	ir_type dst_type = IR_OPT_TYPE(opt);
2419 	ir_type src_type = ctx->ir_base[op1_insn->op1].type;
2420 
2421 	if (src_type == dst_type) {
2422 		IR_FOLD_COPY(op1_insn->op1);
2423 	} else if (IR_IS_TYPE_INT(src_type) == IR_IS_TYPE_INT(dst_type)) {
2424 		op1 = op1_insn->op1;
2425 		IR_FOLD_RESTART;
2426 	}
2427 	IR_FOLD_NEXT;
2428 }
2429 
2430 IR_FOLD(TRUNC(TRUNC))
2431 IR_FOLD(ZEXT(ZEXT))
2432 IR_FOLD(SEXT(SEXT))
2433 {
2434 	op1 = op1_insn->op1;
2435 	IR_FOLD_RESTART;
2436 }
2437 
2438 IR_FOLD(SEXT(ZEXT))
2439 {
2440 	op1 = op1_insn->op1;
2441 	opt = IR_OPT(IR_ZEXT, IR_OPT_TYPE(opt));
2442 	IR_FOLD_RESTART;
2443 }
2444 
2445 IR_FOLD(SEXT(AND))
2446 {
2447 	if (IR_IS_CONST_REF(op1_insn->op2)
2448 	 && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)
2449 	 && !(ctx->ir_base[op1_insn->op2].val.u64
2450 			& (1ULL << ((ir_type_size[op1_insn->type] * 8) - 1)))) {
2451 		/* SEXT(AND(_, 0b0*)) -> ZEXT(AND(_, 0b0*)) */
2452 		opt = IR_OPT(IR_ZEXT, IR_OPT_TYPE(opt));
2453 		IR_FOLD_RESTART;
2454 	}
2455 	IR_FOLD_NEXT;
2456 }
2457 
2458 IR_FOLD(TRUNC(AND))
2459 {
2460 	if (IR_IS_CONST_REF(op1_insn->op2)) {
2461 		size_t size = ir_type_size[IR_OPT_TYPE(opt)];
2462 		uint64_t mask = ctx->ir_base[op1_insn->op2].val.u64;
2463 
2464 		if (size == 1) {
2465 			if (mask == 0xff) {
2466 				op1 = op1_insn->op1;
2467 				IR_FOLD_RESTART;
2468 			}
2469 		} else if (size == 2) {
2470 			if (mask == 0xffff) {
2471 				op1 = op1_insn->op1;
2472 				IR_FOLD_RESTART;
2473 			}
2474 		} else if (size == 4) {
2475 			if (mask == 0xffffffff) {
2476 				op1 = op1_insn->op1;
2477 				IR_FOLD_RESTART;
2478 			}
2479 		}
2480 	}
2481 	IR_FOLD_NEXT;
2482 }
2483 
2484 IR_FOLD(EQ(FP2FP, C_DOUBLE))
2485 IR_FOLD(NE(FP2FP, C_DOUBLE))
2486 IR_FOLD(LT(FP2FP, C_DOUBLE))
2487 IR_FOLD(GE(FP2FP, C_DOUBLE))
2488 IR_FOLD(LE(FP2FP, C_DOUBLE))
2489 IR_FOLD(GT(FP2FP, C_DOUBLE))
2490 IR_FOLD(ULT(FP2FP, C_DOUBLE))
2491 IR_FOLD(UGE(FP2FP, C_DOUBLE))
2492 IR_FOLD(ULE(FP2FP, C_DOUBLE))
2493 IR_FOLD(UGT(FP2FP, C_DOUBLE))
2494 {
2495 	IR_ASSERT(op1_insn->type == IR_DOUBLE);
2496 	IR_ASSERT(ctx->ir_base[op1_insn->op1].type == IR_FLOAT);
2497 	if (op2_insn->val.d == (double)(float)op2_insn->val.d) {
2498 		op1 = op1_insn->op1;
2499 		op2 = ir_const_float(ctx, (float)op2_insn->val.d);
2500 		IR_FOLD_RESTART;
2501 	}
2502 	IR_FOLD_NEXT;
2503 }
2504 
2505 // TODO: Reassociation
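/*
 * The rules below reassociate a chained constant operand so that the two
 * constants can be combined into one, e.g. (x + c1) + c2 => x + (c1 + c2).
 * The opt++ / opt-- adjustments switch between ADD and SUB, which assumes
 * the two opcodes are adjacent in the opcode enumeration (see the
 * "ADD -> SUB" / "SUB -> ADD" comments below).
 */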
2506 IR_FOLD(ADD(ADD, C_U8))
2507 IR_FOLD(ADD(ADD, C_U16))
2508 IR_FOLD(ADD(ADD, C_U32))
2509 IR_FOLD(ADD(ADD, C_U64))
2510 IR_FOLD(ADD(ADD, C_ADDR))
2511 {
2512 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2513 		/* (x + c1) + c2  => x + (c1 + c2) */
2514 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
2515 		op1 = op1_insn->op1;
2516 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2517 		IR_FOLD_RESTART;
2518 	}
2519 	IR_FOLD_NEXT;
2520 }
2521 
2522 IR_FOLD(ADD(ADD, C_I8))
2523 IR_FOLD(ADD(ADD, C_I16))
2524 IR_FOLD(ADD(ADD, C_I32))
2525 IR_FOLD(ADD(ADD, C_I64))
2526 {
2527 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2528 		/* (x + c1) + c2  => x + (c1 + c2) */
2529 		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 + op2_insn->val.i64;
2530 		op1 = op1_insn->op1;
2531 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2532 		IR_FOLD_RESTART;
2533 	}
2534 	IR_FOLD_NEXT;
2535 }
2536 
2537 IR_FOLD(ADD(SUB, C_U8))
2538 IR_FOLD(ADD(SUB, C_U16))
2539 IR_FOLD(ADD(SUB, C_U32))
2540 IR_FOLD(ADD(SUB, C_U64))
2541 IR_FOLD(ADD(SUB, C_ADDR))
2542 {
2543 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2544 		/* (x - c1) + c2  => x + (c2 - c1) */
2545 		val.u64 = op2_insn->val.u64 - ctx->ir_base[op1_insn->op2].val.u64;
2546 		op1 = op1_insn->op1;
2547 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2548 		IR_FOLD_RESTART;
2549 	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2550 		/* (c1 - x) + c2  => (c1 + c2) - x */
2551 		val.u64 = ctx->ir_base[op1_insn->op1].val.u64 + op2_insn->val.u64;
2552 		opt++; /* ADD -> SUB */
2553 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2554 		op2 = op1_insn->op2;
2555 		IR_FOLD_RESTART;
2556 	}
2557 	IR_FOLD_NEXT;
2558 }
2559 
2560 IR_FOLD(ADD(SUB, C_I8))
2561 IR_FOLD(ADD(SUB, C_I16))
2562 IR_FOLD(ADD(SUB, C_I32))
2563 IR_FOLD(ADD(SUB, C_I64))
2564 {
2565 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2566 		/* (x - c1) + c2  => x + (c2 - c1) */
2567 		val.i64 = op2_insn->val.i64 - ctx->ir_base[op1_insn->op2].val.i64;
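		/*
		 * If the combined constant is negative and its negation is still
		 * representable (the "val.i64 - 1 < 0" test presumably rules out
		 * INT64_MIN), negate it and emit SUB instead of ADD.
		 */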
2568 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2569 			val.i64 = -val.i64;
2570 			opt++; /* ADD -> SUB */
2571 		}
2572 		op1 = op1_insn->op1;
2573 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2574 		IR_FOLD_RESTART;
2575 	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2576 		/* (c1 - x) + c2  => (c1 + c2) - x */
2577 		val.i64 = ctx->ir_base[op1_insn->op1].val.i64 + op2_insn->val.i64;
2578 		opt++; /* ADD -> SUB */
2579 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2580 		op2 = op1_insn->op2;
2581 		IR_FOLD_RESTART;
2582 	}
2583 	IR_FOLD_NEXT;
2584 }
2585 
2586 IR_FOLD(SUB(ADD, C_U8))
2587 IR_FOLD(SUB(ADD, C_U16))
2588 IR_FOLD(SUB(ADD, C_U32))
2589 IR_FOLD(SUB(ADD, C_U64))
2590 IR_FOLD(SUB(ADD, C_ADDR))
2591 {
2592 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2593 		/* (x + c1) - c2  => x + (c1 - c2) */
2594 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 - op2_insn->val.u64;
2595 		opt--; /* SUB -> ADD */
2596 		op1 = op1_insn->op1;
2597 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2598 		IR_FOLD_RESTART;
2599 	}
2600 	IR_FOLD_NEXT;
2601 }
2602 
2603 IR_FOLD(SUB(ADD, C_I8))
2604 IR_FOLD(SUB(ADD, C_I16))
2605 IR_FOLD(SUB(ADD, C_I32))
2606 IR_FOLD(SUB(ADD, C_I64))
2607 {
2608 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2609 		/* (x + c1) - c2  => x + (c1 - c2) */
2610 		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 - op2_insn->val.i64;
2611 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2612 			val.i64 = -val.i64;
2613 		} else {
2614 			opt--; /* SUB -> ADD */
2615 		}
2616 		op1 = op1_insn->op1;
2617 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2618 		IR_FOLD_RESTART;
2619 	}
2620 	IR_FOLD_NEXT;
2621 }
2622 
2623 IR_FOLD(SUB(C_U8, ADD))
2624 IR_FOLD(SUB(C_U16, ADD))
2625 IR_FOLD(SUB(C_U32, ADD))
2626 IR_FOLD(SUB(C_U64, ADD))
2627 IR_FOLD(SUB(C_ADDR, ADD))
2628 {
2629 	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2630 		/* c1 - (x + c2) => (c1 - c2) - x */
2631 		val.u64 = op1_insn->val.u64 - ctx->ir_base[op2_insn->op2].val.u64;
2632 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2633 		op2 = op2_insn->op1;
2634 		IR_FOLD_RESTART;
2635 	}
2636 	IR_FOLD_NEXT;
2637 }
2638 
2639 IR_FOLD(SUB(C_I8, ADD))
2640 IR_FOLD(SUB(C_I16, ADD))
2641 IR_FOLD(SUB(C_I32, ADD))
2642 IR_FOLD(SUB(C_I64, ADD))
2643 {
2644 	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2645 		/* c1 - (x + c2) => (c1 - c2) - x */
2646 		val.i64 = op1_insn->val.i64 - ctx->ir_base[op2_insn->op2].val.i64;
2647 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2648 		op2 = op2_insn->op1;
2649 		IR_FOLD_RESTART;
2650 	}
2651 	IR_FOLD_NEXT;
2652 }
2653 
2654 IR_FOLD(SUB(SUB, C_U8))
2655 IR_FOLD(SUB(SUB, C_U16))
2656 IR_FOLD(SUB(SUB, C_U32))
2657 IR_FOLD(SUB(SUB, C_U64))
2658 IR_FOLD(SUB(SUB, C_ADDR))
2659 {
2660 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2661 		/* (x - c1) - c2  => x - (c1 + c2) */
2662 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 + op2_insn->val.u64;
2663 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2664 			val.i64 = -val.i64;
2665 			opt--; /* SUB -> ADD */
2666 		}
2667 		op1 = op1_insn->op1;
2668 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2669 		IR_FOLD_RESTART;
2670 	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2671 		/* (c1 - x) - c2  => (c1 - c2) - x */
2672 		val.u64 = ctx->ir_base[op1_insn->op1].val.u64 - op2_insn->val.u64;
2673 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2674 		op2 = op1_insn->op2;
2675 		IR_FOLD_RESTART;
2676 	}
2677 	IR_FOLD_NEXT;
2678 }
2679 
2680 IR_FOLD(SUB(SUB, C_I8))
2681 IR_FOLD(SUB(SUB, C_I16))
2682 IR_FOLD(SUB(SUB, C_I32))
2683 IR_FOLD(SUB(SUB, C_I64))
2684 {
2685 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2686 		/* (x - c1) - c2  => x - (c1 + c2) */
2687 		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 + op2_insn->val.i64;
2688 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2689 			val.i64 = -val.i64;
2690 			opt--; /* SUB -> ADD */
2691 		}
2692 		op1 = op1_insn->op1;
2693 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2694 		IR_FOLD_RESTART;
2695 	} else if (IR_IS_CONST_REF(op1_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op1].op)) {
2696 		/* (c1 - x) - c2  => (c1 - c2) - x */
2697 		val.i64 = ctx->ir_base[op1_insn->op1].val.i64 - op2_insn->val.i64;
2698 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2699 		op2 = op1_insn->op2;
2700 		IR_FOLD_RESTART;
2701 	}
2702 	IR_FOLD_NEXT;
2703 }
2704 
2705 IR_FOLD(SUB(C_U8, SUB))
2706 IR_FOLD(SUB(C_U16, SUB))
2707 IR_FOLD(SUB(C_U32, SUB))
2708 IR_FOLD(SUB(C_U64, SUB))
2709 IR_FOLD(SUB(C_ADDR, SUB))
2710 {
2711 	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2712 		/* c1 - (x - c2) => (c1 + c2) - x */
2713 		val.u64 = op1_insn->val.u64 + ctx->ir_base[op2_insn->op2].val.u64;
2714 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2715 		op2 = op2_insn->op1;
2716 		IR_FOLD_RESTART;
2717 	} else if (IR_IS_CONST_REF(op2_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op1].op)) {
2718 		/* c1 - (c2 - x) => x + (c1 - c2) */
2719 		val.u64 = op1_insn->val.u64 - ctx->ir_base[op2_insn->op1].val.u64;
		opt--; /* SUB -> ADD */
2720 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2721 			val.i64 = -val.i64;
2722 			opt++; /* ADD -> SUB */
2723 		}
2724 		op1 = op2_insn->op2;
2725 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2726 		IR_FOLD_RESTART;
2727 	}
2728 	IR_FOLD_NEXT;
2729 }
2730 
2731 IR_FOLD(SUB(C_I8, SUB))
2732 IR_FOLD(SUB(C_I16, SUB))
2733 IR_FOLD(SUB(C_I32, SUB))
2734 IR_FOLD(SUB(C_I64, SUB))
2735 {
2736 	if (IR_IS_CONST_REF(op2_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op2].op)) {
2737 		/* c1 - (x - c2) => (c1 + c2) - x */
2738 		val.i64 = op1_insn->val.i64 + ctx->ir_base[op2_insn->op2].val.i64;
2739 		op1 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2740 		op2 = op2_insn->op1;
2741 		IR_FOLD_RESTART;
2742 	} else if (IR_IS_CONST_REF(op2_insn->op1) && !IR_IS_SYM_CONST(ctx->ir_base[op2_insn->op1].op)) {
2743 		/* c1 - (c2 - x) => x + (c1 - c2) */
2744 		val.i64 = op1_insn->val.i64 - ctx->ir_base[op2_insn->op1].val.i64;
		opt--; /* SUB -> ADD */
2745 		if (val.i64 < 0 && val.i64 - 1 < 0) {
2746 			val.i64 = -val.i64;
2747 			opt++; /* ADD -> SUB */
2748 		}
2749 		op1 = op2_insn->op2;
2750 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2751 		IR_FOLD_RESTART;
2752 	}
2753 	IR_FOLD_NEXT;
2754 }
2755 
2756 IR_FOLD(MUL(MUL, C_U8))
2757 IR_FOLD(MUL(MUL, C_U16))
2758 IR_FOLD(MUL(MUL, C_U32))
2759 IR_FOLD(MUL(MUL, C_U64))
2760 IR_FOLD(MUL(MUL, C_ADDR))
2761 {
2762 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2763 		/* (x * c1) * c2  => x * (c1 * c2) */
2764 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 * op2_insn->val.u64;
2765 		op1 = op1_insn->op1;
2766 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2767 		IR_FOLD_RESTART;
2768 	}
2769 	IR_FOLD_NEXT;
2770 }
2771 
2772 IR_FOLD(MUL(MUL, C_I8))
2773 IR_FOLD(MUL(MUL, C_I16))
2774 IR_FOLD(MUL(MUL, C_I32))
2775 IR_FOLD(MUL(MUL, C_I64))
2776 {
2777 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2778 		/* (x * c1) * c2  => x * (c1 * c2) */
2779 		val.i64 = ctx->ir_base[op1_insn->op2].val.i64 * op2_insn->val.i64;
2780 		op1 = op1_insn->op1;
2781 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2782 		IR_FOLD_RESTART;
2783 	}
2784 	IR_FOLD_NEXT;
2785 }
2786 
2787 IR_FOLD(AND(AND, C_U8))
2788 IR_FOLD(AND(AND, C_U16))
2789 IR_FOLD(AND(AND, C_U32))
2790 IR_FOLD(AND(AND, C_U64))
2791 IR_FOLD(AND(AND, C_I8))
2792 IR_FOLD(AND(AND, C_I16))
2793 IR_FOLD(AND(AND, C_I32))
2794 IR_FOLD(AND(AND, C_I64))
2795 IR_FOLD(AND(AND, C_ADDR))
2796 {
2797 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2798 		/* (x & c1) & c2  => x & (c1 & c2) */
2799 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 & op2_insn->val.u64;
2800 		op1 = op1_insn->op1;
2801 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2802 		IR_FOLD_RESTART;
2803 	}
2804 	IR_FOLD_NEXT;
2805 }
2806 
2807 IR_FOLD(OR(OR, C_U8))
2808 IR_FOLD(OR(OR, C_U16))
2809 IR_FOLD(OR(OR, C_U32))
2810 IR_FOLD(OR(OR, C_U64))
2811 IR_FOLD(OR(OR, C_I8))
2812 IR_FOLD(OR(OR, C_I16))
2813 IR_FOLD(OR(OR, C_I32))
2814 IR_FOLD(OR(OR, C_I64))
2815 IR_FOLD(OR(OR, C_ADDR))
2816 {
2817 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2818 		/* (x | c1) | c2  => x | (c1 | c2) */
2819 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 | op2_insn->val.u64;
2820 		op1 = op1_insn->op1;
2821 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2822 		IR_FOLD_RESTART;
2823 	}
2824 	IR_FOLD_NEXT;
2825 }
2826 
2827 IR_FOLD(XOR(XOR, C_U8))
2828 IR_FOLD(XOR(XOR, C_U16))
2829 IR_FOLD(XOR(XOR, C_U32))
2830 IR_FOLD(XOR(XOR, C_U64))
2831 IR_FOLD(XOR(XOR, C_I8))
2832 IR_FOLD(XOR(XOR, C_I16))
2833 IR_FOLD(XOR(XOR, C_I32))
2834 IR_FOLD(XOR(XOR, C_I64))
2835 IR_FOLD(XOR(XOR, C_ADDR))
2836 {
2837 	if (IR_IS_CONST_REF(op1_insn->op2) && !IR_IS_SYM_CONST(ctx->ir_base[op1_insn->op2].op)) {
2838 		/* (x ^ c1) ^ c2  => x ^ (c1 ^ c2) */
2839 		val.u64 = ctx->ir_base[op1_insn->op2].val.u64 ^ op2_insn->val.u64;
2840 		op1 = op1_insn->op1;
2841 		op2 = ir_const(ctx, val, IR_OPT_TYPE(opt));
2842 		IR_FOLD_RESTART;
2843 	}
2844 	IR_FOLD_NEXT;
2845 }
2846 
2847 IR_FOLD(AND(AND, _))
2848 IR_FOLD(OR(OR, _))
2849 IR_FOLD(MIN(MIN, _))
2850 IR_FOLD(MAX(MAX, _))
2851 {
2852 	if (op1_insn->op1 == op2 || op1_insn->op2 == op2) {
2853 		IR_FOLD_COPY(op1); /* (a & b) & b => a & b */
2854 	}
2855 	IR_FOLD_NEXT;
2856 }
2857 
2858 IR_FOLD(XOR(XOR, _))
2859 {
2860 	if (op1_insn->op1 == op2) {
2861 		IR_FOLD_COPY(op1_insn->op2);
2862 	} else if (op1_insn->op2 == op2) {
2863 		IR_FOLD_COPY(op1_insn->op1);
2864 	}
2865 	IR_FOLD_NEXT;
2866 }
2867 
2868 /* Swap operands (move lower ref to op2) for better CSE */
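/*
 * This body is also registered under the name "swap_ops" (IR_FOLD_NAMED), so
 * the generic rules further down can branch to it via IR_FOLD_DO_NAMED.
 */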
2869 IR_FOLD(ADD(_, _))
2870 IR_FOLD(MUL(_, _))
2871 IR_FOLD_NAMED(swap_ops)
2872 {
2873 	if (op1 < op2) {  /* move lower ref to op2 */
2874 		SWAP_REFS(op1, op2);
2875 		IR_FOLD_RESTART;
2876 	}
2877 	IR_FOLD_NEXT;
2878 }
2879 
2880 IR_FOLD(ADD_OV(_, _))
2881 IR_FOLD(MUL_OV(_, _))
2882 {
2883 	if (op1 < op2) {  /* move lower ref to op2 */
2884 		SWAP_REFS(op1, op2);
2885 		IR_FOLD_RESTART;
2886 	}
2887 	/* skip CSE ??? */
2888 	IR_FOLD_EMIT;
2889 }
2890 
2891 IR_FOLD(SUB(_, _))
2892 {
2893 	if (IR_IS_TYPE_INT(IR_OPT_TYPE(opt)) && op1 == op2) {
2894 		IR_FOLD_CONST_U(0);
2895 	}
2896 	IR_FOLD_NEXT;
2897 }
2898 
2899 IR_FOLD(SUB_OV(_, _))
2900 {
2901 	if (op1 == op2) {
2902 		IR_FOLD_CONST_U(0);
2903 	}
2904 	/* skip CSE ??? */
2905 	IR_FOLD_EMIT;
2906 }
2907 
2908 /* Binary operations with op1 == op2 */
2909 IR_FOLD(AND(_,_))
2910 IR_FOLD(OR(_,_))
2911 IR_FOLD(MIN(_, _))
2912 IR_FOLD(MAX(_, _))
2913 {
2914 	/* a & a => a */
2915 	if (op1 == op2) {
2916 		IR_FOLD_COPY(op1);
2917 	}
2918 	IR_FOLD_DO_NAMED(swap_ops);
2919 }
2920 
2921 IR_FOLD(XOR(_,_))
2922 {
2923 	/* a ^ a => 0 */
2924 	if (op1 == op2) {
2925 		IR_FOLD_CONST_U(0);
2926 	}
2927 	IR_FOLD_DO_NAMED(swap_ops);
2928 }
2929 
2930 IR_FOLD(EQ(_, _))
2931 IR_FOLD(NE(_, _))
2932 {
2933 	if (op1 != op2) {
2934 		IR_FOLD_DO_NAMED(swap_ops);
2935 	} else if (IR_IS_TYPE_INT(op1_insn->type)) {
2936 		/* a == a => true */
2937 		IR_FOLD_BOOL((opt & IR_OPT_OP_MASK) == IR_EQ);
2938 	}
2939 	IR_FOLD_NEXT;
2940 }
2941 
2942 IR_FOLD(LT(_, _))
2943 IR_FOLD(GE(_, _))
2944 IR_FOLD(LE(_, _))
2945 IR_FOLD(GT(_, _))
2946 {
2947 	if (op1 == op2) {
2948 		if (IR_IS_TYPE_INT(op1_insn->type)) {
2949 			/* a >= a => true (the two low opcode bits differ) */
2950 			IR_FOLD_BOOL((opt ^ (opt >> 1)) & 1);
2951 		}
2952 	} else if (op1 < op2) {  /* move lower ref to op2 */
2953 		SWAP_REFS(op1, op2);
2954 		opt ^= 3; /* [U]LT <-> [U]GT, [U]LE <-> [U]GE */
2955 		IR_FOLD_RESTART;
2956 	}
2957 	IR_FOLD_NEXT;
2958 }
2959 
2960 IR_FOLD(ULT(_, _))
2961 IR_FOLD(UGE(_, _))
2962 IR_FOLD(ULE(_, _))
2963 IR_FOLD(UGT(_, _))
2964 {
2965 	if (op1 == op2) {
2966 		/* a >= a => true (the two low opcode bits differ) */
2967 		IR_FOLD_BOOL((opt ^ (opt >> 1)) & 1);
2968 	} else if (op1 < op2) {  /* move lower ref to op2 */
2969 		SWAP_REFS(op1, op2);
2970 		opt ^= 3; /* [U]LT <-> [U]GT, [U]LE <-> [U]GE */
2971 	}
2972 	IR_FOLD_NEXT;
2973 }
2974 
2975 IR_FOLD(COND(_, _)) // TODO: COND(_, _, _)
2976 {
2977 	if (op2 == op3) {
2978 		IR_FOLD_COPY(op2);
2979 	}
2980 	IR_FOLD_NEXT;
2981 }
2982