Lines Matching refs:ctx

117 void ir_print_const(const ir_ctx *ctx, const ir_insn *insn, FILE *f, bool quoted) argument
122 fprintf(f, "%s", ir_get_str(ctx, insn->val.name));
126 const char *str = ir_get_strl(ctx, insn->val.str, &len);
301 static void ir_grow_bottom(ir_ctx *ctx) argument
303 ir_insn *buf = ctx->ir_base - ctx->consts_limit;
304 ir_ref old_consts_limit = ctx->consts_limit;
306 if (ctx->consts_limit < 1024 * 4) {
307 ctx->consts_limit *= 2;
308 } else if (ctx->consts_limit < 1024 * 4 * 2) {
309 ctx->consts_limit = 1024 * 4 * 2;
311 ctx->consts_limit += 1024 * 4;
313 buf = ir_mem_realloc(buf, (ctx->consts_limit + ctx->insns_limit) * sizeof(ir_insn));
314 memmove(buf + (ctx->consts_limit - old_consts_limit),
316 (old_consts_limit + ctx->insns_count) * sizeof(ir_insn));
317 ctx->ir_base = buf + ctx->consts_limit;
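The grow helpers above reflect the context's double-ended layout: one ir_insn allocation holds constants at negative refs below ctx->ir_base and instructions at non-negative refs above it. A minimal sketch of the addressing rule this implies (the helper name is ours, not from the source):

    /* Negative refs index constants, positive refs index instructions;
     * both sides share the buffer anchored at ctx->ir_base. */
    static ir_insn *ref_to_insn(ir_ctx *ctx, ir_ref ref)
    {
        return &ctx->ir_base[ref];
    }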
320 static ir_ref ir_next_const(ir_ctx *ctx) argument
322 ir_ref ref = ctx->consts_count;
324 if (UNEXPECTED(ref >= ctx->consts_limit)) {
325 ir_grow_bottom(ctx);
327 ctx->consts_count = ref + 1;
331 static void ir_grow_top(ir_ctx *ctx) argument
333 ir_ref old_insns_limit = ctx->insns_limit;
334 ir_insn *buf = ctx->ir_base - ctx->consts_limit;
336 if (ctx->insns_limit < 1024 * 4) {
337 ctx->insns_limit *= 2;
338 } else if (ctx->insns_limit < 1024 * 4 * 2) {
339 ctx->insns_limit = 1024 * 4 * 2;
341 ctx->insns_limit += 1024 * 4;
343 buf = ir_mem_realloc(buf, (ctx->consts_limit + ctx->insns_limit) * sizeof(ir_insn));
344 ctx->ir_base = buf + ctx->consts_limit;
346 if (ctx->use_lists) {
347 ctx->use_lists = ir_mem_realloc(ctx->use_lists, ctx->insns_limit * sizeof(ir_use_list));
348 memset(ctx->use_lists + old_insns_limit, 0,
349 (ctx->insns_limit - old_insns_limit) * sizeof(ir_use_list));
352 if (ctx->cfg_map) {
353 ctx->cfg_map = ir_mem_realloc(ctx->cfg_map, ctx->insns_limit * sizeof(uint32_t));
354 memset(ctx->cfg_map + old_insns_limit, 0,
355 (ctx->insns_limit - old_insns_limit) * sizeof(uint32_t));
359 static ir_ref ir_next_insn(ir_ctx *ctx) argument
361 ir_ref ref = ctx->insns_count;
363 if (UNEXPECTED(ref >= ctx->insns_limit)) {
364 ir_grow_top(ctx);
366 ctx->insns_count = ref + 1;
370 void ir_truncate(ir_ctx *ctx) argument
372 ir_insn *buf = ir_mem_malloc((ctx->consts_count + ctx->insns_count) * sizeof(ir_insn));
374 …memcpy(buf, ctx->ir_base - ctx->consts_count, (ctx->consts_count + ctx->insns_count) * sizeof(ir_i…
375 ir_mem_free(ctx->ir_base - ctx->consts_limit);
376 ctx->insns_limit = ctx->insns_count;
377 ctx->consts_limit = ctx->consts_count;
378 ctx->ir_base = buf + ctx->consts_limit;
381 void ir_init(ir_ctx *ctx, uint32_t flags, ir_ref consts_limit, ir_ref insns_limit) argument
388 memset(ctx, 0, sizeof(ir_ctx));
390 ctx->insns_count = IR_UNUSED + 1;
391 ctx->insns_limit = insns_limit;
392 ctx->consts_count = -(IR_TRUE - 1);
393 ctx->consts_limit = consts_limit;
394 ctx->fold_cse_limit = IR_UNUSED + 1;
395 ctx->flags = flags;
397 ctx->spill_base = -1;
398 ctx->fixed_stack_frame_size = -1;
401 ctx->ir_base = buf + consts_limit;
403 MAKE_NOP(&ctx->ir_base[IR_UNUSED]);
404 ctx->ir_base[IR_NULL].optx = IR_OPT(IR_C_ADDR, IR_ADDR);
405 ctx->ir_base[IR_NULL].val.u64 = 0;
406 ctx->ir_base[IR_FALSE].optx = IR_OPT(IR_C_BOOL, IR_BOOL);
407 ctx->ir_base[IR_FALSE].val.u64 = 0;
408 ctx->ir_base[IR_TRUE].optx = IR_OPT(IR_C_BOOL, IR_BOOL);
409 ctx->ir_base[IR_TRUE].val.u64 = 1;
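ir_init() zeroes the context, places ir_base between the two halves of the buffer, and pre-seeds the well-known constants IR_UNUSED, IR_NULL, IR_FALSE and IR_TRUE. A hypothetical lifecycle sketch (the IR_FUNCTION flag name is assumed from the public header; the limits are only initial sizes and grow on demand):

    ir_ctx ctx;
    ir_init(&ctx, IR_FUNCTION | IR_OPT_FOLDING, 256, 1024);
    /* ... build constants and instructions ... */
    ir_truncate(&ctx); /* optional: shrink both halves to their used size */
    ir_free(&ctx);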
412 void ir_free(ir_ctx *ctx) argument
414 ir_insn *buf = ctx->ir_base - ctx->consts_limit;
416 if (ctx->strtab.data) {
417 ir_strtab_free(&ctx->strtab);
419 if (ctx->binding) {
420 ir_hashtab_free(ctx->binding);
421 ir_mem_free(ctx->binding);
423 if (ctx->use_lists) {
424 ir_mem_free(ctx->use_lists);
426 if (ctx->use_edges) {
427 ir_mem_free(ctx->use_edges);
429 if (ctx->cfg_blocks) {
430 ir_mem_free(ctx->cfg_blocks);
432 if (ctx->cfg_edges) {
433 ir_mem_free(ctx->cfg_edges);
435 if (ctx->cfg_map) {
436 ir_mem_free(ctx->cfg_map);
438 if (ctx->cfg_schedule) {
439 ir_mem_free(ctx->cfg_schedule);
441 if (ctx->rules) {
442 ir_mem_free(ctx->rules);
444 if (ctx->vregs) {
445 ir_mem_free(ctx->vregs);
447 if (ctx->live_intervals) {
448 ir_mem_free(ctx->live_intervals);
450 if (ctx->arena) {
451 ir_arena_free(ctx->arena);
453 if (ctx->regs) {
454 ir_mem_free(ctx->regs);
455 if (ctx->fused_regs) {
456 ir_strtab_free(ctx->fused_regs);
457 ir_mem_free(ctx->fused_regs);
460 if (ctx->prev_ref) {
461 ir_mem_free(ctx->prev_ref);
463 if (ctx->entries) {
464 ir_mem_free(ctx->entries);
466 if (ctx->osr_entry_loads) {
467 ir_list_free((ir_list*)ctx->osr_entry_loads);
468 ir_mem_free(ctx->osr_entry_loads);
472 ir_ref ir_unique_const_addr(ir_ctx *ctx, uintptr_t addr) argument
474 ir_ref ref = ir_next_const(ctx);
475 ir_insn *insn = &ctx->ir_base[ref];
482 insn->prev_const = ctx->prev_const_chain[IR_ADDR];
483 ctx->prev_const_chain[IR_ADDR] = ref;
490 next = ctx->prev_const_chain[IR_ADDR];
492 next_insn = &ctx->ir_base[next];
504 insn->prev_const = ctx->prev_const_chain[IR_ADDR];
505 ctx->prev_const_chain[IR_ADDR] = ref;
512 ir_ref ir_const_ex(ir_ctx *ctx, ir_val val, uint8_t type, uint32_t optx) argument
523 ref = ctx->prev_const_chain[type];
525 insn = &ctx->ir_base[ref];
541 prev_insn->prev_const = -ctx->consts_count;
543 prev = ctx->prev_const_chain[type];
544 ctx->prev_const_chain[type] = -ctx->consts_count;
547 ref = ir_next_const(ctx);
548 insn = &ctx->ir_base[ref];
557 ir_ref ir_const(ir_ctx *ctx, ir_val val, uint8_t type) argument
559 return ir_const_ex(ctx, val, type, IR_OPT(type, type));
562 ir_ref ir_const_i8(ir_ctx *ctx, int8_t c) argument
566 return ir_const(ctx, val, IR_I8);
569 ir_ref ir_const_i16(ir_ctx *ctx, int16_t c) argument
573 return ir_const(ctx, val, IR_I16);
576 ir_ref ir_const_i32(ir_ctx *ctx, int32_t c) argument
580 return ir_const(ctx, val, IR_I32);
583 ir_ref ir_const_i64(ir_ctx *ctx, int64_t c) argument
587 return ir_const(ctx, val, IR_I64);
590 ir_ref ir_const_u8(ir_ctx *ctx, uint8_t c) argument
594 return ir_const(ctx, val, IR_U8);
597 ir_ref ir_const_u16(ir_ctx *ctx, uint16_t c) argument
601 return ir_const(ctx, val, IR_U16);
604 ir_ref ir_const_u32(ir_ctx *ctx, uint32_t c) argument
608 return ir_const(ctx, val, IR_U32);
611 ir_ref ir_const_u64(ir_ctx *ctx, uint64_t c) argument
615 return ir_const(ctx, val, IR_U64);
618 ir_ref ir_const_bool(ir_ctx *ctx, bool c) argument
623 ir_ref ir_const_char(ir_ctx *ctx, char c) argument
627 return ir_const(ctx, val, IR_CHAR);
630 ir_ref ir_const_float(ir_ctx *ctx, float c) argument
635 return ir_const(ctx, val, IR_FLOAT);
638 ir_ref ir_const_double(ir_ctx *ctx, double c) argument
642 return ir_const(ctx, val, IR_DOUBLE);
645 ir_ref ir_const_addr(ir_ctx *ctx, uintptr_t c) argument
652 return ir_const(ctx, val, IR_ADDR);
655 ir_ref ir_const_func_addr(ir_ctx *ctx, uintptr_t c, ir_ref proto) argument
663 return ir_const_ex(ctx, val, IR_ADDR, IR_OPTX(IR_FUNC_ADDR, IR_ADDR, proto));
666 ir_ref ir_const_func(ir_ctx *ctx, ir_ref str, ir_ref proto) argument
671 return ir_const_ex(ctx, val, IR_ADDR, IR_OPTX(IR_FUNC, IR_ADDR, proto));
674 ir_ref ir_const_sym(ir_ctx *ctx, ir_ref str) argument
678 return ir_const_ex(ctx, val, IR_ADDR, IR_OPTX(IR_SYM, IR_ADDR, 0));
681 ir_ref ir_const_str(ir_ctx *ctx, ir_ref str) argument
685 return ir_const_ex(ctx, val, IR_ADDR, IR_OPTX(IR_STR, IR_ADDR, 0));
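All typed wrappers funnel into ir_const_ex(), which walks ctx->prev_const_chain[type] before appending, so equal constants of one type collapse to a single ref. Sketch:

    ir_ref a = ir_const_i32(&ctx, 42);
    ir_ref b = ir_const_i32(&ctx, 42); /* a == b: the chain lookup deduplicates */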
688 ir_ref ir_str(ir_ctx *ctx, const char *s) argument
692 if (!ctx->strtab.data) {
693 ir_strtab_init(&ctx->strtab, 64, 4096);
697 return ir_strtab_lookup(&ctx->strtab, s, (uint32_t)len, ir_strtab_count(&ctx->strtab) + 1);
700 ir_ref ir_strl(ir_ctx *ctx, const char *s, size_t len) argument
702 if (!ctx->strtab.data) {
703 ir_strtab_init(&ctx->strtab, 64, 4096);
706 return ir_strtab_lookup(&ctx->strtab, s, (uint32_t)len, ir_strtab_count(&ctx->strtab) + 1);
709 const char *ir_get_str(const ir_ctx *ctx, ir_ref idx) argument
711 IR_ASSERT(ctx->strtab.data);
712 return ir_strtab_str(&ctx->strtab, idx - 1);
715 const char *ir_get_strl(const ir_ctx *ctx, ir_ref idx, size_t *len) argument
717 IR_ASSERT(ctx->strtab.data);
718 return ir_strtab_strl(&ctx->strtab, idx - 1, len);
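Strings are interned in ctx->strtab on first use, with 1-based ids (hence the idx - 1 above). Sketch:

    ir_ref id = ir_str(&ctx, "counter");
    const char *s = ir_get_str(&ctx, id); /* returns the interned copy */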
721 ir_ref ir_proto_0(ir_ctx *ctx, uint8_t flags, ir_type ret_type) argument
728 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 0);
731 ir_ref ir_proto_1(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1) argument
739 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 1);
742 ir_ref ir_proto_2(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1, ir_type t2) argument
751 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 2);
754 ir_ref ir_proto_3(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1, ir_type t2, ir_type t3) argument
764 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 3);
767 ir_ref ir_proto_4(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1, ir_type t2, ir_type t3, argument
779 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 4);
782 ir_ref ir_proto_5(ir_ctx *ctx, uint8_t flags, ir_type ret_type, ir_type t1, ir_type t2, ir_type t3, argument
795 return ir_strl(ctx, (const char *)&proto, offsetof(ir_proto_t, param_types) + 5);
798 ir_ref ir_proto(ir_ctx *ctx, uint8_t flags, ir_type ret_type, uint32_t params_count, uint8_t *param… argument
807 return ir_strl(ctx, (const char *)proto, offsetof(ir_proto_t, param_types) + params_count);
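A prototype is itself an interned string: an ir_proto_t header followed by params_count type bytes. A hedged sketch pairing one with a function constant (flags value 0 assumed):

    ir_ref proto = ir_proto_2(&ctx, 0, IR_I32, IR_I32, IR_I32); /* i32 (i32, i32) */
    ir_ref f = ir_const_func(&ctx, ir_str(&ctx, "my_add"), proto);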
811 ir_ref ir_emit(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3) argument
813 ir_ref ref = ir_next_insn(ctx);
814 ir_insn *insn = &ctx->ir_base[ref];
824 ir_ref ir_emit0(ir_ctx *ctx, uint32_t opt) argument
826 return ir_emit(ctx, opt, IR_UNUSED, IR_UNUSED, IR_UNUSED);
829 ir_ref ir_emit1(ir_ctx *ctx, uint32_t opt, ir_ref op1) argument
831 return ir_emit(ctx, opt, op1, IR_UNUSED, IR_UNUSED);
834 ir_ref ir_emit2(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2) argument
836 return ir_emit(ctx, opt, op1, op2, IR_UNUSED);
839 ir_ref ir_emit3(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3) argument
841 return ir_emit(ctx, opt, op1, op2, op3);
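ir_emit() appends unconditionally; the numbered wrappers only pad the unused operands with IR_UNUSED. Sketch (a and b are assumed existing refs):

    ir_ref sum = ir_emit2(&ctx, IR_OPT(IR_ADD, IR_I32), a, b); /* no folding, no CSE */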
844 static ir_ref _ir_fold_cse(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3) argument
846 ir_ref ref = ctx->prev_insn_chain[opt & IR_OPT_OP_MASK];
850 ir_ref limit = ctx->fold_cse_limit;
862 insn = &ctx->ir_base[ref];
932 ir_ref ir_folding(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3, ir_insn *op1_insn,… argument
977 if (!(ctx->flags2 & IR_OPT_IN_SCCP)) {
978 op1_insn = ctx->ir_base + op1;
979 op2_insn = ctx->ir_base + op2;
980 op3_insn = ctx->ir_base + op3;
983 ctx->fold_insn.optx = opt;
984 ctx->fold_insn.op1 = op1;
985 ctx->fold_insn.op2 = op2;
986 ctx->fold_insn.op3 = op3;
990 if (!(ctx->flags2 & IR_OPT_IN_SCCP)) {
992 ref = _ir_fold_cse(ctx, opt, op1, op2, op3);
997 ref = ir_emit(ctx, opt, op1, op2, op3);
1001 ir_ref prev = ctx->prev_insn_chain[op];
1002 ir_insn *insn = ctx->ir_base + ref;
1009 ctx->prev_insn_chain[op] = ref;
1014 if (!(ctx->flags2 & IR_OPT_IN_SCCP)) {
1015 return ir_emit(ctx, opt, op1, op2, op3);
1017 ctx->fold_insn.optx = opt;
1018 ctx->fold_insn.op1 = op1;
1019 ctx->fold_insn.op2 = op2;
1020 ctx->fold_insn.op3 = op3;
1024 if (!(ctx->flags2 & IR_OPT_IN_SCCP)) {
1027 ctx->fold_insn.op1 = ref;
1031 if (!(ctx->flags2 & IR_OPT_IN_SCCP)) {
1032 return ir_const(ctx, val, IR_OPT_TYPE(opt));
1034 ctx->fold_insn.type = IR_OPT_TYPE(opt);
1035 ctx->fold_insn.val.u64 = val.u64;
1040 ir_ref ir_fold(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3) argument
1042 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
1046 return ir_emit(ctx, opt, op1, op2, op3);
1048 …return ir_folding(ctx, opt, op1, op2, op3, ctx->ir_base + op1, ctx->ir_base + op2, ctx->ir_base + …
1051 ir_ref ir_fold0(ir_ctx *ctx, uint32_t opt) argument
1053 return ir_fold(ctx, opt, IR_UNUSED, IR_UNUSED, IR_UNUSED);
1056 ir_ref ir_fold1(ir_ctx *ctx, uint32_t opt, ir_ref op1) argument
1058 return ir_fold(ctx, opt, op1, IR_UNUSED, IR_UNUSED);
1061 ir_ref ir_fold2(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2) argument
1063 return ir_fold(ctx, opt, op1, op2, IR_UNUSED);
1066 ir_ref ir_fold3(ir_ctx *ctx, uint32_t opt, ir_ref op1, ir_ref op2, ir_ref op3) argument
1068 return ir_fold(ctx, opt, op1, op2, op3);
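ir_fold() is the optimizing counterpart: with IR_OPT_FOLDING set it may return a constant, a CSE hit from ctx->prev_insn_chain, or a fresh instruction, and without the flag it falls back to ir_emit(). Sketch:

    ir_ref x = ir_fold2(&ctx, IR_OPT(IR_ADD, IR_I32),
                        ir_const_i32(&ctx, 1), ir_const_i32(&ctx, 2));
    /* with folding enabled, x is expected to be the constant 3 */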
1071 ir_ref ir_emit_N(ir_ctx *ctx, uint32_t opt, int32_t count) argument
1074 ir_ref *p, ref = ctx->insns_count;
1078 while (UNEXPECTED(ref + count/4 >= ctx->insns_limit)) {
1079 ir_grow_top(ctx);
1081 ctx->insns_count = ref + 1 + count/4;
1083 insn = &ctx->ir_base[ref];
1092 void ir_set_op(ir_ctx *ctx, ir_ref ref, int32_t n, ir_ref val) argument
1094 ir_insn *insn = &ctx->ir_base[ref];
1108 ir_ref ir_get_op(ir_ctx *ctx, ir_ref ref, int32_t n) argument
1110 ir_insn *insn = &ctx->ir_base[ref];
1124 ir_ref ir_param(ir_ctx *ctx, ir_type type, ir_ref region, const char *name, int pos) argument
1126 IR_ASSERT(ctx->ir_base[region].op == IR_START);
1127 return ir_emit(ctx, IR_OPT(IR_PARAM, type), region, ir_str(ctx, name), pos);
1130 ir_ref ir_var(ir_ctx *ctx, ir_type type, ir_ref region, const char *name) argument
1132 IR_ASSERT(IR_IS_BB_START(ctx->ir_base[region].op));
1133 return ir_emit(ctx, IR_OPT(IR_VAR, type), region, ir_str(ctx, name), IR_UNUSED);
1136 ir_ref ir_bind(ir_ctx *ctx, ir_ref var, ir_ref def) argument
1141 if (!ctx->binding) {
1142 ctx->binding = ir_mem_malloc(sizeof(ir_hashtab));
1143 ir_hashtab_init(ctx->binding, 16);
1147 if (!ir_hashtab_add(ctx->binding, def, var)) {
1149 def = ir_emit2(ctx, IR_OPT(IR_COPY, ctx->ir_base[def].type), def, 1);
1150 ir_hashtab_add(ctx->binding, def, var);
1155 ir_ref ir_binding_find(const ir_ctx *ctx, ir_ref ref) argument
1157 ir_ref var = ir_hashtab_find(ctx->binding, ref);
1163 void ir_build_def_use_lists(ir_ctx *ctx)
1168 ir_use_list *lists = ir_mem_calloc(ctx->insns_limit, sizeof(ir_use_list));
1172 for (i = IR_UNUSED + 1, insn = ctx->ir_base + i; i < ctx->insns_count;) {
1192 for (i = IR_UNUSED + 1, use_list = &lists[i]; i < ctx->insns_count; i++, use_list++) {
1199 for (i = IR_UNUSED + 1, insn = ctx->ir_base + i; i < ctx->insns_count;) {
1213 ctx->use_edges = edges;
1214 ctx->use_edges_count = edges_count;
1215 ctx->use_lists = lists;
1218 void ir_build_def_use_lists(ir_ctx *ctx) argument
1223 ir_use_list *lists = ir_mem_calloc(ctx->insns_limit, sizeof(ir_use_list));
1228 linked_lists_size = IR_ALIGNED_SIZE(ctx->insns_count, 1024);
1230 for (i = IR_UNUSED + 1, insn = ctx->ir_base + i; i < ctx->insns_count;) {
1266 ctx->use_edges_count = edges_count;
1268 for (use_list = lists + ctx->insns_count - 1; use_list != lists; use_list--) {
1283 ctx->use_edges = edges;
1284 ctx->use_lists = lists;
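Once the lists are built, the users of a ref are the count edges starting at refs inside ctx->use_edges. Iteration sketch (def is an assumed existing ref):

    ir_use_list *ul = &ctx.use_lists[def];
    for (ir_ref j = 0; j < ul->count; j++) {
        ir_ref use = ctx.use_edges[ul->refs + j];
        /* ... visit ctx.ir_base[use] ... */
    }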
1289 void ir_use_list_remove_all(ir_ctx *ctx, ir_ref from, ir_ref ref) argument
1296 use_list = &ctx->use_lists[from];
1298 for (j = 0, p = q = &ctx->use_edges[use_list->refs]; j < n; j++, p++) {
1318 void ir_use_list_remove_one(ir_ctx *ctx, ir_ref from, ir_ref ref) argument
1324 use_list = &ctx->use_lists[from];
1327 p = &ctx->use_edges[use_list->refs];
1345 void ir_use_list_replace_one(ir_ctx *ctx, ir_ref ref, ir_ref use, ir_ref new_use) argument
1351 use_list = &ctx->use_lists[ref];
1353 for (i = 0, p = &ctx->use_edges[use_list->refs]; i < n; i++, p++) {
1361 void ir_use_list_replace_all(ir_ctx *ctx, ir_ref ref, ir_ref use, ir_ref new_use) argument
1367 use_list = &ctx->use_lists[ref];
1369 for (i = 0, p = &ctx->use_edges[use_list->refs]; i < n; i++, p++) {
1376 bool ir_use_list_add(ir_ctx *ctx, ir_ref to, ir_ref ref) argument
1382 use_list = &ctx->use_lists[to];
1384 if (n < ctx->use_edges_count && ctx->use_edges[n] == IR_UNUSED) {
1385 ctx->use_edges[n] = ref;
1389 size_t old_size = IR_ALIGNED_SIZE(ctx->use_edges_count * sizeof(ir_ref), 4096);
1390 …size_t new_size = IR_ALIGNED_SIZE((ctx->use_edges_count + use_list->count + 1) * sizeof(ir_ref), 4…
1394 ctx->use_edges = ir_mem_realloc(ctx->use_edges, new_size);
1395 } else if (n == ctx->use_edges_count) {
1396 ctx->use_edges[n] = ref;
1398 ctx->use_edges_count++;
1401 …memcpy(ctx->use_edges + ctx->use_edges_count, ctx->use_edges + use_list->refs, use_list->count * s…
1402 use_list->refs = ctx->use_edges_count;
1403 ctx->use_edges[use_list->refs + use_list->count] = ref;
1405 ctx->use_edges_count += use_list->count;
1415 void ir_use_list_sort(ir_ctx *ctx, ir_ref ref) argument
1421 use_list = &ctx->use_lists[ref];
1424 qsort(ctx->use_edges + use_list->refs, n, sizeof(ir_ref), ir_ref_cmp);
1428 void ir_replace(ir_ctx *ctx, ir_ref ref, ir_ref new_ref) argument
1435 use_list = &ctx->use_lists[ref];
1437 p = ctx->use_edges + use_list->refs;
1444 insn = &ctx->ir_base[use];
1453 insn = &ctx->ir_base[use];
1457 if (ir_use_list_add(ctx, new_ref, use)) {
1459 use_list = &ctx->use_lists[ref];
1461 p = &ctx->use_edges[use_list->refs + i];
1467 void ir_update_op(ir_ctx *ctx, ir_ref ref, uint32_t idx, ir_ref new_val) argument
1469 ir_insn *insn = &ctx->ir_base[ref];
1474 ir_use_list_add(ctx, new_val, ref);
1478 ir_use_list_remove_one(ctx, old_val, ref);
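These helpers keep use_lists/use_edges consistent while the graph is edited. Sketch (refs assumed to exist and def-use lists already built):

    ir_replace(&ctx, old_ref, new_ref);   /* retarget every user of old_ref */
    ir_update_op(&ctx, user, 2, new_val); /* retarget a single operand */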
1883 static ir_alias ir_check_aliasing(ir_ctx *ctx, ir_ref addr1, ir_ref addr2)
1891 insn1 = &ctx->ir_base[addr1];
1892 insn2 = &ctx->ir_base[addr2];
1895 uintptr_t offset1 = ctx->ir_base[insn1->op2].val.u64;
1901 uintptr_t offset1 = ctx->ir_base[insn1->op2].val.u64;
1902 uintptr_t offset2 = ctx->ir_base[insn2->op2].val.u64;
1909 uintptr_t offset2 = ctx->ir_base[insn2->op2].val.u64;
1918 static ir_alias ir_check_partial_aliasing(const ir_ctx *ctx, ir_ref addr1, ir_ref addr2, ir_type ty… argument
1926 insn1 = &ctx->ir_base[addr1];
1927 insn2 = &ctx->ir_base[addr2];
1931 } else if (ctx->ir_base[insn1->op2].op == IR_SYM
1932 || ctx->ir_base[insn1->op2].op == IR_ALLOCA
1933 || ctx->ir_base[insn1->op2].op == IR_VADDR) {
1943 } else if (ctx->ir_base[insn2->op2].op == IR_SYM
1944 || ctx->ir_base[insn2->op2].op == IR_ALLOCA
1945 || ctx->ir_base[insn2->op2].op == IR_VADDR) {
1957 } else if (IR_IS_CONST_REF(off1) && !IR_IS_SYM_CONST(ctx->ir_base[off1].op)) {
1958 offset1 = ctx->ir_base[off1].val.addr;
1964 } else if (IR_IS_CONST_REF(off2) && !IR_IS_SYM_CONST(ctx->ir_base[off2].op)) {
1965 offset2 = ctx->ir_base[off2].val.addr;
1977 insn1 = &ctx->ir_base[base1];
1978 insn2 = &ctx->ir_base[base2];
1980 insn1 = &ctx->ir_base[insn1->op2];
1986 insn1 = &ctx->ir_base[insn1->op1];
1990 insn2 = &ctx->ir_base[insn2->op2];
1996 insn2 = &ctx->ir_base[insn2->op1];
2012 static ir_ref ir_find_aliasing_load(ir_ctx *ctx, ir_ref ref, ir_type type, ir_ref addr) argument
2019 insn = &ctx->ir_base[ref];
2025 return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), ref); /* load forwarding with bitcast (L2L) */
2028 return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), ref); /* partial load forwarding (L2L) */
2032 ir_type type2 = ctx->ir_base[insn->op3].type;
2035 if (ctx->ir_base[insn->op3].op == IR_RLOAD
2036 && (modified_regset & (1 << ctx->ir_base[insn->op3].op2))) {
2042 …return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), insn->op3); /* store forwarding with bitcast (S2L) …
2045 return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), insn->op3); /* partial store forwarding (S2L) */
2049 } else if (ir_check_partial_aliasing(ctx, addr, insn->op2, type, type2) != IR_NO_ALIAS) {
2064 ir_ref _ir_PARAM(ir_ctx *ctx, ir_type type, const char* name, ir_ref num) argument
2066 IR_ASSERT(ctx->control);
2067 IR_ASSERT(ctx->ir_base[ctx->control].op == IR_START);
2068 IR_ASSERT(ctx->insns_count == num + 1);
2069 return ir_param(ctx, type, ctx->control, name, num);
2072 ir_ref _ir_VAR(ir_ctx *ctx, ir_type type, const char* name) argument
2077 ir_ref ref = ctx->control;
2081 if (IR_IS_BB_START(ctx->ir_base[ref].op)) {
2084 ref = ctx->ir_base[ref].op1;
2086 return ir_var(ctx, type, ref, name);
2089 ir_ref _ir_PHI_2(ir_ctx *ctx, ir_type type, ir_ref src1, ir_ref src2) argument
2091 IR_ASSERT(ctx->control);
2092 …IR_ASSERT(ctx->ir_base[ctx->control].op == IR_MERGE || ctx->ir_base[ctx->control].op == IR_LOOP_BE…
2096 return ir_emit3(ctx, IR_OPTX(IR_PHI, type, 3), ctx->control, src1, src2);
2099 ir_ref _ir_PHI_N(ir_ctx *ctx, ir_type type, ir_ref n, ir_ref *inputs) argument
2101 IR_ASSERT(ctx->control);
2109 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
2110 IR_ASSERT(ctx->ir_base[ctx->control].op == IR_MERGE
2111 || ctx->ir_base[ctx->control].op == IR_LOOP_BEGIN);
2126 ref = ir_emit_N(ctx, IR_OPT(IR_PHI, type), n + 1);
2127 ir_set_op(ctx, ref, 1, ctx->control);
2129 ir_set_op(ctx, ref, i + 2, inputs[i]);
2135 void _ir_PHI_SET_OP(ir_ctx *ctx, ir_ref phi, ir_ref pos, ir_ref src) argument
2137 ir_insn *insn = &ctx->ir_base[phi];
2141 IR_ASSERT(ctx->ir_base[insn->op1].op == IR_MERGE || ctx->ir_base[insn->op1].op == IR_LOOP_BEGIN);
2147 void _ir_START(ir_ctx *ctx) argument
2149 IR_ASSERT(!ctx->control);
2150 IR_ASSERT(ctx->insns_count == 1);
2151 ctx->control = ir_emit0(ctx, IR_START);
2154 void _ir_ENTRY(ir_ctx *ctx, ir_ref src, ir_ref num) argument
2156 IR_ASSERT(!ctx->control);
2158 IR_ASSERT((ir_op_flags[ctx->ir_base[src].op] & IR_OP_FLAG_TERMINATOR)
2159 || ctx->ir_base[src].op == IR_END
2160 || ctx->ir_base[src].op == IR_LOOP_END); /* return from a recursive call */
2161 ctx->control = ir_emit2(ctx, IR_ENTRY, src, num);
2164 void _ir_BEGIN(ir_ctx *ctx, ir_ref src) argument
2166 IR_ASSERT(!ctx->control);
2167 if (EXPECTED(ctx->flags & IR_OPT_FOLDING)
2169 && src + 1 == ctx->insns_count
2170 && ctx->ir_base[src].op == IR_END) {
2172 ctx->control = ctx->ir_base[src].op1;
2173 ctx->insns_count--;
2175 ctx->control = ir_emit1(ctx, IR_BEGIN, src);
2179 ir_ref _ir_fold_condition(ir_ctx *ctx, ir_ref ref) argument
2181 ir_insn *insn = &ctx->ir_base[ref];
2184 ir_insn *op2_insn = &ctx->ir_base[insn->op2];
2193 ir_ref _ir_IF(ir_ctx *ctx, ir_ref condition) argument
2197 IR_ASSERT(ctx->control);
2198 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
2199 if_ref = ir_emit2(ctx, IR_IF, ctx->control, condition);
2200 ctx->control = IR_UNUSED;
2204 condition = _ir_fold_condition(ctx, condition);
2206 condition = ir_ref_is_true(ctx, condition) ? IR_TRUE : IR_FALSE;
2209 ir_ref ref = ctx->control;
2213 insn = &ctx->ir_base[ref];
2241 if_ref = ir_emit2(ctx, IR_IF, ctx->control, condition);
2242 ctx->control = IR_UNUSED;
2246 void _ir_IF_TRUE(ir_ctx *ctx, ir_ref if_ref) argument
2248 IR_ASSERT(!ctx->control);
2250 IR_ASSERT(ctx->ir_base[if_ref].op == IR_IF);
2251 ctx->control = ir_emit1(ctx, IR_IF_TRUE, if_ref);
2254 void _ir_IF_TRUE_cold(ir_ctx *ctx, ir_ref if_ref) argument
2256 IR_ASSERT(!ctx->control);
2258 IR_ASSERT(ctx->ir_base[if_ref].op == IR_IF);
2260 ctx->control = ir_emit2(ctx, IR_IF_TRUE, if_ref, 1);
2263 void _ir_IF_FALSE(ir_ctx *ctx, ir_ref if_ref) argument
2265 IR_ASSERT(!ctx->control);
2267 IR_ASSERT(ctx->ir_base[if_ref].op == IR_IF);
2268 ctx->control = ir_emit1(ctx, IR_IF_FALSE, if_ref);
2271 void _ir_IF_FALSE_cold(ir_ctx *ctx, ir_ref if_ref) argument
2273 IR_ASSERT(!ctx->control);
2275 IR_ASSERT(ctx->ir_base[if_ref].op == IR_IF);
2277 ctx->control = ir_emit2(ctx, IR_IF_FALSE, if_ref, 1);
2280 ir_ref _ir_END(ir_ctx *ctx) argument
2284 IR_ASSERT(ctx->control);
2285 ref = ir_emit1(ctx, IR_END, ctx->control);
2286 ctx->control = IR_UNUSED;
2290 void _ir_MERGE_2(ir_ctx *ctx, ir_ref src1, ir_ref src2) argument
2292 IR_ASSERT(!ctx->control);
2293 ctx->control = ir_emit2(ctx, IR_OPTX(IR_MERGE, IR_VOID, 2), src1, src2);
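The builder API threads ctx->control through every emission. A typical if/else diamond, assuming refs cond, a and b already exist (sketch):

    ir_ref if_ref = _ir_IF(&ctx, cond);
    _ir_IF_TRUE(&ctx, if_ref);
    ir_ref end1 = _ir_END(&ctx);
    _ir_IF_FALSE(&ctx, if_ref);
    ir_ref end2 = _ir_END(&ctx);
    _ir_MERGE_2(&ctx, end1, end2);
    ir_ref val = _ir_PHI_2(&ctx, IR_I32, a, b); /* selects a or b at the merge */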
2296 void _ir_MERGE_N(ir_ctx *ctx, ir_ref n, ir_ref *inputs) argument
2298 IR_ASSERT(!ctx->control);
2301 _ir_BEGIN(ctx, inputs[0]);
2305 ctx->control = ir_emit_N(ctx, IR_MERGE, n);
2306 ops = ctx->ir_base[ctx->control].ops;
2314 void _ir_MERGE_SET_OP(ir_ctx *ctx, ir_ref merge, ir_ref pos, ir_ref src) argument
2316 ir_insn *insn = &ctx->ir_base[merge];
2324 ir_ref _ir_END_LIST(ir_ctx *ctx, ir_ref list) argument
2328 IR_ASSERT(ctx->control);
2329 IR_ASSERT(!list || ctx->ir_base[list].op == IR_END);
2331 ref = ir_emit2(ctx, IR_END, ctx->control, list);
2332 ctx->control = IR_UNUSED;
2336 ir_ref _ir_END_PHI_LIST(ir_ctx *ctx, ir_ref list, ir_ref val) argument
2340 IR_ASSERT(ctx->control);
2341 IR_ASSERT(!list || ctx->ir_base[list].op == IR_END);
2343 ref = ir_emit3(ctx, IR_END, ctx->control, list, val);
2344 ctx->control = IR_UNUSED;
2348 void _ir_MERGE_LIST(ir_ctx *ctx, ir_ref list) argument
2355 IR_ASSERT(!ctx->control);
2359 ir_insn *insn = &ctx->ir_base[ref];
2370 ctx->ir_base[list].op2 = IR_UNUSED;
2371 _ir_BEGIN(ctx, list);
2373 ctx->control = ir_emit_N(ctx, IR_MERGE, n);
2376 ir_insn *insn = &ctx->ir_base[ref];
2378 ir_set_op(ctx, ctx->control, n, ref);
2387 ir_ref _ir_PHI_LIST(ir_ctx *ctx, ir_ref list) argument
2396 end = &ctx->ir_base[list];
2400 _ir_BEGIN(ctx, list);
2402 _ir_MERGE_LIST(ctx, list);
2405 type = ctx->ir_base[end->op3].type;
2406 _ir_MERGE_LIST(ctx, list);
2407 merge = &ctx->ir_base[ctx->control];
2409 phi = ir_emit_N(ctx, IR_OPT(IR_PHI, type), merge->inputs_count + 1);
2410 merge = &ctx->ir_base[ctx->control];
2412 ir_set_op(ctx, phi, 1, ctx->control);
2414 end = &ctx->ir_base[ops[i + 1]];
2415 ir_set_op(ctx, phi, i + 2, end->op3);
2422 ir_ref _ir_LOOP_BEGIN(ir_ctx *ctx, ir_ref src1) argument
2424 IR_ASSERT(!ctx->control);
2425 ctx->control = ir_emit2(ctx, IR_OPTX(IR_LOOP_BEGIN, IR_VOID, 2), src1, IR_UNUSED);
2426 return ctx->control;
2429 ir_ref _ir_LOOP_END(ir_ctx *ctx) argument
2433 IR_ASSERT(ctx->control);
2434 ref = ir_emit1(ctx, IR_LOOP_END, ctx->control);
2435 ctx->control = IR_UNUSED;
2439 ir_ref _ir_CALL(ir_ctx *ctx, ir_type type, ir_ref func) argument
2441 IR_ASSERT(ctx->control);
2442 return ctx->control = ir_emit2(ctx, IR_OPTX(IR_CALL, type, 2), ctx->control, func);
2445 ir_ref _ir_CALL_1(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1) argument
2447 IR_ASSERT(ctx->control);
2448 return ctx->control = ir_emit3(ctx, IR_OPTX(IR_CALL, type, 3), ctx->control, func, arg1);
2451 ir_ref _ir_CALL_2(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2) argument
2455 IR_ASSERT(ctx->control);
2456 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 4);
2457 ir_set_op(ctx, call, 1, ctx->control);
2458 ir_set_op(ctx, call, 2, func);
2459 ir_set_op(ctx, call, 3, arg1);
2460 ir_set_op(ctx, call, 4, arg2);
2461 ctx->control = call;
2465 ir_ref _ir_CALL_3(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3) argument
2469 IR_ASSERT(ctx->control);
2470 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 5);
2471 ir_set_op(ctx, call, 1, ctx->control);
2472 ir_set_op(ctx, call, 2, func);
2473 ir_set_op(ctx, call, 3, arg1);
2474 ir_set_op(ctx, call, 4, arg2);
2475 ir_set_op(ctx, call, 5, arg3);
2476 ctx->control = call;
2480 ir_ref _ir_CALL_4(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir… argument
2484 IR_ASSERT(ctx->control);
2485 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 6);
2486 ir_set_op(ctx, call, 1, ctx->control);
2487 ir_set_op(ctx, call, 2, func);
2488 ir_set_op(ctx, call, 3, arg1);
2489 ir_set_op(ctx, call, 4, arg2);
2490 ir_set_op(ctx, call, 5, arg3);
2491 ir_set_op(ctx, call, 6, arg4);
2492 ctx->control = call;
2496 ir_ref _ir_CALL_5(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir… argument
2500 IR_ASSERT(ctx->control);
2501 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 7);
2502 ir_set_op(ctx, call, 1, ctx->control);
2503 ir_set_op(ctx, call, 2, func);
2504 ir_set_op(ctx, call, 3, arg1);
2505 ir_set_op(ctx, call, 4, arg2);
2506 ir_set_op(ctx, call, 5, arg3);
2507 ir_set_op(ctx, call, 6, arg4);
2508 ir_set_op(ctx, call, 7, arg5);
2509 ctx->control = call;
2513 ir_ref _ir_CALL_6(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, ir… argument
2517 IR_ASSERT(ctx->control);
2518 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), 8);
2519 ir_set_op(ctx, call, 1, ctx->control);
2520 ir_set_op(ctx, call, 2, func);
2521 ir_set_op(ctx, call, 3, arg1);
2522 ir_set_op(ctx, call, 4, arg2);
2523 ir_set_op(ctx, call, 5, arg3);
2524 ir_set_op(ctx, call, 6, arg4);
2525 ir_set_op(ctx, call, 7, arg5);
2526 ir_set_op(ctx, call, 8, arg6);
2527 ctx->control = call;
2531 ir_ref _ir_CALL_N(ir_ctx *ctx, ir_type type, ir_ref func, uint32_t count, ir_ref *args) argument
2536 IR_ASSERT(ctx->control);
2537 call = ir_emit_N(ctx, IR_OPT(IR_CALL, type), count + 2);
2538 ir_set_op(ctx, call, 1, ctx->control);
2539 ir_set_op(ctx, call, 2, func);
2541 ir_set_op(ctx, call, i + 3, args[i]);
2543 ctx->control = call;
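Every call builder links the call into the control chain and returns its ref as the result value. Sketch (f from the ir_const_func() example above, x and y assumed):

    ir_ref r = _ir_CALL_2(&ctx, IR_I32, f, x, y);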
2547 void _ir_UNREACHABLE(ir_ctx *ctx) argument
2549 IR_ASSERT(ctx->control);
2550 ctx->control = ir_emit3(ctx, IR_UNREACHABLE, ctx->control, IR_UNUSED, ctx->ir_base[1].op1);
2551 ctx->ir_base[1].op1 = ctx->control;
2552 ctx->control = IR_UNUSED;
2555 void _ir_TAILCALL(ir_ctx *ctx, ir_type type, ir_ref func) argument
2557 IR_ASSERT(ctx->control);
2558 if (ctx->ret_type == (ir_type)-1) {
2559 ctx->ret_type = type;
2561 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2562 ctx->control = ir_emit2(ctx, IR_OPTX(IR_TAILCALL, type, 2), ctx->control, func);
2563 _ir_UNREACHABLE(ctx);
2566 void _ir_TAILCALL_1(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1) argument
2568 IR_ASSERT(ctx->control);
2569 if (ctx->ret_type == (ir_type)-1) {
2570 ctx->ret_type = type;
2572 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2573 ctx->control = ir_emit3(ctx, IR_OPTX(IR_TAILCALL, type, 3), ctx->control, func, arg1);
2574 _ir_UNREACHABLE(ctx);
2577 void _ir_TAILCALL_2(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2) argument
2581 IR_ASSERT(ctx->control);
2582 if (ctx->ret_type == (ir_type)-1) {
2583 ctx->ret_type = type;
2585 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2586 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 4);
2587 ir_set_op(ctx, call, 1, ctx->control);
2588 ir_set_op(ctx, call, 2, func);
2589 ir_set_op(ctx, call, 3, arg1);
2590 ir_set_op(ctx, call, 4, arg2);
2591 ctx->control = call;
2592 _ir_UNREACHABLE(ctx);
2595 void _ir_TAILCALL_3(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3) argument
2599 IR_ASSERT(ctx->control);
2600 if (ctx->ret_type == (ir_type)-1) {
2601 ctx->ret_type = type;
2603 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2604 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 5);
2605 ir_set_op(ctx, call, 1, ctx->control);
2606 ir_set_op(ctx, call, 2, func);
2607 ir_set_op(ctx, call, 3, arg1);
2608 ir_set_op(ctx, call, 4, arg2);
2609 ir_set_op(ctx, call, 5, arg3);
2610 ctx->control = call;
2611 _ir_UNREACHABLE(ctx);
2614 void _ir_TAILCALL_4(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, … argument
2618 IR_ASSERT(ctx->control);
2619 if (ctx->ret_type == (ir_type)-1) {
2620 ctx->ret_type = type;
2622 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2623 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 6);
2624 ir_set_op(ctx, call, 1, ctx->control);
2625 ir_set_op(ctx, call, 2, func);
2626 ir_set_op(ctx, call, 3, arg1);
2627 ir_set_op(ctx, call, 4, arg2);
2628 ir_set_op(ctx, call, 5, arg3);
2629 ir_set_op(ctx, call, 6, arg4);
2630 ctx->control = call;
2631 _ir_UNREACHABLE(ctx);
2634 void _ir_TAILCALL_5(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, … argument
2638 IR_ASSERT(ctx->control);
2639 if (ctx->ret_type == (ir_type)-1) {
2640 ctx->ret_type = type;
2642 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2643 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 7);
2644 ir_set_op(ctx, call, 1, ctx->control);
2645 ir_set_op(ctx, call, 2, func);
2646 ir_set_op(ctx, call, 3, arg1);
2647 ir_set_op(ctx, call, 4, arg2);
2648 ir_set_op(ctx, call, 5, arg3);
2649 ir_set_op(ctx, call, 6, arg4);
2650 ir_set_op(ctx, call, 7, arg5);
2651 ctx->control = call;
2652 _ir_UNREACHABLE(ctx);
2655 void _ir_TAILCALL_6(ir_ctx *ctx, ir_type type, ir_ref func, ir_ref arg1, ir_ref arg2, ir_ref arg3, … argument
2659 IR_ASSERT(ctx->control);
2660 if (ctx->ret_type == (ir_type)-1) {
2661 ctx->ret_type = type;
2663 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2664 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), 8);
2665 ir_set_op(ctx, call, 1, ctx->control);
2666 ir_set_op(ctx, call, 2, func);
2667 ir_set_op(ctx, call, 3, arg1);
2668 ir_set_op(ctx, call, 4, arg2);
2669 ir_set_op(ctx, call, 5, arg3);
2670 ir_set_op(ctx, call, 6, arg4);
2671 ir_set_op(ctx, call, 7, arg5);
2672 ir_set_op(ctx, call, 8, arg6);
2673 ctx->control = call;
2674 _ir_UNREACHABLE(ctx);
2677 void _ir_TAILCALL_N(ir_ctx *ctx, ir_type type, ir_ref func, uint32_t count, ir_ref *args) argument
2682 IR_ASSERT(ctx->control);
2683 if (ctx->ret_type == (ir_type)-1) {
2684 ctx->ret_type = type;
2686 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2687 call = ir_emit_N(ctx, IR_OPT(IR_TAILCALL, type), count + 2);
2688 ir_set_op(ctx, call, 1, ctx->control);
2689 ir_set_op(ctx, call, 2, func);
2691 ir_set_op(ctx, call, i + 3, args[i]);
2693 ctx->control = call;
2694 _ir_UNREACHABLE(ctx);
2697 ir_ref _ir_SWITCH(ir_ctx *ctx, ir_ref val) argument
2701 IR_ASSERT(ctx->control);
2702 ref = ir_emit2(ctx, IR_SWITCH, ctx->control, val);
2703 ctx->control = IR_UNUSED;
2707 void _ir_CASE_VAL(ir_ctx *ctx, ir_ref switch_ref, ir_ref val) argument
2709 IR_ASSERT(!ctx->control);
2710 ctx->control = ir_emit2(ctx, IR_CASE_VAL, switch_ref, val);
2713 void _ir_CASE_DEFAULT(ir_ctx *ctx, ir_ref switch_ref) argument
2715 IR_ASSERT(!ctx->control);
2716 ctx->control = ir_emit1(ctx, IR_CASE_DEFAULT, switch_ref);
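A switch is one _ir_SWITCH() plus one case block per target; each case body ends with _ir_END() before the next case is opened. Sketch:

    ir_ref sw = _ir_SWITCH(&ctx, val);
    _ir_CASE_VAL(&ctx, sw, ir_const_i32(&ctx, 1));
    /* ... case body ... */
    ir_ref e1 = _ir_END(&ctx);
    _ir_CASE_DEFAULT(&ctx, sw);
    /* ... default body ... */
    ir_ref e2 = _ir_END(&ctx);
    _ir_MERGE_2(&ctx, e1, e2);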
2719 void _ir_RETURN(ir_ctx *ctx, ir_ref val) argument
2721 ir_type type = (val != IR_UNUSED) ? ctx->ir_base[val].type : IR_VOID;
2723 IR_ASSERT(ctx->control);
2724 if (ctx->ret_type == (ir_type)-1) {
2725 ctx->ret_type = type;
2727 IR_ASSERT(ctx->ret_type == type && "conflicting return type");
2728 ctx->control = ir_emit3(ctx, IR_RETURN, ctx->control, val, ctx->ir_base[1].op1);
2729 ctx->ir_base[1].op1 = ctx->control;
2730 ctx->control = IR_UNUSED;
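Putting the pieces together, the smallest complete function is START/PARAM/RETURN. Sketch (an i32 identity function):

    _ir_START(&ctx);
    ir_ref x = _ir_PARAM(&ctx, IR_I32, "x", 1);
    _ir_RETURN(&ctx, x);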
2733 void _ir_IJMP(ir_ctx *ctx, ir_ref addr) argument
2735 IR_ASSERT(ctx->control);
2736 ctx->control = ir_emit3(ctx, IR_IJMP, ctx->control, addr, ctx->ir_base[1].op1);
2737 ctx->ir_base[1].op1 = ctx->control;
2738 ctx->control = IR_UNUSED;
2741 ir_ref _ir_ADD_OFFSET(ir_ctx *ctx, ir_ref addr, uintptr_t offset) argument
2744 addr = ir_fold2(ctx, IR_OPT(IR_ADD, IR_ADDR), addr, ir_const_addr(ctx, offset));
2749 void _ir_GUARD(ir_ctx *ctx, ir_ref condition, ir_ref addr) argument
2751 IR_ASSERT(ctx->control);
2753 if (ir_ref_is_true(ctx, condition)) {
2757 } else if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
2759 ir_ref ref = ctx->control;
2763 insn = &ctx->ir_base[ref];
2789 if (ctx->snapshot_create) {
2790 ctx->snapshot_create(ctx, addr);
2792 ctx->control = ir_emit3(ctx, IR_GUARD, ctx->control, condition, addr);
2795 void _ir_GUARD_NOT(ir_ctx *ctx, ir_ref condition, ir_ref addr) argument
2797 IR_ASSERT(ctx->control);
2799 if (!ir_ref_is_true(ctx, condition)) {
2803 } else if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
2805 ir_ref ref = ctx->control;
2809 insn = &ctx->ir_base[ref];
2835 if (ctx->snapshot_create) {
2836 ctx->snapshot_create(ctx, addr);
2838 ctx->control = ir_emit3(ctx, IR_GUARD_NOT, ctx->control, condition, addr);
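Guards deoptimize to addr when the condition fails (or holds, for GUARD_NOT); constant-true conditions are elided up front via ir_ref_is_true(). Sketch (exit_addr is an assumed side-exit stub):

    _ir_GUARD(&ctx, cond, ir_const_addr(&ctx, (uintptr_t)exit_addr));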
2841 ir_ref _ir_SNAPSHOT(ir_ctx *ctx, ir_ref n) argument
2845 IR_ASSERT(ctx->control);
2846 snapshot = ir_emit_N(ctx, IR_SNAPSHOT, 1 + n); /* op1 is used for control */
2847 ctx->ir_base[snapshot].op1 = ctx->control;
2848 ctx->control = snapshot;
2852 void _ir_SNAPSHOT_SET_OP(ir_ctx *ctx, ir_ref snapshot, ir_ref pos, ir_ref val) argument
2854 ir_insn *insn = &ctx->ir_base[snapshot];
2864 ir_ref _ir_EXITCALL(ir_ctx *ctx, ir_ref func) argument
2866 IR_ASSERT(ctx->control);
2867 return ctx->control = ir_emit2(ctx, IR_OPT(IR_EXITCALL, IR_I32), ctx->control, func);
2870 ir_ref _ir_ALLOCA(ir_ctx *ctx, ir_ref size) argument
2872 IR_ASSERT(ctx->control);
2873 return ctx->control = ir_emit2(ctx, IR_OPT(IR_ALLOCA, IR_ADDR), ctx->control, size);
2876 void _ir_AFREE(ir_ctx *ctx, ir_ref size) argument
2878 IR_ASSERT(ctx->control);
2879 ctx->control = ir_emit2(ctx, IR_AFREE, ctx->control, size);
2882 ir_ref _ir_VLOAD(ir_ctx *ctx, ir_type type, ir_ref var) argument
2884 ir_ref ref = ctx->control;
2887 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
2888 IR_ASSERT(ctx->control);
2889 return ctx->control = ir_emit2(ctx, IR_OPT(IR_VLOAD, type), ctx->control, var);
2892 insn = &ctx->ir_base[ref];
2898 return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), ref); /* load forwarding with bitcast (L2L) */
2901 return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), ref); /* partial load forwarding (L2L) */
2905 ir_type type2 = ctx->ir_base[insn->op3].type;
2911 …return ir_fold1(ctx, IR_OPT(IR_BITCAST, type), insn->op3); /* store forwarding with bitcast (S2L) …
2914 return ir_fold1(ctx, IR_OPT(IR_TRUNC, type), insn->op3); /* partial store forwarding (S2L) */
2925 IR_ASSERT(ctx->control);
2926 return ctx->control = ir_emit2(ctx, IR_OPT(IR_VLOAD, type), ctx->control, var);
2929 void _ir_VSTORE(ir_ctx *ctx, ir_ref var, ir_ref val) argument
2932 ir_ref ref = ctx->control;
2937 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
2938 IR_ASSERT(ctx->control);
2939 ctx->control = ir_emit3(ctx, IR_VSTORE, ctx->control, var, val);
2944 insn = &ctx->ir_base[val];
2947 && ir_type_size[insn->type] == ir_type_size[ctx->ir_base[insn->op1].type]) {
2953 IR_ASSERT(ctx->control);
2955 insn = &ctx->ir_base[ref];
2963 ctx->ir_base[prev].op1 = insn->op1;
2965 ctx->control = insn->op1;
2988 ctx->control = ir_emit3(ctx, IR_VSTORE, ctx->control, var, val);
2991 ir_ref _ir_TLS(ir_ctx *ctx, ir_ref index, ir_ref offset) argument
2993 IR_ASSERT(ctx->control);
2994 return ctx->control = ir_emit3(ctx, IR_OPT(IR_TLS, IR_ADDR), ctx->control, index, offset);
2997 ir_ref _ir_RLOAD(ir_ctx *ctx, ir_type type, ir_ref reg) argument
2999 IR_ASSERT(ctx->control);
3000 return ctx->control = ir_emit2(ctx, IR_OPT(IR_RLOAD, type), ctx->control, reg);
3003 void _ir_RSTORE(ir_ctx *ctx, ir_ref reg, ir_ref val) argument
3005 IR_ASSERT(ctx->control);
3006 ctx->control = ir_emit3(ctx, IR_RSTORE, ctx->control, val, reg);
3009 ir_ref _ir_LOAD(ir_ctx *ctx, ir_type type, ir_ref addr) argument
3013 IR_ASSERT(ctx->control);
3014 if (EXPECTED(ctx->flags & IR_OPT_FOLDING)) {
3015 ref = ir_find_aliasing_load(ctx, ctx->control, type, addr);
3018 ctx->control = ref = ir_emit2(ctx, IR_OPT(IR_LOAD, type), ctx->control, addr);
3023 void _ir_STORE(ir_ctx *ctx, ir_ref addr, ir_ref val) argument
3026 ir_ref ref = ctx->control;
3029 ir_type type = ctx->ir_base[val].type;
3033 IR_ASSERT(ctx->control);
3034 if (UNEXPECTED(!(ctx->flags & IR_OPT_FOLDING))) {
3035 ctx->control = ir_emit3(ctx, IR_STORE, ctx->control, addr, val);
3040 insn = &ctx->ir_base[val];
3043 && ir_type_size[insn->type] == ir_type_size[ctx->ir_base[insn->op1].type]) {
3050 insn = &ctx->ir_base[ref];
3053 if (ctx->ir_base[insn->op3].type == type) {
3059 ctx->ir_base[prev].op1 = insn->op1;
3061 ctx->control = insn->op1;
3071 type2 = ctx->ir_base[insn->op3].type;
3084 if (ir_check_partial_aliasing(ctx, addr, insn->op2, type, type2) != IR_NO_ALIAS) {
3095 ctx->control = ir_emit3(ctx, IR_STORE, ctx->control, addr, val);
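With folding enabled, _ir_LOAD() and _ir_STORE() consult the aliasing helpers above, so redundant memory traffic can disappear at build time. Sketch:

    ir_ref v = _ir_LOAD(&ctx, IR_I32, addr); /* may forward an earlier store (S2L) */
    _ir_STORE(&ctx, addr, v);                /* a store of the just-loaded value may be dropped */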
3098 void _ir_VA_START(ir_ctx *ctx, ir_ref list) argument
3100 IR_ASSERT(ctx->control);
3101 ctx->control = ir_emit2(ctx, IR_VA_START, ctx->control, list);
3104 void _ir_VA_END(ir_ctx *ctx, ir_ref list) argument
3106 IR_ASSERT(ctx->control);
3107 ctx->control = ir_emit2(ctx, IR_VA_END, ctx->control, list);
3110 void _ir_VA_COPY(ir_ctx *ctx, ir_ref dst, ir_ref src) argument
3112 IR_ASSERT(ctx->control);
3113 ctx->control = ir_emit3(ctx, IR_VA_COPY, ctx->control, dst, src);
3116 ir_ref _ir_VA_ARG(ir_ctx *ctx, ir_type type, ir_ref list) argument
3118 IR_ASSERT(ctx->control);
3119 return ctx->control = ir_emit2(ctx, IR_OPT(IR_VA_ARG, type), ctx->control, list);
3122 ir_ref _ir_BLOCK_BEGIN(ir_ctx *ctx) argument
3124 IR_ASSERT(ctx->control);
3125 return ctx->control = ir_emit1(ctx, IR_OPT(IR_BLOCK_BEGIN, IR_ADDR), ctx->control);