Lines Matching refs:ctx

41 static int ir_assign_virtual_registers_slow(ir_ctx *ctx)  in ir_assign_virtual_registers_slow()  argument
52 vregs = ir_mem_calloc(ctx->insns_count, sizeof(ir_ref)); in ir_assign_virtual_registers_slow()
54 for (b = 1, bb = ctx->cfg_blocks + b; b <= ctx->cfg_blocks_count; b++, bb++) { in ir_assign_virtual_registers_slow()
59 insn = ctx->ir_base + i; in ir_assign_virtual_registers_slow()
65 …if (((flags & IR_OP_FLAG_DATA) && insn->op != IR_VAR && (insn->op != IR_PARAM || ctx->use_lists[i]… in ir_assign_virtual_registers_slow()
66 || ((flags & IR_OP_FLAG_MEM) && ctx->use_lists[i].count > 1)) { in ir_assign_virtual_registers_slow()
67 if (!ctx->rules || !(ctx->rules[i] & (IR_FUSED|IR_SKIPPED))) { in ir_assign_virtual_registers_slow()
76 ctx->vregs_count = vregs_count; in ir_assign_virtual_registers_slow()
77 ctx->vregs = vregs; in ir_assign_virtual_registers_slow()
82 int ir_assign_virtual_registers(ir_ctx *ctx) in ir_assign_virtual_registers() argument
89 if (!ctx->rules) { in ir_assign_virtual_registers()
90 return ir_assign_virtual_registers_slow(ctx); in ir_assign_virtual_registers()
94 vregs = ir_mem_malloc(ctx->insns_count * sizeof(ir_ref)); in ir_assign_virtual_registers()
96 for (i = 1, insn = &ctx->ir_base[1]; i < ctx->insns_count; i++, insn++) { in ir_assign_virtual_registers()
99 if (ctx->rules[i] && !(ctx->rules[i] & (IR_FUSED|IR_SKIPPED))) { in ir_assign_virtual_registers()
103 || ((flags & IR_OP_FLAG_MEM) && ctx->use_lists[i].count > 1)) { in ir_assign_virtual_registers()
110 ctx->vregs_count = vregs_count; in ir_assign_virtual_registers()
111 ctx->vregs = vregs; in ir_assign_virtual_registers()
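
Both ir_assign_virtual_registers paths above do the same job: give every instruction that produces a live value a dense 1-based virtual-register number, skipping anything the backend fused or skipped. A minimal stand-alone sketch of that numbering, with a hypothetical simplified insn_t standing in for the real ir_insn/ir_op_flags/ctx->rules machinery:

    #include <stdint.h>
    #include <stdlib.h>

    /* Hypothetical simplified model: the real code derives these two bits
     * from ir_op_flags and ctx->rules (IR_FUSED|IR_SKIPPED). */
    typedef struct { int produces_value; int fused_or_skipped; } insn_t;

    /* Returns the number of vregs; vregs[i] == 0 means "no register". */
    static uint32_t assign_vregs(const insn_t *insns, uint32_t insns_count,
                                 uint32_t **vregs_out)
    {
        uint32_t *vregs = calloc(insns_count, sizeof(uint32_t));
        uint32_t vregs_count = 0;

        for (uint32_t i = 1; i < insns_count; i++) {      /* ref 0 is reserved */
            if (insns[i].produces_value && !insns[i].fused_or_skipped) {
                vregs[i] = ++vregs_count;                 /* dense 1..vregs_count */
            }
        }
        *vregs_out = vregs;
        return vregs_count;
    }
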
118 static ir_live_interval *ir_new_live_range(ir_ctx *ctx, int v, ir_live_pos start, ir_live_pos end) in ir_new_live_range() argument
120 ir_live_interval *ival = ir_arena_alloc(&ctx->arena, sizeof(ir_live_interval)); in ir_new_live_range()
133 ctx->live_intervals[v] = ival; in ir_new_live_range()
137 static ir_live_interval *ir_add_live_range(ir_ctx *ctx, int v, ir_live_pos start, ir_live_pos end) in ir_add_live_range() argument
139 ir_live_interval *ival = ctx->live_intervals[v]; in ir_add_live_range()
143 return ir_new_live_range(ctx, v, start, end); in ir_add_live_range()
166 next->next = ctx->unused_ranges; in ir_add_live_range()
167 ctx->unused_ranges = next; in ir_add_live_range()
183 if (ctx->unused_ranges) { in ir_add_live_range()
185 q = ctx->unused_ranges; in ir_add_live_range()
186 ctx->unused_ranges = q->next; in ir_add_live_range()
188 q = ir_arena_alloc(&ctx->arena, sizeof(ir_live_range)); in ir_add_live_range()
198 if (ctx->unused_ranges) { in ir_add_live_range()
200 q = ctx->unused_ranges; in ir_add_live_range()
201 ctx->unused_ranges = q->next; in ir_add_live_range()
203 q = ir_arena_alloc(&ctx->arena, sizeof(ir_live_range)); in ir_add_live_range()
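
ir_add_live_range keeps each interval's ranges as a sorted, coalesced singly linked list, and recycles freed ir_live_range nodes through ctx->unused_ranges instead of returning them to the arena. The same idea in isolation, with hypothetical plain types:

    #include <stdlib.h>

    typedef struct range { int start, end; struct range *next; } range_t;

    /* Insert [start, end) into a sorted list, merging overlapping or adjacent
     * ranges; freed nodes go onto a caller-owned free list (cf. unused_ranges). */
    static range_t *add_range(range_t *head, int start, int end, range_t **free_list)
    {
        range_t **p = &head;

        while (*p && (*p)->end < start) p = &(*p)->next;   /* skip disjoint prefix */
        if (*p && (*p)->start <= end) {                    /* overlap: grow in place */
            range_t *r = *p;
            if (start < r->start) r->start = start;
            if (end > r->end) r->end = end;
            while (r->next && r->next->start <= r->end) {  /* absorb followers */
                range_t *victim = r->next;
                if (victim->end > r->end) r->end = victim->end;
                r->next = victim->next;
                victim->next = *free_list;                 /* recycle the node */
                *free_list = victim;
            }
            return head;
        }
        range_t *n = *free_list ? *free_list : malloc(sizeof(*n));
        if (*free_list) *free_list = n->next;
        n->start = start; n->end = end; n->next = *p;
        *p = n;
        return head;
    }
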
214 IR_ALWAYS_INLINE ir_live_interval *ir_add_prev_live_range(ir_ctx *ctx, int v, ir_live_pos start, ir… in ir_add_prev_live_range() argument
216 ir_live_interval *ival = ctx->live_intervals[v]; in ir_add_prev_live_range()
222 return ir_add_live_range(ctx, v, start, end); in ir_add_prev_live_range()
225 static void ir_add_fixed_live_range(ir_ctx *ctx, ir_reg reg, ir_live_pos start, ir_live_pos end) in ir_add_fixed_live_range() argument
227 int v = ctx->vregs_count + 1 + reg; in ir_add_fixed_live_range()
228 ir_live_interval *ival = ctx->live_intervals[v]; in ir_add_fixed_live_range()
232 ival = ir_arena_alloc(&ctx->arena, sizeof(ir_live_interval)); in ir_add_fixed_live_range()
244 ctx->live_intervals[v] = ival; in ir_add_fixed_live_range()
246 if (ctx->unused_ranges) { in ir_add_fixed_live_range()
248 q = ctx->unused_ranges; in ir_add_fixed_live_range()
249 ctx->unused_ranges = q->next; in ir_add_fixed_live_range()
251 q = ir_arena_alloc(&ctx->arena, sizeof(ir_live_range)); in ir_add_fixed_live_range()
263 ir_add_live_range(ctx, v, start, end); in ir_add_fixed_live_range()
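
Fixed (physical-register) intervals live in the same ctx->live_intervals array as the virtual ones, right behind them: line 227 above computes the index as vregs_count + 1 + reg, matching the calloc of vregs_count + 1 + IR_REG_NUM + 2 entries further down. A one-line helper makes the layout explicit (the name is illustrative):

    /* live_intervals layout: [0] holds temporaries, [1..vregs_count] the
     * virtual registers, then one fixed interval per physical register. */
    static inline int fixed_interval_index(int vregs_count, int reg)
    {
        return vregs_count + 1 + reg;
    }
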
267 static void ir_add_tmp(ir_ctx *ctx, ir_ref ref, ir_ref tmp_ref, int32_t tmp_op_num, ir_tmp_reg tmp_… in ir_add_tmp() argument
269 ir_live_interval *ival = ir_arena_alloc(&ctx->arena, sizeof(ir_live_interval)); in ir_add_tmp()
281 if (!ctx->live_intervals[0]) { in ir_add_tmp()
283 ctx->live_intervals[0] = ival; in ir_add_tmp()
284 } else if (ival->range.start >= ctx->live_intervals[0]->range.start) { in ir_add_tmp()
285 ir_live_interval *prev = ctx->live_intervals[0]; in ir_add_tmp()
293 ir_live_interval *next = ctx->live_intervals[0]; in ir_add_tmp()
296 ctx->live_intervals[0] = ival; in ir_add_tmp()
301 static bool ir_has_tmp(ir_ctx *ctx, ir_ref ref, int32_t op_num) in ir_has_tmp() argument
303 ir_live_interval *ival = ctx->live_intervals[0]; in ir_has_tmp()
316 static ir_live_interval *ir_fix_live_range(ir_ctx *ctx, int v, ir_live_pos old_start, ir_live_pos n… in ir_fix_live_range() argument
318 ir_live_interval *ival = ctx->live_intervals[v]; in ir_fix_live_range()
331 static void ir_add_use_pos(ir_ctx *ctx, ir_live_interval *ival, ir_use_pos *use_pos) in ir_add_use_pos() argument
352 IR_ALWAYS_INLINE void ir_add_use(ir_ctx *ctx, ir_live_interval *ival, int op_num, ir_live_pos pos, … in ir_add_use() argument
356 use_pos = ir_arena_alloc(&ctx->arena, sizeof(ir_use_pos)); in ir_add_use()
370 ir_add_use_pos(ctx, ival, use_pos); in ir_add_use()
373 static void ir_add_phi_use(ir_ctx *ctx, ir_live_interval *ival, int op_num, ir_live_pos pos, ir_ref… in ir_add_phi_use() argument
378 use_pos = ir_arena_alloc(&ctx->arena, sizeof(ir_use_pos)); in ir_add_phi_use()
385 ir_add_use_pos(ctx, ival, use_pos); in ir_add_phi_use()
388 static void ir_add_hint(ir_ctx *ctx, ir_ref ref, ir_live_pos pos, ir_reg hint) in ir_add_hint() argument
390 ir_live_interval *ival = ctx->live_intervals[ctx->vregs[ref]]; in ir_add_hint()
407 static void ir_hint_propagation(ir_ctx *ctx) in ir_hint_propagation() argument
414 for (i = ctx->vregs_count; i > 0; i--) { in ir_hint_propagation()
415 ival = ctx->live_intervals[i]; in ir_hint_propagation()
427 ir_add_hint(ctx, hint_use_pos->hint_ref, hint_use_pos->pos, use_pos->hint); in ir_hint_propagation()
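
ir_hint_propagation walks the intervals from the highest vreg down (definitions precede uses in the linearized IR, so users are visited before the values they want to share a register with) and replays physical-register hints onto the earlier reference recorded in hint_ref. A rough sketch of that backward copy; the real function pairs a pending hint_ref use with a later hinted use, which is folded onto one use position here for brevity, and all types are stand-ins:

    #include <stdint.h>
    #include <stddef.h>

    #define REG_NONE (-1)

    typedef struct use_pos {
        int8_t hint;             /* physical-register hint, REG_NONE if absent */
        int    hint_ref;         /* earlier IR ref that should get the same reg */
        struct use_pos *next;
    } use_pos_t;

    typedef struct { use_pos_t *use_pos; } interval_t;

    /* vreg_of[] maps an IR ref to its vreg; both arrays are assumptions. */
    static void propagate_hints(interval_t **intervals, const uint32_t *vreg_of,
                                int vregs_count)
    {
        for (int v = vregs_count; v > 0; v--) {
            interval_t *ival = intervals[v];
            if (!ival) continue;
            for (use_pos_t *p = ival->use_pos; p; p = p->next) {
                if (p->hint != REG_NONE && p->hint_ref > 0) {
                    interval_t *def = intervals[vreg_of[p->hint_ref]];
                    for (use_pos_t *d = def ? def->use_pos : NULL; d; d = d->next) {
                        if (d->hint == REG_NONE) { d->hint = p->hint; break; }
                    }
                }
            }
        }
    }
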
439 static void ir_add_osr_entry_loads(ir_ctx *ctx, ir_block *bb, ir_bitset live, uint32_t len, uint32_… in ir_add_osr_entry_loads() argument
443 ir_list *list = (ir_list*)ctx->osr_entry_loads; in ir_add_osr_entry_loads()
448 ir_use_pos *use_pos = ctx->live_intervals[i]->use_pos; in ir_add_osr_entry_loads()
452 ir_ref *ops = ctx->ir_base[ref].ops; in ir_add_osr_entry_loads()
456 if (ctx->ir_base[ref].op == IR_PARAM) { in ir_add_osr_entry_loads()
459 if (ctx->binding) { in ir_add_osr_entry_loads()
460 ir_ref var = ir_binding_find(ctx, ref); in ir_add_osr_entry_loads()
466 if (!ctx->osr_entry_loads) { in ir_add_osr_entry_loads()
467 list = ctx->osr_entry_loads = ir_mem_malloc(sizeof(ir_list)); in ir_add_osr_entry_loads()
478 …fprintf(stderr, "ENTRY %d (block %d start %d) - live var %d\n", ctx->ir_base[bb->start].op2, b, bb… in ir_add_osr_entry_loads()
486 ir_list_set(list, ir_list_len(ctx->osr_entry_loads) - (count + 1), count); in ir_add_osr_entry_loads()
490 ir_ref ref = ctx->ir_base[bb->start].op1; in ir_add_osr_entry_loads()
491 ir_add_fixed_live_range(ctx, IR_REG_ALL, in ir_add_osr_entry_loads()
498 static void ir_add_fusion_ranges(ir_ctx *ctx, ir_ref ref, ir_ref input, ir_block *bb, ir_bitset liv… in ir_add_fusion_ranges() argument
512 IR_ASSERT(input > 0 && ctx->rules[input] & IR_FUSED); in ir_add_fusion_ranges()
514 if (!(ctx->rules[input] & IR_SIMPLE)) { in ir_add_fusion_ranges()
515 def_flags = ir_get_target_constraints(ctx, input, &constraints); in ir_add_fusion_ranges()
520 ir_add_tmp(ctx, ref, input, constraints.tmp_regs[n].num, constraints.tmp_regs[n]); in ir_add_fusion_ranges()
523 ir_add_fixed_live_range(ctx, constraints.tmp_regs[n].reg, in ir_add_fusion_ranges()
533 insn = &ctx->ir_base[input]; in ir_add_fusion_ranges()
546 uint32_t v = ctx->vregs[child]; in ir_add_fusion_ranges()
560 ival = ir_add_live_range(ctx, v, in ir_add_fusion_ranges()
563 ival = ctx->live_intervals[v]; in ir_add_fusion_ranges()
565 ir_add_use(ctx, ival, j, use_pos, reg, use_flags, -input); in ir_add_fusion_ranges()
566 } else if (ctx->rules[child] & IR_FUSED) { in ir_add_fusion_ranges()
569 } else if (ctx->rules[child] == (IR_SKIPPED|IR_RLOAD)) { in ir_add_fusion_ranges()
570 ir_set_alocated_reg(ctx, input, j, ctx->ir_base[child].op2); in ir_add_fusion_ranges()
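
ir_add_fusion_ranges exists because a fused instruction never gets a vreg of its own: liveness has to look through the fused subtree and attribute every non-fused leaf operand to the fusing root. The real code does this with an explicit stack; recursion keeps this sketch short, and the node type is a hypothetical simplification:

    #include <stdio.h>

    typedef struct node { int id, fused, nops, vreg; struct node *ops[3]; } node_t;

    /* Stand-in for the ir_add_use()/ir_add_live_range() calls on the leaf. */
    static void visit_leaf(node_t *leaf, node_t *root)
    {
        printf("node %d uses vreg %d\n", root->id, leaf->vreg);
    }

    static void walk_fused(node_t *n, node_t *root)
    {
        for (int j = 0; j < n->nops; j++) {
            node_t *child = n->ops[j];
            if (!child) continue;
            if (child->fused) walk_fused(child, root);      /* look through fusion */
            else if (child->vreg) visit_leaf(child, root);  /* real operand */
        }
    }
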
581 int ir_compute_live_ranges(ir_ctx *ctx) in ir_compute_live_ranges() argument
596 if (!(ctx->flags2 & IR_LINEAR) || !ctx->vregs) { in ir_compute_live_ranges()
600 if (ctx->rules) { in ir_compute_live_ranges()
601 ctx->regs = ir_mem_malloc(sizeof(ir_regs) * ctx->insns_count); in ir_compute_live_ranges()
602 memset(ctx->regs, IR_REG_NONE, sizeof(ir_regs) * ctx->insns_count); in ir_compute_live_ranges()
606 ctx->vars = IR_UNUSED; in ir_compute_live_ranges()
609 ctx->flags2 &= ~IR_LR_HAVE_DESSA_MOVES; in ir_compute_live_ranges()
610 len = ir_bitset_len(ctx->vregs_count + 1); in ir_compute_live_ranges()
611 bb_live = ir_mem_malloc((ctx->cfg_blocks_count + 1) * len * sizeof(ir_bitset_base_t)); in ir_compute_live_ranges()
614 ctx->live_intervals = ir_mem_calloc(ctx->vregs_count + 1 + IR_REG_NUM + 2, sizeof(ir_live_interval… in ir_compute_live_ranges()
617 visited = ir_bitset_malloc(ctx->cfg_blocks_count + 1); in ir_compute_live_ranges()
620 if (!ctx->arena) { in ir_compute_live_ranges()
621 ctx->arena = ir_arena_create(16 * 1024); in ir_compute_live_ranges()
625 for (b = ctx->cfg_blocks_count; b > 0; b--) { in ir_compute_live_ranges()
626 bb = &ctx->cfg_blocks[b]; in ir_compute_live_ranges()
638 p = &ctx->cfg_edges[bb->successors]; in ir_compute_live_ranges()
647 if (EXPECTED(succ > b) && EXPECTED(!(ctx->cfg_blocks[succ].flags & IR_BB_ENTRY))) { in ir_compute_live_ranges()
650 IR_ASSERT(succ > b || (ctx->cfg_blocks[succ].flags & IR_BB_LOOP_HEADER)); in ir_compute_live_ranges()
656 if (EXPECTED(succ > b) && EXPECTED(!(ctx->cfg_blocks[succ].flags & IR_BB_ENTRY))) { in ir_compute_live_ranges()
659 IR_ASSERT(succ > b || (ctx->cfg_blocks[succ].flags & IR_BB_LOOP_HEADER)); in ir_compute_live_ranges()
667 ir_add_prev_live_range(ctx, i, in ir_compute_live_ranges()
675 succ = ctx->cfg_edges[bb->successors]; in ir_compute_live_ranges()
676 succ_bb = &ctx->cfg_blocks[succ]; in ir_compute_live_ranges()
678 ir_use_list *use_list = &ctx->use_lists[succ_bb->start]; in ir_compute_live_ranges()
680 k = ir_phi_input_number(ctx, succ_bb, b); in ir_compute_live_ranges()
683 ir_ref use = ctx->use_edges[use_list->refs + ref]; in ir_compute_live_ranges()
684 insn = &ctx->ir_base[use]; in ir_compute_live_ranges()
688 uint32_t v = ctx->vregs[input]; in ir_compute_live_ranges()
694 ival = ir_add_prev_live_range(ctx, v, in ir_compute_live_ranges()
697 ir_add_phi_use(ctx, ival, k, IR_DEF_LIVE_POS_FROM_REF(bb->end), use); in ir_compute_live_ranges()
706 insn = &ctx->ir_base[ref]; in ir_compute_live_ranges()
708 ref = ctx->prev_ref[ref]; in ir_compute_live_ranges()
710 for (; ref > bb->start; ref = ctx->prev_ref[ref]) { in ir_compute_live_ranges()
717 if (ctx->rules) { in ir_compute_live_ranges()
720 if (ctx->rules[ref] & (IR_FUSED|IR_SKIPPED)) { in ir_compute_live_ranges()
721 if (((ctx->rules[ref] & IR_RULE_MASK) == IR_VAR in ir_compute_live_ranges()
722 || (ctx->rules[ref] & IR_RULE_MASK) == IR_ALLOCA) in ir_compute_live_ranges()
723 && ctx->use_lists[ref].count > 0) { in ir_compute_live_ranges()
724 insn = &ctx->ir_base[ref]; in ir_compute_live_ranges()
726 insn->op3 = ctx->vars; in ir_compute_live_ranges()
727 ctx->vars = ref; in ir_compute_live_ranges()
733 def_flags = ir_get_target_constraints(ctx, ref, &constraints); in ir_compute_live_ranges()
738 ir_add_tmp(ctx, ref, ref, constraints.tmp_regs[n].num, constraints.tmp_regs[n]); in ir_compute_live_ranges()
741 ir_add_fixed_live_range(ctx, constraints.tmp_regs[n].reg, in ir_compute_live_ranges()
752 insn = &ctx->ir_base[ref]; in ir_compute_live_ranges()
753 v = ctx->vregs[ref]; in ir_compute_live_ranges()
766 ir_add_fixed_live_range(ctx, reg, IR_START_LIVE_POS_FROM_REF(bb->start), def_pos); in ir_compute_live_ranges()
769 if (!IR_IS_CONST_REF(insn->op1) && ctx->vregs[insn->op1]) { in ir_compute_live_ranges()
778 ctx->live_intervals[v]->flags |= IR_LIVE_INTERVAL_MEM_PARAM; in ir_compute_live_ranges()
781 ctx->live_intervals[v]->flags |= IR_LIVE_INTERVAL_MEM_LOAD; in ir_compute_live_ranges()
788 ival = ir_fix_live_range(ctx, v, in ir_compute_live_ranges()
791 ir_add_use(ctx, ival, 0, def_pos, reg, def_flags, hint_ref); in ir_compute_live_ranges()
796 ival = ctx->live_intervals[v]; in ir_compute_live_ranges()
799 … ival = ir_add_live_range(ctx, v, IR_DEF_LIVE_POS_FROM_REF(ref), IR_USE_LIVE_POS_FROM_REF(ref)); in ir_compute_live_ranges()
802 … ir_add_use(ctx, ival, 0, IR_DEF_LIVE_POS_FROM_REF(ref), IR_REG_NONE, IR_USE_SHOULD_BE_IN_REG, 0); in ir_compute_live_ranges()
807 IR_ASSERT(insn->op != IR_PHI && (!ctx->rules || !(ctx->rules[ref] & (IR_FUSED|IR_SKIPPED)))); in ir_compute_live_ranges()
823 v = ctx->vregs[input]; in ir_compute_live_ranges()
828 ir_add_fixed_live_range(ctx, reg, use_pos, use_pos + IR_USE_SUB_REF); in ir_compute_live_ranges()
832 IR_ASSERT(ctx->vregs[ref]); in ir_compute_live_ranges()
843 ival = ir_add_live_range(ctx, v, IR_START_LIVE_POS_FROM_REF(bb->start), use_pos); in ir_compute_live_ranges()
845 ival = ctx->live_intervals[v]; in ir_compute_live_ranges()
847 ir_add_use(ctx, ival, j, use_pos, reg, IR_USE_FLAGS(def_flags, j), hint_ref); in ir_compute_live_ranges()
848 } else if (ctx->rules) { in ir_compute_live_ranges()
849 if (ctx->rules[input] & IR_FUSED) { in ir_compute_live_ranges()
850 ir_add_fusion_ranges(ctx, ref, input, bb, live); in ir_compute_live_ranges()
851 } else if (ctx->rules[input] == (IR_SKIPPED|IR_RLOAD)) { in ir_compute_live_ranges()
852 ir_set_alocated_reg(ctx, ref, j, ctx->ir_base[input].op2); in ir_compute_live_ranges()
857 ir_add_fixed_live_range(ctx, reg, use_pos, use_pos + IR_USE_SUB_REF); in ir_compute_live_ranges()
866 uint32_t bb_set_len = ir_bitset_len(ctx->cfg_blocks_count + 1); in ir_compute_live_ranges()
872 loops = ir_bitset_malloc(ctx->cfg_blocks_count + 1); in ir_compute_live_ranges()
873 ir_bitqueue_init(&queue, ctx->cfg_blocks_count + 1); in ir_compute_live_ranges()
881 child_bb = &ctx->cfg_blocks[child]; in ir_compute_live_ranges()
886 ir_add_live_range(ctx, i, in ir_compute_live_ranges()
893 child_bb = &ctx->cfg_blocks[child]; in ir_compute_live_ranges()
906 if (ctx->entries) { in ir_compute_live_ranges()
907 for (i = 0; i < ctx->entries_count; i++) { in ir_compute_live_ranges()
908 b = ctx->entries[i]; in ir_compute_live_ranges()
909 bb = &ctx->cfg_blocks[b]; in ir_compute_live_ranges()
911 ir_add_osr_entry_loads(ctx, bb, live, len, b); in ir_compute_live_ranges()
913 if (ctx->osr_entry_loads) { in ir_compute_live_ranges()
914 ir_list_push((ir_list*)ctx->osr_entry_loads, 0); in ir_compute_live_ranges()
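
This first (bitset-based) ir_compute_live_ranges follows the textbook scheme: visit blocks in reverse order, seed each block's live set from its successors (with special handling for loop headers, ENTRY blocks, and PHI inputs), then walk the instructions bottom-up, clearing each definition and setting each use while extending the corresponding ranges. Its skeleton, reduced to plain bitset liveness for a single block with simplified stand-in types:

    #include <stdint.h>
    #include <string.h>

    #define WORDS(n) (((n) + 31) / 32)

    static void bit_set(uint32_t *s, int i)   { s[i / 32] |= 1u << (i % 32); }
    static void bit_clear(uint32_t *s, int i) { s[i / 32] &= ~(1u << (i % 32)); }

    typedef struct { int def_vreg; int use_vregs[3]; int nuses; } simple_insn_t;

    static void block_liveness(uint32_t *live, uint32_t *const *succ_live, int nsucc,
                               const simple_insn_t *insns, int ninsns, int nvregs)
    {
        memset(live, 0, WORDS(nvregs + 1) * sizeof(uint32_t));
        for (int s = 0; s < nsucc; s++)                 /* live-out = U live-in(succ) */
            for (int w = 0; w < WORDS(nvregs + 1); w++)
                live[w] |= succ_live[s][w];

        for (int i = ninsns - 1; i >= 0; i--) {         /* bottom-up over the block */
            if (insns[i].def_vreg) bit_clear(live, insns[i].def_vreg);
            for (int u = 0; u < insns[i].nuses; u++)
                bit_set(live, insns[i].use_vregs[u]);
        }
        /* `live` is now the block's live-in set. */
    }
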
941 IR_ALWAYS_INLINE uint32_t ir_live_out_top(ir_ctx *ctx, uint32_t *live_outs, ir_list *live_lists, ui… in ir_live_out_top() argument
954 IR_ALWAYS_INLINE void ir_live_out_push(ir_ctx *ctx, uint32_t *live_outs, ir_list *live_lists, uint3… in ir_live_out_push() argument
957 ir_block *bb = &ctx->cfg_blocks[b]; in ir_live_out_push()
959 ir_add_prev_live_range(ctx, v, in ir_live_out_push()
980 static void ir_compute_live_sets(ir_ctx *ctx, uint32_t *live_outs, ir_list *live_lists) in ir_compute_live_sets() argument
989 for (i = ctx->insns_count - 1; i > 0; i--) { in ir_compute_live_sets()
990 uint32_t v = ctx->vregs[i]; in ir_compute_live_sets()
993 uint32_t def_block = ctx->cfg_map[i]; in ir_compute_live_sets()
994 ir_use_list *use_list = &ctx->use_lists[i]; in ir_compute_live_sets()
998 for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) { in ir_compute_live_sets()
1000 ir_insn *insn = &ctx->ir_base[use]; in ir_compute_live_sets()
1005 ir_ref *q = ctx->ir_base[insn->op1].ops + 1; /* MERGE inputs */ in ir_compute_live_sets()
1009 uint32_t pred_block = ctx->cfg_map[*q]; in ir_compute_live_sets()
1011 if (ir_live_out_top(ctx, live_outs, live_lists, pred_block) != v) { in ir_compute_live_sets()
1012 ir_live_out_push(ctx, live_outs, live_lists, pred_block, v); in ir_compute_live_sets()
1019 } else if (ctx->rules && UNEXPECTED(ctx->rules[use] & IR_FUSED)) { in ir_compute_live_sets()
1021 ir_use_list *use_list = &ctx->use_lists[use]; in ir_compute_live_sets()
1024 for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) { in ir_compute_live_sets()
1027 if (ctx->rules[use] & IR_FUSED) { in ir_compute_live_sets()
1030 uint32_t use_block = ctx->cfg_map[use]; in ir_compute_live_sets()
1032 if (def_block != use_block && ir_live_out_top(ctx, live_outs, live_lists, use_block) != v) { in ir_compute_live_sets()
1043 uint32_t use_block = ctx->cfg_map[use]; in ir_compute_live_sets()
1046 if (def_block != use_block && ir_live_out_top(ctx, live_outs, live_lists, use_block) != v) { in ir_compute_live_sets()
1055 ir_block *bb = &ctx->cfg_blocks[b]; in ir_compute_live_sets()
1060 ir_insn *insn = &ctx->ir_base[bb->start]; in ir_compute_live_sets()
1063 IR_ASSERT(insn->op3 >= 0 && insn->op3 < (ir_ref)ctx->entries_count); in ir_compute_live_sets()
1067 ir_list_push_unchecked(live_lists, live_outs[ctx->cfg_blocks_count + 1 + insn->op3]); in ir_compute_live_sets()
1069 live_outs[ctx->cfg_blocks_count + 1 + insn->op3] = ir_list_len(live_lists) - 1; in ir_compute_live_sets()
1072 for (p = &ctx->cfg_edges[bb->predecessors]; n > 0; p++, n--) { in ir_compute_live_sets()
1076 if (ir_live_out_top(ctx, live_outs, live_lists, pred_block) != v) { in ir_compute_live_sets()
1078 ir_live_out_push(ctx, live_outs, live_lists, pred_block, v); in ir_compute_live_sets()
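
The newer ir_compute_live_sets variant avoids per-block bitsets: for every value it walks from each use block back toward the definition block, appending the vreg to each visited block's live list; the ir_live_out_top check makes each push idempotent, which also terminates the walk on loops. A recursive sketch of that path-marking (the real code iterates over the live_outs/live_lists arrays instead; vregs are assumed >= 1, so 0 can serve as the "nothing pushed yet" sentinel):

    #include <stdint.h>
    #include <stdlib.h>

    typedef struct { int npreds; int *preds; } blk_t;

    static void mark_live(const blk_t *blocks, int def_block, int use_block,
                          uint32_t v, uint32_t *live_top,
                          uint32_t **live_out, uint32_t *live_len)
    {
        if (use_block == def_block || live_top[use_block] == v) return;
        live_top[use_block] = v;                        /* push at most once */
        live_out[use_block] = realloc(live_out[use_block],
                                      ++live_len[use_block] * sizeof(uint32_t));
        live_out[use_block][live_len[use_block] - 1] = v;
        for (int i = 0; i < blocks[use_block].npreds; i++)  /* walk toward the def */
            mark_live(blocks, def_block, blocks[use_block].preds[i],
                      v, live_top, live_out, live_len);
    }
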
1092 static void ir_add_osr_entry_loads(ir_ctx *ctx, ir_block *bb, uint32_t pos, ir_list *live_lists, ui… in ir_add_osr_entry_loads() argument
1096 ir_list *list = (ir_list*)ctx->osr_entry_loads; in ir_add_osr_entry_loads()
1104 ir_use_pos *use_pos = ctx->live_intervals[i]->use_pos; in ir_add_osr_entry_loads()
1108 ir_ref *ops = ctx->ir_base[ref].ops; in ir_add_osr_entry_loads()
1112 if (ctx->ir_base[ref].op == IR_PARAM) { in ir_add_osr_entry_loads()
1115 if (ctx->binding) { in ir_add_osr_entry_loads()
1116 ir_ref var = ir_binding_find(ctx, ref); in ir_add_osr_entry_loads()
1122 if (!ctx->osr_entry_loads) { in ir_add_osr_entry_loads()
1123 list = ctx->osr_entry_loads = ir_mem_malloc(sizeof(ir_list)); in ir_add_osr_entry_loads()
1134 …fprintf(stderr, "ENTRY %d (block %d start %d) - live var %d\n", ctx->ir_base[bb->start].op2, b, bb… in ir_add_osr_entry_loads()
1142 ir_list_set(list, ir_list_len(ctx->osr_entry_loads) - (count + 1), count); in ir_add_osr_entry_loads()
1146 ir_ref ref = ctx->ir_base[bb->start].op1; in ir_add_osr_entry_loads()
1147 ir_add_fixed_live_range(ctx, IR_REG_ALL, in ir_add_osr_entry_loads()
1154 static void ir_add_fusion_ranges(ir_ctx *ctx, ir_ref ref, ir_ref input, ir_block *bb, uint32_t *liv… in ir_add_fusion_ranges() argument
1169 IR_ASSERT(input > 0 && ctx->rules[input] & IR_FUSED); in ir_add_fusion_ranges()
1171 if (!(ctx->rules[input] & IR_SIMPLE)) { in ir_add_fusion_ranges()
1172 def_flags = ir_get_target_constraints(ctx, input, &constraints); in ir_add_fusion_ranges()
1177 ir_add_tmp(ctx, ref, input, constraints.tmp_regs[n].num, constraints.tmp_regs[n]); in ir_add_fusion_ranges()
1180 ir_add_fixed_live_range(ctx, constraints.tmp_regs[n].reg, in ir_add_fusion_ranges()
1190 insn = &ctx->ir_base[input]; in ir_add_fusion_ranges()
1204 uint32_t v = ctx->vregs[child]; in ir_add_fusion_ranges()
1217 ival = ir_add_live_range(ctx, v, in ir_add_fusion_ranges()
1220 ival = ctx->live_intervals[v]; in ir_add_fusion_ranges()
1223 ir_add_use(ctx, ival, j, use_pos, reg, use_flags, -input); in ir_add_fusion_ranges()
1224 } else if (ctx->rules[child] & IR_FUSED) { in ir_add_fusion_ranges()
1227 } else if (ctx->rules[child] == (IR_SKIPPED|IR_RLOAD)) { in ir_add_fusion_ranges()
1228 ir_set_alocated_reg(ctx, input, j, ctx->ir_base[child].op2); in ir_add_fusion_ranges()
1239 int ir_compute_live_ranges(ir_ctx *ctx) in ir_compute_live_ranges() argument
1250 if (!(ctx->flags2 & IR_LINEAR) || !ctx->vregs) { in ir_compute_live_ranges()
1254 if (ctx->rules) { in ir_compute_live_ranges()
1255 ctx->regs = ir_mem_malloc(sizeof(ir_regs) * ctx->insns_count); in ir_compute_live_ranges()
1256 memset(ctx->regs, IR_REG_NONE, sizeof(ir_regs) * ctx->insns_count); in ir_compute_live_ranges()
1260 ctx->vars = IR_UNUSED; in ir_compute_live_ranges()
1263 ctx->flags2 &= ~IR_LR_HAVE_DESSA_MOVES; in ir_compute_live_ranges()
1266 ctx->live_intervals = ir_mem_calloc(ctx->vregs_count + 1 + IR_REG_NUM + 2, sizeof(ir_live_interval… in ir_compute_live_ranges()
1268 if (!ctx->arena) { in ir_compute_live_ranges()
1269 ctx->arena = ir_arena_create(16 * 1024); in ir_compute_live_ranges()
1272 live_outs = ir_mem_calloc(ctx->cfg_blocks_count + 1 + ctx->entries_count, sizeof(uint32_t)); in ir_compute_live_ranges()
1274 ir_compute_live_sets(ctx, live_outs, &live_lists); in ir_compute_live_ranges()
1275 live_in_block = ir_mem_calloc(ctx->vregs_count + 1, sizeof(uint32_t)); in ir_compute_live_ranges()
1278 for (b = ctx->cfg_blocks_count; b > 0; b--) { in ir_compute_live_ranges()
1279 bb = &ctx->cfg_blocks[b]; in ir_compute_live_ranges()
1287 ir_add_prev_live_range(ctx, i, in ir_compute_live_ranges()
1295 succ = ctx->cfg_edges[bb->successors]; in ir_compute_live_ranges()
1296 succ_bb = &ctx->cfg_blocks[succ]; in ir_compute_live_ranges()
1298 ir_use_list *use_list = &ctx->use_lists[succ_bb->start]; in ir_compute_live_ranges()
1301 k = ir_phi_input_number(ctx, succ_bb, b); in ir_compute_live_ranges()
1304 for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) { in ir_compute_live_ranges()
1306 insn = &ctx->ir_base[use]; in ir_compute_live_ranges()
1310 uint32_t v = ctx->vregs[input]; in ir_compute_live_ranges()
1313 ival = ctx->live_intervals[v]; in ir_compute_live_ranges()
1314 ir_add_phi_use(ctx, ival, k, IR_DEF_LIVE_POS_FROM_REF(bb->end), use); in ir_compute_live_ranges()
1324 insn = &ctx->ir_base[ref]; in ir_compute_live_ranges()
1326 ref = ctx->prev_ref[ref]; in ir_compute_live_ranges()
1328 for (; ref > bb->start; ref = ctx->prev_ref[ref]) { in ir_compute_live_ranges()
1335 if (ctx->rules) { in ir_compute_live_ranges()
1338 if (ctx->rules[ref] & (IR_FUSED|IR_SKIPPED)) { in ir_compute_live_ranges()
1339 if (((ctx->rules[ref] & IR_RULE_MASK) == IR_VAR in ir_compute_live_ranges()
1340 || (ctx->rules[ref] & IR_RULE_MASK) == IR_ALLOCA) in ir_compute_live_ranges()
1341 && ctx->use_lists[ref].count > 0) { in ir_compute_live_ranges()
1342 insn = &ctx->ir_base[ref]; in ir_compute_live_ranges()
1344 insn->op3 = ctx->vars; in ir_compute_live_ranges()
1345 ctx->vars = ref; in ir_compute_live_ranges()
1351 def_flags = ir_get_target_constraints(ctx, ref, &constraints); in ir_compute_live_ranges()
1356 ir_add_tmp(ctx, ref, ref, constraints.tmp_regs[n].num, constraints.tmp_regs[n]); in ir_compute_live_ranges()
1359 ir_add_fixed_live_range(ctx, constraints.tmp_regs[n].reg, in ir_compute_live_ranges()
1370 insn = &ctx->ir_base[ref]; in ir_compute_live_ranges()
1371 v = ctx->vregs[ref]; in ir_compute_live_ranges()
1382 ir_add_fixed_live_range(ctx, reg, IR_START_LIVE_POS_FROM_REF(bb->start), def_pos); in ir_compute_live_ranges()
1385 if (!IR_IS_CONST_REF(insn->op1) && ctx->vregs[insn->op1]) { in ir_compute_live_ranges()
1398 ctx->live_intervals[v]->flags |= IR_LIVE_INTERVAL_MEM_PARAM; in ir_compute_live_ranges()
1401 ctx->live_intervals[v]->flags |= IR_LIVE_INTERVAL_MEM_LOAD; in ir_compute_live_ranges()
1406 ival = ir_fix_live_range(ctx, v, in ir_compute_live_ranges()
1409 ir_add_use(ctx, ival, 0, def_pos, reg, def_flags, hint_ref); in ir_compute_live_ranges()
1412 ival = ctx->live_intervals[v]; in ir_compute_live_ranges()
1415 … ival = ir_add_live_range(ctx, v, IR_DEF_LIVE_POS_FROM_REF(ref), IR_USE_LIVE_POS_FROM_REF(ref)); in ir_compute_live_ranges()
1418 … ir_add_use(ctx, ival, 0, IR_DEF_LIVE_POS_FROM_REF(ref), IR_REG_NONE, IR_USE_SHOULD_BE_IN_REG, 0); in ir_compute_live_ranges()
1423 IR_ASSERT(insn->op != IR_PHI && (!ctx->rules || !(ctx->rules[ref] & (IR_FUSED|IR_SKIPPED)))); in ir_compute_live_ranges()
1439 v = ctx->vregs[input]; in ir_compute_live_ranges()
1444 ir_add_fixed_live_range(ctx, reg, use_pos, use_pos + IR_USE_SUB_REF); in ir_compute_live_ranges()
1452 IR_ASSERT(ctx->vregs[ref]); in ir_compute_live_ranges()
1463 ival = ir_add_live_range(ctx, v, IR_START_LIVE_POS_FROM_REF(bb->start), use_pos); in ir_compute_live_ranges()
1465 ival = ctx->live_intervals[v]; in ir_compute_live_ranges()
1467 ir_add_use(ctx, ival, j, use_pos, reg, IR_USE_FLAGS(def_flags, j), hint_ref); in ir_compute_live_ranges()
1468 } else if (ctx->rules) { in ir_compute_live_ranges()
1469 if (ctx->rules[input] & IR_FUSED) { in ir_compute_live_ranges()
1470 ir_add_fusion_ranges(ctx, ref, input, bb, live_in_block, b); in ir_compute_live_ranges()
1472 if (ctx->rules[input] == (IR_SKIPPED|IR_RLOAD)) { in ir_compute_live_ranges()
1473 ir_set_alocated_reg(ctx, ref, j, ctx->ir_base[input].op2); in ir_compute_live_ranges()
1477 ir_add_fixed_live_range(ctx, reg, use_pos, use_pos + IR_USE_SUB_REF); in ir_compute_live_ranges()
1483 ir_add_fixed_live_range(ctx, reg, use_pos, use_pos + IR_USE_SUB_REF); in ir_compute_live_ranges()
1489 if (ctx->entries) { in ir_compute_live_ranges()
1490 for (i = 0; i < ctx->entries_count; i++) { in ir_compute_live_ranges()
1491 b = ctx->entries[i]; in ir_compute_live_ranges()
1492 bb = &ctx->cfg_blocks[b]; in ir_compute_live_ranges()
1494 ir_add_osr_entry_loads(ctx, bb, live_outs[ctx->cfg_blocks_count + 1 + i], &live_lists, b); in ir_compute_live_ranges()
1496 if (ctx->osr_entry_loads) { in ir_compute_live_ranges()
1497 ir_list_push((ir_list*)ctx->osr_entry_loads, 0); in ir_compute_live_ranges()
1533 static ir_live_pos ir_vregs_overlap(ir_ctx *ctx, uint32_t r1, uint32_t r2) in ir_vregs_overlap() argument
1535 ir_live_interval *ival1 = ctx->live_intervals[r1]; in ir_vregs_overlap()
1536 ir_live_interval *ival2 = ctx->live_intervals[r2]; in ir_vregs_overlap()
1561 static bool ir_vregs_inside(ir_ctx *ctx, uint32_t parent, uint32_t child) in ir_vregs_inside() argument
1563 ir_live_interval *child_ival = ctx->live_intervals[child]; in ir_vregs_inside()
1564 ir_live_interval *parent_ival = ctx->live_intervals[parent]; in ir_vregs_inside()
1578 static void ir_vregs_join(ir_ctx *ctx, uint32_t r1, uint32_t r2) in ir_vregs_join() argument
1580 ir_live_interval *ival = ctx->live_intervals[r2]; in ir_vregs_join()
1589 ir_add_live_range(ctx, r1, live_range->start, live_range->end); in ir_vregs_join()
1593 live_range->next = ctx->unused_ranges; in ir_vregs_join()
1594 ctx->unused_ranges = live_range; in ir_vregs_join()
1595 ir_add_live_range(ctx, r1, live_range->start, live_range->end); in ir_vregs_join()
1600 prev = &ctx->live_intervals[r1]->use_pos; in ir_vregs_join()
1603 if (use_pos->hint_ref > 0 && ctx->vregs[use_pos->hint_ref] == r1) { in ir_vregs_join()
1609 if ((*prev)->hint_ref > 0 && ctx->vregs[(*prev)->hint_ref] == r2) { in ir_vregs_join()
1622 if (use_pos->hint_ref > 0 && ctx->vregs[use_pos->hint_ref] == r2) { in ir_vregs_join()
1628 ctx->live_intervals[r1]->flags |= in ir_vregs_join()
1630 if (ctx->ir_base[IR_LIVE_POS_TO_REF(ctx->live_intervals[r1]->use_pos->pos)].op != IR_VLOAD) { in ir_vregs_join()
1631 ctx->live_intervals[r1]->flags &= ~IR_LIVE_INTERVAL_MEM_LOAD; in ir_vregs_join()
1633 ctx->live_intervals[r2] = NULL; in ir_vregs_join()
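
When coalescing joins r2 into r1, ir_vregs_join re-adds every range of r2 to r1 (recycling the nodes through unused_ranges, as above) and then splices the two use-position lists together in position order, rewriting hint_refs that pointed across the pair. The list splice itself is an ordinary destructive sorted merge; a sketch with a stripped-down node type:

    #include <stddef.h>

    typedef struct upos { int pos; struct upos *next; } upos_t;

    /* Merge two position-sorted use lists in place, no allocation. */
    static upos_t *merge_use_positions(upos_t *a, upos_t *b)
    {
        upos_t head = { 0, NULL }, *tail = &head;
        while (a && b) {
            upos_t **min = (a->pos <= b->pos) ? &a : &b;
            tail->next = *min;
            tail = *min;
            *min = (*min)->next;
        }
        tail->next = a ? a : b;
        return head.next;
    }
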
1639 static void ir_vregs_coalesce(ir_ctx *ctx, uint32_t v1, uint32_t v2, ir_ref from, ir_ref to) in ir_vregs_coalesce() argument
1642 uint16_t f1 = ctx->live_intervals[v1]->flags; in ir_vregs_coalesce()
1643 uint16_t f2 = ctx->live_intervals[v2]->flags; in ir_vregs_coalesce()
1646 if (ctx->binding) { in ir_vregs_coalesce()
1647 ir_ref b1 = ir_binding_find(ctx, from); in ir_vregs_coalesce()
1648 ir_ref b2 = ir_binding_find(ctx, to); in ir_vregs_coalesce()
1653 ir_vregs_join(ctx, v1, v2); in ir_vregs_coalesce()
1654 ctx->vregs[to] = v1; in ir_vregs_coalesce()
1656 ir_vregs_join(ctx, v2, v1); in ir_vregs_coalesce()
1657 ctx->vregs[from] = v2; in ir_vregs_coalesce()
1659 ir_vregs_join(ctx, v1, v2); in ir_vregs_coalesce()
1661 for (i = 1; i < ctx->insns_count; i++) { in ir_vregs_coalesce()
1662 if (ctx->vregs[i] == v2) { in ir_vregs_coalesce()
1663 ctx->vregs[i] = v1; in ir_vregs_coalesce()
1667 ctx->vregs[to] = v1; in ir_vregs_coalesce()
1670 ir_vregs_join(ctx, v2, v1); in ir_vregs_coalesce()
1672 for (i = 1; i < ctx->insns_count; i++) { in ir_vregs_coalesce()
1673 if (ctx->vregs[i] == v1) { in ir_vregs_coalesce()
1674 ctx->vregs[i] = v2; in ir_vregs_coalesce()
1678 ctx->vregs[from] = v2; in ir_vregs_coalesce()
1683 static void ir_add_phi_move(ir_ctx *ctx, uint32_t b, ir_ref from, ir_ref to) in ir_add_phi_move() argument
1685 if (IR_IS_CONST_REF(from) || ctx->vregs[from] != ctx->vregs[to]) { in ir_add_phi_move()
1686 ctx->cfg_blocks[b].flags &= ~IR_BB_EMPTY; in ir_add_phi_move()
1687 ctx->cfg_blocks[b].flags |= IR_BB_DESSA_MOVES; in ir_add_phi_move()
1688 ctx->flags2 |= IR_LR_HAVE_DESSA_MOVES; in ir_add_phi_move()
1718 static void ir_swap_operands(ir_ctx *ctx, ir_ref i, ir_insn *insn) in ir_swap_operands() argument
1731 ival = ctx->live_intervals[ctx->vregs[insn->op1]]; in ir_swap_operands()
1743 ival = ctx->live_intervals[ctx->vregs[i]]; in ir_swap_operands()
1753 if (insn->op2 > 0 && ctx->vregs[insn->op2]) { in ir_swap_operands()
1754 ival = ctx->live_intervals[ctx->vregs[insn->op2]]; in ir_swap_operands()
1784 static int ir_hint_conflict(ir_ctx *ctx, ir_ref ref, int use, int def) in ir_hint_conflict() argument
1790 p = ctx->live_intervals[use]->use_pos; in ir_hint_conflict()
1801 p = ctx->live_intervals[def]->use_pos; in ir_hint_conflict()
1814 static int ir_try_swap_operands(ir_ctx *ctx, ir_ref i, ir_insn *insn) in ir_try_swap_operands() argument
1816 if (ctx->vregs[insn->op1] in ir_try_swap_operands()
1817 && ctx->vregs[insn->op1] != ctx->vregs[i] in ir_try_swap_operands()
1818 && !ir_vregs_overlap(ctx, ctx->vregs[insn->op1], ctx->vregs[i]) in ir_try_swap_operands()
1819 && !ir_hint_conflict(ctx, i, ctx->vregs[insn->op1], ctx->vregs[i])) { in ir_try_swap_operands()
1822 if (ctx->vregs[insn->op2] && ctx->vregs[insn->op2] != ctx->vregs[i]) { in ir_try_swap_operands()
1825 ir_live_interval *ival = ctx->live_intervals[ctx->vregs[insn->op2]]; in ir_try_swap_operands()
1828 if ((ival->flags & IR_LIVE_INTERVAL_MEM_PARAM) && ctx->use_lists[insn->op2].count == 1) { in ir_try_swap_operands()
1837 if (!ir_vregs_overlap(ctx, ctx->vregs[insn->op2], ctx->vregs[i]) in ir_try_swap_operands()
1838 && !ir_hint_conflict(ctx, i, ctx->vregs[insn->op2], ctx->vregs[i])) { in ir_try_swap_operands()
1839 ir_swap_operands(ctx, i, insn); in ir_try_swap_operands()
1856 int ir_coalesce(ir_ctx *ctx) in ir_coalesce() argument
1868 list = ir_mem_malloc(sizeof(ir_coalesce_block) * ctx->cfg_blocks_count); in ir_coalesce()
1869 visited = ir_bitset_malloc(ctx->cfg_blocks_count + 1); in ir_coalesce()
1870 for (b = 1, bb = &ctx->cfg_blocks[1]; b <= ctx->cfg_blocks_count; b++, bb++) { in ir_coalesce()
1875 use_list = &ctx->use_lists[bb->start]; in ir_coalesce()
1877 IR_ASSERT(k == ctx->ir_base[bb->start].inputs_count); in ir_coalesce()
1878 for (p = &ctx->use_edges[use_list->refs]; n > 0; p++, n--) { in ir_coalesce()
1880 insn = &ctx->ir_base[use]; in ir_coalesce()
1884 pred_b = ctx->cfg_edges[bb->predecessors + k]; in ir_coalesce()
1888 list[count].loop_depth = ctx->cfg_blocks[pred_b].loop_depth; in ir_coalesce()
1908 bb = &ctx->cfg_blocks[b]; in ir_coalesce()
1910 succ = ctx->cfg_edges[bb->successors]; in ir_coalesce()
1911 succ_bb = &ctx->cfg_blocks[succ]; in ir_coalesce()
1913 k = ir_phi_input_number(ctx, succ_bb, b); in ir_coalesce()
1914 use_list = &ctx->use_lists[succ_bb->start]; in ir_coalesce()
1916 for (i = 0, p = &ctx->use_edges[use_list->refs]; i < n; i++, p++) { in ir_coalesce()
1918 insn = &ctx->ir_base[use]; in ir_coalesce()
1921 if (input > 0 && ctx->vregs[input]) { in ir_coalesce()
1922 uint32_t v1 = ctx->vregs[input]; in ir_coalesce()
1923 uint32_t v2 = ctx->vregs[use]; in ir_coalesce()
1928 if (!ir_vregs_overlap(ctx, v1, v2)) { in ir_coalesce()
1929 ir_vregs_coalesce(ctx, v1, v2, input, use); in ir_coalesce()
1933 if (ctx->rules && (ctx->rules[input] & IR_MAY_SWAP)) { in ir_coalesce()
1934 ir_insn *input_insn = &ctx->ir_base[input]; in ir_coalesce()
1939 && (ctx->live_intervals[v1]->use_pos->flags & IR_DEF_REUSES_OP1_REG)) { in ir_coalesce()
1940 ir_live_range *r = &ctx->live_intervals[v2]->range; in ir_coalesce()
1951 ctx->live_intervals[v2]->end = IR_LOAD_LIVE_POS_FROM_REF(input); in ir_coalesce()
1953 if (ir_vregs_overlap(ctx, v1, v2)) { in ir_coalesce()
1956 ctx->live_intervals[v2]->end = IR_USE_LIVE_POS_FROM_REF(input); in ir_coalesce()
1959 ir_swap_operands(ctx, input, input_insn); in ir_coalesce()
1960 IR_ASSERT(!ir_vregs_overlap(ctx, v1, v2)); in ir_coalesce()
1961 ir_vregs_coalesce(ctx, v1, v2, input, use); in ir_coalesce()
1969 ir_add_phi_move(ctx, b, input, use); in ir_coalesce()
1974 ir_add_phi_move(ctx, b, input, use); in ir_coalesce()
1981 ir_hint_propagation(ctx); in ir_coalesce()
1983 if (ctx->rules) { in ir_coalesce()
1985 uint32_t *rule = ctx->rules + 1; in ir_coalesce()
1988 for (i = 1; i < ctx->insns_count; rule++, i++) { in ir_coalesce()
1990 insn = &ctx->ir_base[i]; in ir_coalesce()
1991 IR_ASSERT(ctx->vregs[i]); in ir_coalesce()
1994 if (ctx->live_intervals[ctx->vregs[i]]->use_pos in ir_coalesce()
1995 && (ctx->live_intervals[ctx->vregs[i]]->use_pos->flags & IR_DEF_REUSES_OP1_REG) in ir_coalesce()
1999 ir_try_swap_operands(ctx, i, insn); in ir_coalesce()
2004 && ctx->vregs[insn->op1] in ir_coalesce()
2005 && ctx->vregs[i] != ctx->vregs[insn->op1]) { in ir_coalesce()
2006 if (ir_vregs_inside(ctx, ctx->vregs[insn->op1], ctx->vregs[i])) { in ir_coalesce()
2007 if (ctx->binding) { in ir_coalesce()
2008 ir_ref b1 = ir_binding_find(ctx, i); in ir_coalesce()
2009 ir_ref b2 = ir_binding_find(ctx, insn->op1); in ir_coalesce()
2014 ir_vregs_coalesce(ctx, ctx->vregs[i], ctx->vregs[insn->op1], i, insn->op1); in ir_coalesce()
2025 uint32_t *xlat = ir_mem_malloc((ctx->vregs_count + 1) * sizeof(uint32_t)); in ir_coalesce()
2027 for (i = 1, n = 1; i <= ctx->vregs_count; i++) { in ir_coalesce()
2028 if (ctx->live_intervals[i]) { in ir_coalesce()
2031 ctx->live_intervals[n] = ctx->live_intervals[i]; in ir_coalesce()
2032 ctx->live_intervals[n]->vreg = n; in ir_coalesce()
2038 if (n != ctx->vregs_count) { in ir_coalesce()
2039 j = ctx->vregs_count - n; in ir_coalesce()
2042 ctx->live_intervals[i] = ctx->live_intervals[i + j]; in ir_coalesce()
2043 if (ctx->live_intervals[i]) { in ir_coalesce()
2044 ctx->live_intervals[i]->vreg = i; in ir_coalesce()
2047 for (j = 1; j < ctx->insns_count; j++) { in ir_coalesce()
2048 if (ctx->vregs[j]) { in ir_coalesce()
2049 ctx->vregs[j] = xlat[ctx->vregs[j]]; in ir_coalesce()
2052 ctx->vregs_count = n; in ir_coalesce()
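
After coalescing, many vregs have no interval left, so ir_coalesce renumbers the survivors densely: it builds an xlat table from old to new numbers, slides the intervals down, and rewrites ctx->vregs. The same compaction in miniature (void* stands in for ir_live_interval*):

    #include <stdint.h>
    #include <stdlib.h>

    /* ivals[i] != NULL marks surviving vregs; returns the new vregs_count. */
    static uint32_t compact_vregs(void **ivals, uint32_t vregs_count,
                                  uint32_t *vregs, uint32_t insns_count)
    {
        uint32_t *xlat = malloc((vregs_count + 1) * sizeof(uint32_t));
        uint32_t n = 0;

        for (uint32_t i = 1; i <= vregs_count; i++) {
            if (ivals[i]) {
                xlat[i] = ++n;          /* new dense number */
                ivals[n] = ivals[i];    /* slide down: n <= i always holds */
            }
        }
        for (uint32_t j = 1; j < insns_count; j++) {
            if (vregs[j]) vregs[j] = xlat[vregs[j]];
        }
        free(xlat);
        return n;
    }
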
2062 int ir_compute_dessa_moves(ir_ctx *ctx) in ir_compute_dessa_moves() argument
2070 for (b = 1, bb = &ctx->cfg_blocks[1]; b <= ctx->cfg_blocks_count; b++, bb++) { in ir_compute_dessa_moves()
2074 use_list = &ctx->use_lists[bb->start]; in ir_compute_dessa_moves()
2077 IR_ASSERT(k == ctx->ir_base[bb->start].inputs_count); in ir_compute_dessa_moves()
2079 for (i = 0, p = &ctx->use_edges[use_list->refs]; i < n; i++, p++) { in ir_compute_dessa_moves()
2081 insn = &ctx->ir_base[use]; in ir_compute_dessa_moves()
2084 … if (IR_IS_CONST_REF(ir_insn_op(insn, j)) || ctx->vregs[ir_insn_op(insn, j)] != ctx->vregs[use]) { in ir_compute_dessa_moves()
2085 int pred = ctx->cfg_edges[bb->predecessors + (j-2)]; in ir_compute_dessa_moves()
2086 ctx->cfg_blocks[pred].flags &= ~IR_BB_EMPTY; in ir_compute_dessa_moves()
2087 ctx->cfg_blocks[pred].flags |= IR_BB_DESSA_MOVES; in ir_compute_dessa_moves()
2088 ctx->flags2 |= IR_LR_HAVE_DESSA_MOVES; in ir_compute_dessa_moves()
2108 int ir_gen_dessa_moves(ir_ctx *ctx, uint32_t b, emit_copy_t emit_copy) in ir_gen_dessa_moves() argument
2120 bb = &ctx->cfg_blocks[b]; in ir_gen_dessa_moves()
2125 succ = ctx->cfg_edges[bb->successors]; in ir_gen_dessa_moves()
2126 succ_bb = &ctx->cfg_blocks[succ]; in ir_gen_dessa_moves()
2128 use_list = &ctx->use_lists[succ_bb->start]; in ir_gen_dessa_moves()
2130 k = ir_phi_input_number(ctx, succ_bb, b); in ir_gen_dessa_moves()
2132 loc = ir_mem_malloc((ctx->vregs_count + 1) * 4 * sizeof(ir_ref)); in ir_gen_dessa_moves()
2133 pred = loc + ctx->vregs_count + 1; in ir_gen_dessa_moves()
2134 src = pred + ctx->vregs_count + 1; in ir_gen_dessa_moves()
2135 dst = src + ctx->vregs_count + 1; in ir_gen_dessa_moves()
2136 len = ir_bitset_len(ctx->vregs_count + 1); in ir_gen_dessa_moves()
2137 todo = ir_bitset_malloc(ctx->vregs_count + 1); in ir_gen_dessa_moves()
2139 for (i = 0, p = &ctx->use_edges[use_list->refs]; i < use_list->count; i++, p++) { in ir_gen_dessa_moves()
2141 insn = &ctx->ir_base[ref]; in ir_gen_dessa_moves()
2146 } else if (ctx->vregs[input] != ctx->vregs[ref]) { in ir_gen_dessa_moves()
2147 s = ctx->vregs[input]; in ir_gen_dessa_moves()
2148 d = ctx->vregs[ref]; in ir_gen_dessa_moves()
2160 ready = ir_bitset_malloc(ctx->vregs_count + 1); in ir_gen_dessa_moves()
2163 insn = &ctx->ir_base[ref]; in ir_gen_dessa_moves()
2166 s = ctx->vregs[input]; in ir_gen_dessa_moves()
2183 emit_copy(ctx, ctx->ir_base[dst[b]].type, src[c], dst[b]); in ir_gen_dessa_moves()
2196 emit_copy(ctx, ctx->ir_base[src[b]].type, src[b], 0); in ir_gen_dessa_moves()
2208 for (i = 0, p = &ctx->use_edges[use_list->refs]; i < use_list->count; i++, p++) { in ir_gen_dessa_moves()
2210 insn = &ctx->ir_base[ref]; in ir_gen_dessa_moves()
2214 emit_copy(ctx, insn->type, input, ref); in ir_gen_dessa_moves()
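
ir_gen_dessa_moves sequentializes the parallel copies implied by the successor's PHIs with the classic loc/pred worklist algorithm visible in the loc/pred/src/dst/todo/ready structures above: moves whose destination holds no still-needed value are emitted immediately, and whatever remains consists of cycles, each broken with a single temporary. A stand-alone version over plain register numbers (NREG and the scratch register TMP are illustrative constants):

    #include <stdio.h>

    #define NONE (-1)
    #define NREG 16
    #define TMP  NREG          /* scratch register used to break cycles */

    /* Emit a serial move sequence for the parallel copy src[i] -> dst[i]. */
    static void sequentialize(const int *src, const int *dst, int n)
    {
        int loc[NREG + 1], pred[NREG + 1];
        int ready[2 * NREG + 2], todo[2 * NREG + 2], nr = 0, nt = 0;

        for (int r = 0; r <= NREG; r++) loc[r] = pred[r] = NONE;
        for (int i = 0; i < n; i++) { loc[src[i]] = src[i]; pred[dst[i]] = src[i]; }
        for (int i = 0; i < n; i++) {
            todo[nt++] = dst[i];
            if (loc[dst[i]] == NONE) ready[nr++] = dst[i];  /* dst is no source */
        }
        while (nt) {
            while (nr) {                                    /* emit safe moves */
                int b = ready[--nr];
                int a = pred[b], c = loc[a];
                printf("mov r%d -> r%d\n", c, b);
                loc[a] = b;
                if (a == c && pred[a] != NONE) ready[nr++] = a;
            }
            int b = todo[--nt];
            if (b != loc[pred[b]]) {                        /* cycle: use scratch */
                printf("mov r%d -> r%d\n", b, TMP);
                loc[b] = TMP;
                ready[nr++] = b;
            }
        }
    }

For the swap r1 <-> r2 this prints "mov r1 -> r16", "mov r2 -> r1", "mov r16 -> r2"; self-moves are skipped entirely.
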
2227 if (ctx->flags & IR_DEBUG_RA) { \
2238 if (ctx->flags & IR_DEBUG_RA) { \
2250 if (ctx->flags & IR_DEBUG_RA) { \
2263 if (ctx->flags & IR_DEBUG_RA) { \
2350 static ir_block *ir_block_from_live_pos(ir_ctx *ctx, ir_live_pos pos) in ir_block_from_live_pos() argument
2353 uint32_t b = ctx->cfg_map[ref]; in ir_block_from_live_pos()
2358 b = ctx->cfg_map[ref]; in ir_block_from_live_pos()
2360 IR_ASSERT(b <= ctx->cfg_blocks_count); in ir_block_from_live_pos()
2361 return &ctx->cfg_blocks[b]; in ir_block_from_live_pos()
2364 static ir_live_pos ir_find_optimal_split_position(ir_ctx *ctx, ir_live_interval *ival, ir_live_pos … in ir_find_optimal_split_position() argument
2376 min_bb = ir_block_from_live_pos(ctx, min_pos); in ir_find_optimal_split_position()
2377 max_bb = ir_block_from_live_pos(ctx, max_pos); in ir_find_optimal_split_position()
2393 bb = &ctx->cfg_blocks[max_bb->loop_header]; in ir_find_optimal_split_position()
2395 bb = &ctx->cfg_blocks[bb->idom]; in ir_find_optimal_split_position()
2415 static ir_live_interval *ir_split_interval_at(ir_ctx *ctx, ir_live_interval *ival, ir_live_pos pos) in ir_split_interval_at() argument
2423 ctx->flags2 |= IR_RA_HAVE_SPLITS; in ir_split_interval_at()
2466 child = ir_arena_alloc(&ctx->arena, sizeof(ir_live_interval)); in ir_split_interval_at()
2485 p->next = ctx->unused_ranges; in ir_split_interval_at()
2486 ctx->unused_ranges = p; in ir_split_interval_at()
2511 static int32_t ir_allocate_small_spill_slot(ir_ctx *ctx, size_t size, ir_reg_alloc_data *data) in ir_allocate_small_spill_slot() argument
2520 ret = ctx->stack_frame_size; in ir_allocate_small_spill_slot()
2521 ctx->stack_frame_size += 8; in ir_allocate_small_spill_slot()
2531 ret = ctx->stack_frame_size; in ir_allocate_small_spill_slot()
2533 data->unused_slot_4 = ctx->stack_frame_size + 4; in ir_allocate_small_spill_slot()
2534 ctx->stack_frame_size += 8; in ir_allocate_small_spill_slot()
2536 ctx->stack_frame_size += 4; in ir_allocate_small_spill_slot()
2557 ret = ctx->stack_frame_size; in ir_allocate_small_spill_slot()
2558 data->unused_slot_2 = ctx->stack_frame_size + 2; in ir_allocate_small_spill_slot()
2560 data->unused_slot_4 = ctx->stack_frame_size + 4; in ir_allocate_small_spill_slot()
2561 ctx->stack_frame_size += 8; in ir_allocate_small_spill_slot()
2563 ctx->stack_frame_size += 4; in ir_allocate_small_spill_slot()
2595 ret = ctx->stack_frame_size; in ir_allocate_small_spill_slot()
2596 data->unused_slot_1 = ctx->stack_frame_size + 1; in ir_allocate_small_spill_slot()
2597 data->unused_slot_2 = ctx->stack_frame_size + 2; in ir_allocate_small_spill_slot()
2599 data->unused_slot_4 = ctx->stack_frame_size + 4; in ir_allocate_small_spill_slot()
2600 ctx->stack_frame_size += 8; in ir_allocate_small_spill_slot()
2602 ctx->stack_frame_size += 4; in ir_allocate_small_spill_slot()
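
ir_allocate_small_spill_slot carves the stack frame in 8-byte chunks and remembers the unused tails of split chunks in per-size holes (unused_slot_4/2/1 above), so a later smaller request can reuse them. The recursion below mirrors that splitting with a plain struct in place of ir_reg_alloc_data; note a remembered hole offset is never 0, so 0 doubles as the "no hole" sentinel:

    #include <stdint.h>

    typedef struct {
        int32_t frame_size;
        int32_t unused_slot_4, unused_slot_2, unused_slot_1;
    } frame_t;

    static int32_t alloc_slot(frame_t *f, int32_t size)
    {
        int32_t ret;
        switch (size) {
        case 8:
            ret = f->frame_size; f->frame_size += 8; return ret;
        case 4:
            if (f->unused_slot_4) { ret = f->unused_slot_4; f->unused_slot_4 = 0; return ret; }
            ret = f->frame_size;
            f->unused_slot_4 = f->frame_size + 4;   /* remember the other half */
            f->frame_size += 8;
            return ret;
        case 2:
            if (f->unused_slot_2) { ret = f->unused_slot_2; f->unused_slot_2 = 0; return ret; }
            ret = alloc_slot(f, 4);                 /* split a 4-byte slot */
            f->unused_slot_2 = ret + 2;
            return ret;
        default: /* 1 */
            if (f->unused_slot_1) { ret = f->unused_slot_1; f->unused_slot_1 = 0; return ret; }
            ret = alloc_slot(f, 2);                 /* split a 2-byte slot */
            f->unused_slot_1 = ret + 1;
            return ret;
        }
    }
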
2611 int32_t ir_allocate_spill_slot(ir_ctx *ctx, ir_type type, ir_reg_alloc_data *data) in ir_allocate_spill_slot() argument
2613 return ir_allocate_small_spill_slot(ctx, ir_type_size[type], data); in ir_allocate_spill_slot()
2616 static int32_t ir_allocate_big_spill_slot(ir_ctx *ctx, int32_t size, ir_reg_alloc_data *data) in ir_allocate_big_spill_slot() argument
2626 return ir_allocate_small_spill_slot(ctx, size, data); in ir_allocate_big_spill_slot()
2630 ctx->flags2 |= IR_16B_FRAME_ALIGNMENT; in ir_allocate_big_spill_slot()
2631 ret = IR_ALIGNED_SIZE(ctx->stack_frame_size, 16); in ir_allocate_big_spill_slot()
2633 ctx->stack_frame_size = ret + size; in ir_allocate_big_spill_slot()
2638 static ir_reg ir_get_first_reg_hint(ir_ctx *ctx, ir_live_interval *ival, ir_regset available) in ir_get_first_reg_hint() argument
2655 static ir_reg ir_try_allocate_preferred_reg(ir_ctx *ctx, ir_live_interval *ival, ir_regset availabl… in ir_try_allocate_preferred_reg() argument
2678 reg = ctx->live_intervals[ctx->vregs[use_pos->hint_ref]]->reg; in ir_try_allocate_preferred_reg()
2693 static ir_reg ir_get_preferred_reg(ir_ctx *ctx, ir_live_interval *ival, ir_regset available) in ir_get_preferred_reg() argument
2704 reg = ctx->live_intervals[ctx->vregs[use_pos->hint_ref]]->reg; in ir_get_preferred_reg()
2805 static ir_reg ir_try_allocate_free_reg(ir_ctx *ctx, ir_live_interval *ival, ir_live_interval **acti… in ir_try_allocate_free_reg() argument
2821 if (ctx->flags & IR_USE_FRAME_POINTER) { in ir_try_allocate_free_reg()
2838 available = IR_REGSET_DIFFERENCE(available, (ir_regset)ctx->fixed_regset); in ir_try_allocate_free_reg()
2905 reg = ir_try_allocate_preferred_reg(ctx, ival, available, freeUntilPos); in ir_try_allocate_free_reg()
2919 reg = ctx->live_intervals[ival->vreg]->reg; in ir_try_allocate_free_reg()
2941 reg = ir_get_first_reg_hint(ctx, other, non_conflicting); in ir_try_allocate_free_reg()
2994 split_pos = ir_find_optimal_split_position(ctx, ival, split_pos, pos, 0); in ir_try_allocate_free_reg()
2995 other = ir_split_interval_at(ctx, ival, split_pos); in ir_try_allocate_free_reg()
2997 …ir_reg pref_reg = ir_try_allocate_preferred_reg(ctx, ival, IR_REGSET_UNION(available, overlapped),… in ir_try_allocate_free_reg()
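
ir_try_allocate_free_reg is the optimistic half of the allocator: compute, for every candidate register, the position up to which it stays free (0 for registers held by active intervals, the next intersection point for inactive ones), then take the register that is free longest, preferring hinted ones. If the whole interval ends below that position, no split is needed. The core computation on simplified single-range intervals:

    #include <limits.h>

    #define NREG 8

    typedef struct ival { int start, end, reg; struct ival *next; } ival_t;

    /* First position covered by both single-range intervals, or INT_MAX. */
    static int next_intersection(const ival_t *a, const ival_t *b)
    {
        int s = a->start > b->start ? a->start : b->start;
        int e = a->end   < b->end   ? a->end   : b->end;
        return s < e ? s : INT_MAX;
    }

    static int try_allocate_free_reg(ival_t *cur, ival_t *active, ival_t *inactive)
    {
        int free_until[NREG];
        for (int r = 0; r < NREG; r++) free_until[r] = INT_MAX;

        for (ival_t *it = active; it; it = it->next)
            free_until[it->reg] = 0;                     /* fully blocked */
        for (ival_t *it = inactive; it; it = it->next) { /* blocked from pos on */
            int pos = next_intersection(cur, it);
            if (pos < free_until[it->reg]) free_until[it->reg] = pos;
        }
        int reg = 0;
        for (int r = 1; r < NREG; r++)
            if (free_until[r] > free_until[reg]) reg = r;

        if (free_until[reg] == 0) return -1;             /* nothing free */
        if (cur->end <= free_until[reg]) return reg;     /* fits without a split */
        return -1;  /* the full version splits cur at free_until[reg] instead */
    }
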
3020 static ir_reg ir_allocate_blocked_reg(ir_ctx *ctx, ir_live_interval *ival, ir_live_interval **activ… in ir_allocate_blocked_reg() argument
3038 ctx->flags2 |= IR_RA_HAVE_SPILLS; in ir_allocate_blocked_reg()
3055 if (ctx->flags & IR_USE_FRAME_POINTER) { in ir_allocate_blocked_reg()
3073 available = IR_REGSET_DIFFERENCE(available, (ir_regset)ctx->fixed_regset); in ir_allocate_blocked_reg()
3166 reg = ir_get_preferred_reg(ctx, ival, available); in ir_allocate_blocked_reg()
3197 split_pos = ir_find_optimal_split_position(ctx, ival, ival->range.start, next_use_pos - 1, 1); in ir_allocate_blocked_reg()
3202 other = ir_split_interval_at(ctx, ival, split_pos); in ir_allocate_blocked_reg()
3219 other = ir_split_interval_at(ctx, ival, split_pos); in ir_allocate_blocked_reg()
3235 split_pos = ir_find_optimal_split_position(ctx, ival, split_pos, blockPos[reg], 1); in ir_allocate_blocked_reg()
3236 other = ir_split_interval_at(ctx, ival, split_pos); in ir_allocate_blocked_reg()
3261 split_pos = ir_find_optimal_split_position(ctx, other, split_pos, ival->range.start, 1); in ir_allocate_blocked_reg()
3263 child = ir_split_interval_at(ctx, other, split_pos); in ir_allocate_blocked_reg()
3279 … split_pos = ir_find_optimal_split_position(ctx, ival, ival->range.start, next_use_pos - 1, 1); in ir_allocate_blocked_reg()
3300 …ir_live_pos opt_split_pos = ir_find_optimal_split_position(ctx, child, ival->range.start, split_po… in ir_allocate_blocked_reg()
3304 child2 = ir_split_interval_at(ctx, child, split_pos); in ir_allocate_blocked_reg()
3333 child = ir_split_interval_at(ctx, other, overlap); in ir_allocate_blocked_reg()
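
ir_allocate_blocked_reg is the pessimistic half: every register is taken, so compute for each one the next position at which a competing interval actually uses it and pick the register whose next use is farthest away. If even that is closer than the current interval's own first use, the current interval is the cheapest thing to spill; otherwise the competitors are split off and evicted (the ir_split_interval_at calls above). The selection step in isolation, with stand-in types:

    #include <limits.h>

    #define NREG 8

    typedef struct ival2 { int reg, next_use; struct ival2 *next; } ival2_t;

    static int pick_victim_reg(const ival2_t *cur, const ival2_t *active,
                               int *spill_current)
    {
        int next_use_pos[NREG];
        for (int r = 0; r < NREG; r++) next_use_pos[r] = INT_MAX;

        for (const ival2_t *it = active; it; it = it->next)  /* nearest use per reg */
            if (it->next_use < next_use_pos[it->reg])
                next_use_pos[it->reg] = it->next_use;

        int reg = 0;
        for (int r = 1; r < NREG; r++)                       /* farthest next use */
            if (next_use_pos[r] > next_use_pos[reg]) reg = r;

        *spill_current = (cur->next_use > next_use_pos[reg]);
        return reg;
    }
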
3354 static int ir_fix_dessa_tmps(ir_ctx *ctx, uint8_t type, ir_ref from, ir_ref to) in ir_fix_dessa_tmps() argument
3356 ir_block *bb = ctx->data; in ir_fix_dessa_tmps()
3388 if (!ir_has_tmp(ctx, bb->end, tmp_reg.num)) { in ir_fix_dessa_tmps()
3389 ir_add_tmp(ctx, bb->end, bb->end, tmp_reg.num, tmp_reg); in ir_fix_dessa_tmps()
3394 static bool ir_ival_spill_for_fuse_load(ir_ctx *ctx, ir_live_interval *ival, ir_reg_alloc_data *dat… in ir_ival_spill_for_fuse_load() argument
3401 insn = &ctx->ir_base[IR_LIVE_POS_TO_REF(use_pos->pos)]; in ir_ival_spill_for_fuse_load()
3409 ir_block *bb = ir_block_from_live_pos(ctx, use_pos->pos); in ir_ival_spill_for_fuse_load()
3417 insn = &ctx->ir_base[IR_LIVE_POS_TO_REF(use_pos->pos)]; in ir_ival_spill_for_fuse_load()
3419 IR_ASSERT(ctx->ir_base[insn->op2].op == IR_VAR); in ir_ival_spill_for_fuse_load()
3426 ir_block *bb = ir_block_from_live_pos(ctx, use_pos->pos); in ir_ival_spill_for_fuse_load()
3427 if (bb->loop_depth && bb != ir_block_from_live_pos(ctx, ival->use_pos->pos)) { in ir_ival_spill_for_fuse_load()
3431 ir_use_list *use_list = &ctx->use_lists[insn->op2]; in ir_ival_spill_for_fuse_load()
3433 ir_ref *p = &ctx->use_edges[use_list->refs]; in ir_ival_spill_for_fuse_load()
3436 if (ctx->ir_base[use].op == IR_VSTORE) { in ir_ival_spill_for_fuse_load()
3440 } else if (ctx->ir_base[use].op == IR_VADDR) { in ir_ival_spill_for_fuse_load()
3445 ival->stack_spill_pos = ctx->ir_base[insn->op2].op3; in ir_ival_spill_for_fuse_load()
3452 static void ir_assign_bound_spill_slots(ir_ctx *ctx) in ir_assign_bound_spill_slots() argument
3454 ir_hashtab_bucket *b = ctx->binding->data; in ir_assign_bound_spill_slots()
3455 uint32_t n = ctx->binding->count; in ir_assign_bound_spill_slots()
3460 v = ctx->vregs[b->key]; in ir_assign_bound_spill_slots()
3462 ival = ctx->live_intervals[v]; in ir_assign_bound_spill_slots()
3477 static int ir_linear_scan(ir_ctx *ctx) in ir_linear_scan() argument
3489 ir_ref vars = ctx->vars; in ir_linear_scan()
3491 if (!ctx->live_intervals) { in ir_linear_scan()
3495 if (ctx->flags2 & IR_LR_HAVE_DESSA_MOVES) { in ir_linear_scan()
3497 for (b = 1, bb = &ctx->cfg_blocks[1]; b <= ctx->cfg_blocks_count; b++, bb++) { in ir_linear_scan()
3500 ctx->data = bb; in ir_linear_scan()
3501 ir_gen_dessa_moves(ctx, b, ir_fix_dessa_tmps); in ir_linear_scan()
3506 ctx->data = &data; in ir_linear_scan()
3507 ctx->stack_frame_size = 0; in ir_linear_scan()
3515 ir_insn *insn = &ctx->ir_base[var]; in ir_linear_scan()
3521 ir_ref slot = ir_allocate_spill_slot(ctx, insn->type, &data); in ir_linear_scan()
3526 use_list = &ctx->use_lists[var]; in ir_linear_scan()
3528 p = &ctx->use_edges[use_list->refs]; in ir_linear_scan()
3530 insn = &ctx->ir_base[*p]; in ir_linear_scan()
3536 ir_insn *val = &ctx->ir_base[insn->op2]; in ir_linear_scan()
3544 insn->op3 = ir_allocate_big_spill_slot(ctx, val->val.i32, &data); in ir_linear_scan()
3548 for (j = ctx->vregs_count; j != 0; j--) { in ir_linear_scan()
3549 ival = ctx->live_intervals[j]; in ir_linear_scan()
3552 || !ir_ival_spill_for_fuse_load(ctx, ival, &data)) { in ir_linear_scan()
3558 ival = ctx->live_intervals[0]; in ir_linear_scan()
3564 for (j = ctx->vregs_count + 1; j <= ctx->vregs_count + IR_REG_NUM + 2; j++) { in ir_linear_scan()
3565 ival = ctx->live_intervals[j]; in ir_linear_scan()
3573 ctx->flags2 &= ~(IR_RA_HAVE_SPLITS|IR_RA_HAVE_SPILLS); in ir_linear_scan()
3576 if (ctx->flags & IR_DEBUG_RA) { in ir_linear_scan()
3578 ir_dump_live_ranges(ctx, stderr); in ir_linear_scan()
3667 reg = ir_try_allocate_free_reg(ctx, ival, &active, inactive, &unhandled); in ir_linear_scan()
3669 reg = ir_allocate_blocked_reg(ctx, ival, &active, &inactive, &unhandled); in ir_linear_scan()
3687 if (ctx->flags2 & (IR_RA_HAVE_SPLITS|IR_RA_HAVE_SPILLS)) { in ir_linear_scan()
3689 if (ctx->binding) { in ir_linear_scan()
3690 ir_assign_bound_spill_slots(ctx); in ir_linear_scan()
3695 for (j = ctx->vregs_count; j != 0; j--) { in ir_linear_scan()
3696 ival = ctx->live_intervals[j]; in ir_linear_scan()
3761 ival->stack_spill_pos = ir_allocate_spill_slot(ctx, ival->type, &data); in ir_linear_scan()
3786 if (ctx->flags2 & IR_HAS_FP_RET_SLOT) { in ir_linear_scan()
3787 ctx->ret_slot = ir_allocate_spill_slot(ctx, IR_DOUBLE, &data); in ir_linear_scan()
3788 } else if (ctx->ret_type == IR_FLOAT || ctx->ret_type == IR_DOUBLE) { in ir_linear_scan()
3789 ctx->ret_slot = ir_allocate_spill_slot(ctx, ctx->ret_type, &data); in ir_linear_scan()
3791 ctx->ret_slot = -1; in ir_linear_scan()
3796 if (ctx->flags & IR_DEBUG_RA) { in ir_linear_scan()
3798 ir_dump_live_ranges(ctx, stderr); in ir_linear_scan()
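
ir_linear_scan itself is the standard driver: pull the next interval from the sorted unhandled list, retire whatever ended, then try ir_try_allocate_free_reg and fall back to ir_allocate_blocked_reg. A compressed self-contained version under stated simplifications (whole-interval spilling instead of splitting, no inactive list for lifetime holes, NREG registers):

    #include <stddef.h>

    #define NREG 4

    typedef struct iv { int start, end, reg; struct iv *next; } iv_t;

    /* reg == -1 in the result means "spilled to memory". */
    static void linear_scan(iv_t *unhandled /* sorted by ascending start */)
    {
        iv_t *active = NULL;

        while (unhandled) {
            iv_t *cur = unhandled;
            unhandled = cur->next;

            int used = 0;
            for (iv_t **p = &active; *p; ) {           /* expire ended intervals */
                if ((*p)->end <= cur->start) { *p = (*p)->next; continue; }
                used |= 1 << (*p)->reg;
                p = &(*p)->next;
            }
            cur->reg = -1;
            for (int r = 0; r < NREG; r++)             /* first free register */
                if (!(used & (1 << r))) { cur->reg = r; break; }

            if (cur->reg < 0) {                        /* all blocked */
                iv_t **vp = &active;                   /* victim: ends last */
                for (iv_t **p = &active; *p; p = &(*p)->next)
                    if ((*p)->end > (*vp)->end) vp = p;
                if ((*vp)->end > cur->end) {           /* steal its register */
                    iv_t *victim = *vp;
                    cur->reg = victim->reg;
                    victim->reg = -1;
                    *vp = victim->next;                /* victim is spilled */
                }
            }
            if (cur->reg >= 0) { cur->next = active; active = cur; }
        }
    }
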
3806 static bool needs_spill_reload(ir_ctx *ctx, ir_live_interval *ival, uint32_t b0, ir_bitset availabl… in needs_spill_reload() argument
3812 ir_worklist_init(&worklist, ctx->cfg_blocks_count + 1); in needs_spill_reload()
3816 bb = &ctx->cfg_blocks[b]; in needs_spill_reload()
3822 for (p = &ctx->cfg_edges[bb->predecessors]; n > 0; p++, n--) { in needs_spill_reload()
3824 bb = &ctx->cfg_blocks[b]; in needs_spill_reload()
3838 static bool needs_spill_load(ir_ctx *ctx, ir_live_interval *ival, ir_use_pos *use_pos) in needs_spill_load() argument
3850 static void ir_set_fused_reg(ir_ctx *ctx, ir_ref root, ir_ref ref_and_op, int8_t reg) in ir_set_fused_reg() argument
3855 if (!ctx->fused_regs) { in ir_set_fused_reg()
3856 ctx->fused_regs = ir_mem_malloc(sizeof(ir_strtab)); in ir_set_fused_reg()
3857 ir_strtab_init(ctx->fused_regs, 8, 128); in ir_set_fused_reg()
3861 ir_strtab_lookup(ctx->fused_regs, key, 8, 0x10000000 | reg); in ir_set_fused_reg()
3864 static void assign_regs(ir_ctx *ctx) in assign_regs() argument
3873 if (!ctx->regs) { in assign_regs()
3874 ctx->regs = ir_mem_malloc(sizeof(ir_regs) * ctx->insns_count); in assign_regs()
3875 memset(ctx->regs, IR_REG_NONE, sizeof(ir_regs) * ctx->insns_count); in assign_regs()
3878 if (!(ctx->flags2 & (IR_RA_HAVE_SPLITS|IR_RA_HAVE_SPILLS))) { in assign_regs()
3879 for (i = 1; i <= ctx->vregs_count; i++) { in assign_regs()
3880 ival = ctx->live_intervals[i]; in assign_regs()
3889 ir_set_alocated_reg(ctx, ref, use_pos->op_num, reg); in assign_regs()
3898 ir_bitset available = ir_bitset_malloc(ctx->cfg_blocks_count + 1); in assign_regs()
3900 for (i = 1; i <= ctx->vregs_count; i++) { in assign_regs()
3901 top_ival = ival = ctx->live_intervals[i]; in assign_regs()
3914 ir_set_alocated_reg(ctx, ref, use_pos->op_num, reg); in assign_regs()
3926 ir_bitset_clear(available, ir_bitset_len(ctx->cfg_blocks_count + 1)); in assign_regs()
3934 if ((ctx->ir_base[ref].op == IR_COPY in assign_regs()
3935 || ctx->ir_base[ref].op == IR_BITCAST in assign_regs()
3936 || ctx->ir_base[ref].op == IR_TRUNC) in assign_regs()
3937 && !IR_IS_CONST_REF(ctx->ir_base[ref].op1) in assign_regs()
3938 && ctx->vregs[ctx->ir_base[ref].op1] == (uint32_t)i) { in assign_regs()
3940 ir_set_alocated_reg(ctx, ref, use_pos->op_num, reg); in assign_regs()
3945 ir_bitset_clear(available, ir_bitset_len(ctx->cfg_blocks_count + 1)); in assign_regs()
3946 if (ctx->ir_base[ref].op == IR_PHI) { in assign_regs()
3950 } else if (ctx->ir_base[ref].op == IR_PARAM in assign_regs()
3955 uint32_t use_b = ctx->cfg_map[ref]; in assign_regs()
3957 if (ir_ival_covers(ival, IR_SAVE_LIVE_POS_FROM_REF(ctx->cfg_blocks[use_b].end))) { in assign_regs()
3967 } else if ((!prev_use_ref || ctx->cfg_map[prev_use_ref] != ctx->cfg_map[ref]) in assign_regs()
3968 && needs_spill_reload(ctx, ival, ctx->cfg_map[ref], available)) { in assign_regs()
3973 && ctx->ir_base[ref].op != IR_SNAPSHOT in assign_regs()
3974 && !needs_spill_load(ctx, ival, use_pos)) { in assign_regs()
3985 ir_set_alocated_reg(ctx, ref, use_pos->op_num, reg); in assign_regs()
3994 if (ctx->ir_base[ref].op != IR_SNAPSHOT && !(use_pos->flags & IR_PHI_USE)) { in assign_regs()
3995 uint32_t use_b = ctx->cfg_map[ref]; in assign_regs()
3997 if (ir_ival_covers(ival, IR_SAVE_LIVE_POS_FROM_REF(ctx->cfg_blocks[use_b].end))) { in assign_regs()
4004 … && (old_reg = ir_get_alocated_reg(ctx, -use_pos->hint_ref, use_pos->op_num)) != IR_REG_NONE) { in assign_regs()
4011 IR_ASSERT(ctx->rules[-use_pos->hint_ref] & IR_FUSED); in assign_regs()
4012 ctx->rules[-use_pos->hint_ref] |= IR_FUSED_REG; in assign_regs()
4013 ir_set_fused_reg(ctx, ref, -use_pos->hint_ref * sizeof(ir_ref) + use_pos->op_num, reg); in assign_regs()
4020 IR_ASSERT(ctx->vregs[-use_pos->hint_ref]); in assign_regs()
4021 IR_ASSERT(ctx->live_intervals[ctx->vregs[-use_pos->hint_ref]]); in assign_regs()
4022 … if (ctx->live_intervals[ctx->vregs[-use_pos->hint_ref]]->flags & IR_LIVE_INTERVAL_SPILLED) { in assign_regs()
4027 … && (old_reg = ir_get_alocated_reg(ctx, -use_pos->hint_ref, use_pos->op_num)) != IR_REG_NONE) { in assign_regs()
4029 IR_ASSERT(ctx->rules[-use_pos->hint_ref] & IR_FUSED); in assign_regs()
4030 ctx->rules[-use_pos->hint_ref] |= IR_FUSED_REG; in assign_regs()
4031 ir_set_fused_reg(ctx, ref, -use_pos->hint_ref * sizeof(ir_ref) + use_pos->op_num, reg); in assign_regs()
4041 ir_set_alocated_reg(ctx, ref, use_pos->op_num, reg); in assign_regs()
4049 if (ctx->ir_base[ref].op == IR_SNAPSHOT) { in assign_regs()
4053 ir_set_alocated_reg(ctx, ref, use_pos->op_num, reg); in assign_regs()
4067 ival = ctx->live_intervals[0]; in assign_regs()
4074 ir_insn *insn = &ctx->ir_base[ival->tmp_ref]; in assign_regs()
4081 } else if (ctx->ir_base[ops[ival->tmp_op_num]].op == IR_ALLOCA in assign_regs()
4082 || ctx->ir_base[ops[ival->tmp_op_num]].op == IR_VADDR) { in assign_regs()
4088 ir_set_alocated_reg(ctx, ival->tmp_ref, ival->tmp_op_num, reg); in assign_regs()
4093 if (ctx->fixed_stack_frame_size != -1) { in assign_regs()
4094 ctx->used_preserved_regs = (ir_regset)ctx->fixed_save_regset; in assign_regs()
4096 ctx->used_preserved_regs)) { in assign_regs()
4101 ctx->used_preserved_regs = IR_REGSET_UNION((ir_regset)ctx->fixed_save_regset, in assign_regs()
4103 (ctx->flags & IR_FUNCTION) ? (ir_regset)ctx->fixed_regset : IR_REGSET_PRESERVED)); in assign_regs()
4106 ir_fix_stack_frame(ctx); in assign_regs()
4109 int ir_reg_alloc(ir_ctx *ctx) in ir_reg_alloc() argument
4111 if (ir_linear_scan(ctx)) { in ir_reg_alloc()
4112 assign_regs(ctx); in ir_reg_alloc()