Lines Matching refs:insns_count

389 ctx->use_lists = ir_mem_realloc(ctx->use_lists, ctx->insns_count * sizeof(ir_use_list)); in ir_split_partially_dead_node()
390 ctx->cfg_map = ir_mem_realloc(ctx->cfg_map, ctx->insns_count * sizeof(uint32_t)); in ir_split_partially_dead_node()
554 ir_list_init(&queue_early, ctx->insns_count); in ir_gcm()
599 ir_list_init(&queue_late, ctx->insns_count); in ir_gcm()
672 for (n = 1; n < ctx->insns_count; n++) { in ir_gcm()
708 for (n = 1; n < ctx->insns_count; n++) { in ir_gcm()
733 IR_ASSERT(key < ctx->insns_count); in ir_xlat_binding()
768 ir_ref i, j, k, n, *p, *q, ref, new_ref, prev_ref, insns_count, consts_count, use_edges_count; in ir_schedule() local
774 ir_ref *_next = ir_mem_malloc(ctx->insns_count * sizeof(ir_ref)); in ir_schedule()
775 ir_ref *_prev = ir_mem_malloc(ctx->insns_count * sizeof(ir_ref)); in ir_schedule()
787 for (i = 2, j = 1; i < ctx->insns_count; i++) { in ir_schedule()
863 _xlat = ir_mem_calloc((ctx->consts_count + ctx->insns_count), sizeof(ir_ref)); in ir_schedule()
869 insns_count = 1; in ir_schedule()
877 _xlat[i] = bb->start = insns_count; in ir_schedule()
884 insns_count += ir_insn_inputs_to_len(n); in ir_schedule()
890 _xlat[i] = insns_count; in ir_schedule()
891 insns_count += 1; in ir_schedule()
899 _xlat[i] = insns_count; in ir_schedule()
901 insns_count += ir_insn_inputs_to_len(n + 1); in ir_schedule()
970 _xlat[i] = insns_count; in ir_schedule()
971 insns_count += ir_insn_inputs_to_len(n); in ir_schedule()
976 _xlat[i] = bb->end = insns_count; in ir_schedule()
977 insns_count++; in ir_schedule()
996 if (consts_count == ctx->consts_count && insns_count == ctx->insns_count) { in ir_schedule()
1021 ir_init(&new_ctx, ctx->flags, consts_count, insns_count); in ir_schedule()
1022 new_ctx.insns_count = insns_count; in ir_schedule()
1110 new_ctx.cfg_map = ir_mem_calloc(ctx->insns_count, sizeof(uint32_t)); in ir_schedule()
1111 new_ctx.prev_ref = _prev = ir_mem_malloc(insns_count * sizeof(ir_ref)); in ir_schedule()
1112 new_ctx.use_lists = lists = ir_mem_malloc(insns_count * sizeof(ir_use_list)); in ir_schedule()
1261 IR_ASSERT(new_ctx.insns_count == new_ctx.insns_limit); in ir_schedule()
1277 ctx->prev_ref = ir_mem_malloc(ctx->insns_count * sizeof(ir_ref)); in ir_build_prev_refs()
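
All of these hits follow one pattern: ctx->insns_count is the number of IR instructions, and every per-instruction side table (use_lists, cfg_map, prev_ref, and the _xlat/_next/_prev scratch arrays in ir_schedule()) is sized by it and reallocated or rebuilt whenever the instruction count changes, e.g. after splitting a node or after compacting the graph. Below is a minimal, self-contained sketch of that sizing pattern, assuming a trimmed-down context; toy_ctx and toy_grow_side_arrays() are hypothetical stand-ins, not the IR library's API, and realloc failure handling is omitted for brevity.

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical, trimmed-down context: only the fields needed to show how
     * per-instruction side arrays track insns_count. */
    typedef struct {
        uint32_t  insns_count;  /* number of IR instructions currently in use */
        uint32_t *cfg_map;      /* per-instruction basic-block number */
        int32_t  *prev_ref;     /* per-instruction previous reference */
    } toy_ctx;

    /* Same idea as the reallocs at lines 389-390: when instructions are added,
     * every table indexed by instruction reference grows to the new
     * insns_count, and the fresh tail of cfg_map starts zeroed (unmapped). */
    static void toy_grow_side_arrays(toy_ctx *ctx, uint32_t new_insns_count)
    {
        ctx->cfg_map  = realloc(ctx->cfg_map,  new_insns_count * sizeof(uint32_t));
        ctx->prev_ref = realloc(ctx->prev_ref, new_insns_count * sizeof(int32_t));
        memset(ctx->cfg_map + ctx->insns_count, 0,
               (new_insns_count - ctx->insns_count) * sizeof(uint32_t));
        ctx->insns_count = new_insns_count;
    }

    int main(void)
    {
        toy_ctx ctx = { 0 };
        toy_grow_side_arrays(&ctx, 16);   /* initial allocation */
        toy_grow_side_arrays(&ctx, 32);   /* e.g. a split added instructions */
        free(ctx.cfg_map);
        free(ctx.prev_ref);
        return 0;
    }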