Lines matching refs:ref — every line where the identifier "ref" occurs, with the containing function (and, for declaration sites, its kind: argument, local, member) noted at the end of each line.

20 static uint32_t ir_gcm_schedule_early(ir_ctx *ctx, ir_ref ref, ir_list *queue_late)  in ir_gcm_schedule_early()  argument
27 insn = &ctx->ir_base[ref]; in ir_gcm_schedule_early()
52 ctx->cfg_map[ref] = IR_GCM_EARLY_BLOCK(result); in ir_gcm_schedule_early()
53 ir_list_push_unchecked(queue_late, ref); in ir_gcm_schedule_early()
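
The lines above belong to ir_gcm_schedule_early(), which assigns a floating node the earliest block in which its inputs are available, records it via IR_GCM_EARLY_BLOCK() in cfg_map, and queues the node for the late pass (queue_late). A minimal self-contained sketch of the Click-style "schedule early" idea; toy_node, dom_depth and the recursion are illustrative toys, not the ir API:

#include <stdint.h>

#define MAX_INPUTS 3

typedef struct toy_node {
    struct toy_node *inputs[MAX_INPUTS]; /* data dependencies (NULL if unused) */
    uint32_t         block;              /* 0 = not scheduled yet, otherwise BB number */
    int              pinned;             /* control/pinned nodes keep their block */
} toy_node;

/* dom_depth[b] = depth of block b in the dominator tree (toy array).
 * The earliest legal block of a floating node is the deepest block
 * (in the dominator tree) among the blocks of its inputs. */
static uint32_t schedule_early(toy_node *n, const uint32_t *dom_depth)
{
    if (n->pinned || n->block) {
        return n->block;
    }
    n->block = 1;                        /* start at the entry block */
    for (int i = 0; i < MAX_INPUTS; i++) {
        if (n->inputs[i]) {
            uint32_t b = schedule_early(n->inputs[i], dom_depth);
            if (dom_depth[b] > dom_depth[n->block]) {
                n->block = b;            /* sink to the deepest input block */
            }
        }
    }
    return n->block;
}

The real pass is worklist driven rather than recursive, but the placement rule it computes is the same.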
77 static uint32_t ir_gcm_select_best_block(ir_ctx *ctx, ir_ref ref, uint32_t lca) in ir_gcm_select_best_block() argument
88 if (ctx->ir_base[ref].op >= IR_EQ && ctx->ir_base[ref].op <= IR_UGT) { in ir_gcm_select_best_block()
89 ir_use_list *use_list = &ctx->use_lists[ref]; in ir_gcm_select_best_block()
104 && !(ctx->binding && ir_binding_find(ctx, ref))) { in ir_gcm_select_best_block()
139 && !(ctx->binding && ir_binding_find(ctx, ref))) { in ir_gcm_select_best_block()
145 } while (b != ctx->cfg_map[ref]); in ir_gcm_select_best_block()
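
ir_gcm_select_best_block() refines a placement: starting from the late block (lca) it walks up the dominator tree toward the node's early block (ctx->cfg_map[ref], the loop's exit condition above) and prefers a block with lower loop nesting; the listing also shows early-outs for comparison ops and for nodes with bindings (ir_binding_find), which the sketch omits. A simplified sketch of the walk, assuming a per-block idom/loop_depth pair; toy_block and its field names are illustrative:

#include <stdint.h>

typedef struct {
    uint32_t idom;       /* immediate dominator */
    uint32_t loop_depth; /* loop nesting depth */
} toy_block;

/* Walk from `late` up to (and including) `early` in the dominator tree
 * and return the candidate with the smallest loop nesting depth. */
static uint32_t select_best_block(const toy_block *blocks, uint32_t early, uint32_t late)
{
    uint32_t best = late;
    uint32_t b = late;

    while (b != early) {
        b = blocks[b].idom;
        if (blocks[b].loop_depth < blocks[best].loop_depth) {
            best = b;
        }
    }
    return best;
}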
200 static bool ir_split_partially_dead_node(ir_ctx *ctx, ir_ref ref, uint32_t b) in ir_split_partially_dead_node() argument
215 use_list = &ctx->use_lists[ref]; in ir_split_partially_dead_node()
226 if (*p == ref) { in ir_split_partially_dead_node()
251 fprintf(stderr, "*** Split partially dead node d_%d scheduled to BB%d\n", ref, b); in ir_split_partially_dead_node()
254 fprintf(stderr, "\td_%d is USED in [BB%d", ref, i); in ir_split_partially_dead_node()
295 fprintf(stderr, "\td_%d is TOTALLY_USEFUL in [BB%d", ref, i); in ir_split_partially_dead_node()
306 use_list = &ctx->use_lists[ref]; in ir_split_partially_dead_node()
310 ir_ref ref; in ir_split_partially_dead_node() member
316 ir_ref ref; in ir_split_partially_dead_node() member
337 if (*p == ref) { in ir_split_partially_dead_node()
350 uses[uses_count].ref = use; in ir_split_partially_dead_node()
371 uses[uses_count].ref = use; in ir_split_partially_dead_node()
385 i, clones[i].block, clones[i].use_count, uses[u].ref, uses[u].block); in ir_split_partially_dead_node()
388 fprintf(stderr, ", d_%d/BB%d", uses[u].ref, uses[u].block); in ir_split_partially_dead_node()
397 insn = &ctx->ir_base[ref]; in ir_split_partially_dead_node()
398 clones[0].ref = ref; in ir_split_partially_dead_node()
400 clones[i].ref = clone = ir_emit(ctx, insn->optx, insn->op1, insn->op2, insn->op3); in ir_split_partially_dead_node()
401 insn = &ctx->ir_base[ref]; in ir_split_partially_dead_node()
410 n = ctx->use_lists[ref].refs; in ir_split_partially_dead_node()
412 clone = clones[i].ref; in ir_split_partially_dead_node()
426 use = uses[u].ref; in ir_split_partially_dead_node()
436 if (ir_insn_op(insn, k) == ref) { in ir_split_partially_dead_node()
453 if (ir_insn_op(insn, k) == ref) { in ir_split_partially_dead_node()
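
ir_split_partially_dead_node() clones a node whose value is not needed on every path below the chosen block, so that each group of uses can get its own copy (the clones[]/uses[] bookkeeping and the IR_DEBUG fprintf output above). A much-simplified sketch of the final rewiring step only: matching a use to a clone by exact block is a simplification of the real grouping (which considers dominance and loop structure), and all types here are toys:

#include <stdint.h>
#include <stddef.h>

typedef int32_t toy_ref;

typedef struct {
    toy_ref ops[3];                       /* operands; 0 = unused */
} toy_insn;

typedef struct { toy_ref ref; uint32_t block; } toy_use;
typedef struct { toy_ref ref; uint32_t block; } toy_clone;

/* Rewire every use of `old_ref` to the clone created for the block the use
 * was scheduled into, mirroring the `if (*p == ref)` operand rewrites above. */
static void rewire_uses(toy_insn *insns, toy_ref old_ref,
                        const toy_use *uses, size_t uses_count,
                        const toy_clone *clones, size_t clones_count)
{
    for (size_t u = 0; u < uses_count; u++) {
        toy_ref repl = old_ref;
        for (size_t c = 0; c < clones_count; c++) {
            if (clones[c].block == uses[u].block) {
                repl = clones[c].ref;
                break;
            }
        }
        toy_insn *insn = &insns[uses[u].ref];
        for (int k = 0; k < 3; k++) {
            if (insn->ops[k] == old_ref) {
                insn->ops[k] = repl;
            }
        }
    }
}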
491 static void ir_gcm_schedule_late(ir_ctx *ctx, ir_ref ref, uint32_t b) in ir_gcm_schedule_late() argument
496 IR_ASSERT(ctx->ir_base[ref].op != IR_PARAM && ctx->ir_base[ref].op != IR_VAR); in ir_gcm_schedule_late()
497 IR_ASSERT(ctx->ir_base[ref].op != IR_PHI && ctx->ir_base[ref].op != IR_PI); in ir_gcm_schedule_late()
501 ctx->cfg_map[ref] = b; in ir_gcm_schedule_late()
503 for (n = 0; n < ctx->use_lists[ref].count; n++) { in ir_gcm_schedule_late()
504 use = ctx->use_edges[ctx->use_lists[ref].refs + n]; in ir_gcm_schedule_late()
519 if (*p == ref) { in ir_gcm_schedule_late()
530 …IR_ASSERT(ir_gcm_dominates(ctx, ctx->cfg_map[ref], lca) && "Early placement doesn't dominate the l… in ir_gcm_schedule_late()
533 if (ctx->use_lists[ref].count > 1 in ir_gcm_schedule_late()
534 && ir_split_partially_dead_node(ctx, ref, lca)) { in ir_gcm_schedule_late()
539 if (lca != ctx->cfg_map[ref]) { in ir_gcm_schedule_late()
540 b = ir_gcm_select_best_block(ctx, ref, lca); in ir_gcm_schedule_late()
542 ctx->cfg_map[ref] = b; in ir_gcm_schedule_late()
545 if (ctx->ir_base[ref].op >= IR_ADD_OV && ctx->ir_base[ref].op <= IR_MUL_OV) { in ir_gcm_schedule_late()
546 ir_use_list *use_list = &ctx->use_lists[ref]; in ir_gcm_schedule_late()
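
ir_gcm_schedule_late() sinks a node toward its uses: the latest legal block is the LCA, in the dominator tree, of the blocks of all uses (the assertion above checks that the early placement dominates it), after which ir_split_partially_dead_node() or ir_gcm_select_best_block() decides the final block; the last lines show additional handling for the IR_ADD_OV..IR_MUL_OV range. A sketch of the LCA computation only, assuming per-block idom/dom_depth fields (illustrative toy types, not the ir cfg_blocks layout):

#include <stdint.h>
#include <stddef.h>

typedef struct {
    uint32_t idom;       /* immediate dominator */
    uint32_t dom_depth;  /* depth in the dominator tree */
} toy_block;

/* Classic dominator-tree LCA: lift the deeper block, then lift both together. */
static uint32_t dom_lca(const toy_block *blocks, uint32_t a, uint32_t b)
{
    while (blocks[a].dom_depth > blocks[b].dom_depth) a = blocks[a].idom;
    while (blocks[b].dom_depth > blocks[a].dom_depth) b = blocks[b].idom;
    while (a != b) {
        a = blocks[a].idom;
        b = blocks[b].idom;
    }
    return a;
}

/* late block = LCA of all use blocks (0 = no uses seen yet) */
static uint32_t late_block(const toy_block *blocks, const uint32_t *use_blocks, size_t n)
{
    uint32_t lca = 0;
    for (size_t i = 0; i < n; i++) {
        lca = lca ? dom_lca(blocks, lca, use_blocks[i]) : use_blocks[i];
    }
    return lca;
}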
562 ir_ref k, n, *p, ref; in ir_gcm() local
576 ref = ctx->cfg_blocks[1].end; in ir_gcm()
578 insn = &ctx->ir_base[ref]; in ir_gcm()
579 _blocks[ref] = 1; /* pin to block */ in ir_gcm()
582 ir_list_push_unchecked(&queue_early, ref); in ir_gcm()
584 ref = insn->op1; /* control predecessor */ in ir_gcm()
585 } while (ref != 1); /* IR_START */ in ir_gcm()
591 ref = *p; in ir_gcm()
592 use_insn = &ctx->ir_base[ref]; in ir_gcm()
595 _blocks[ref] = 1; /* pin to block */ in ir_gcm()
601 ref = ir_list_pop(&queue_early); in ir_gcm()
602 insn = &ctx->ir_base[ref]; in ir_gcm()
605 ref = *p; in ir_gcm()
606 if (ref > 0 && _blocks[ref] == 0) { in ir_gcm()
607 _blocks[ref] = 1; in ir_gcm()
608 ir_list_push_unchecked(&queue_early, ref); in ir_gcm()
624 ref = bb->end; in ir_gcm()
627 insn = &ctx->ir_base[ref]; in ir_gcm()
628 _blocks[ref] = b; /* pin to block */ in ir_gcm()
631 ir_list_push_unchecked(&queue_early, ref); in ir_gcm()
633 ref = insn->op1; /* control predecessor */ in ir_gcm()
635 while (ref != bb->start) { in ir_gcm()
636 insn = &ctx->ir_base[ref]; in ir_gcm()
637 _blocks[ref] = b; /* pin to block */ in ir_gcm()
640 ir_list_push_unchecked(&queue_early, ref); in ir_gcm()
645 ref = insn->op1; /* control predecessor */ in ir_gcm()
649 _blocks[ref] = b; /* pin to block */ in ir_gcm()
651 use_list = &ctx->use_lists[ref]; in ir_gcm()
655 ref = *p; in ir_gcm()
656 use_insn = &ctx->ir_base[ref]; in ir_gcm()
659 if (EXPECTED(ctx->use_lists[ref].count != 0)) { in ir_gcm()
660 _blocks[ref] = b; /* pin to block */ in ir_gcm()
661 ir_list_push_unchecked(&queue_early, ref); in ir_gcm()
665 _blocks[ref] = b; /* pin to block */ in ir_gcm()
668 _blocks[ref] = b; /* pin to block */ in ir_gcm()
677 ref = ir_list_at(&queue_early, n); in ir_gcm()
678 insn = &ctx->ir_base[ref]; in ir_gcm()
681 ref = *p; in ir_gcm()
682 if (ref > 0 && _blocks[ref] == 0) { in ir_gcm()
683 ir_gcm_schedule_early(ctx, ref, &queue_late); in ir_gcm()
708 ref = ir_list_at(&queue_late, n); in ir_gcm()
709 b = ctx->cfg_map[ref]; in ir_gcm()
711 ir_gcm_schedule_late(ctx, ref, b); in ir_gcm()
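
The ir_gcm() lines show the driver: each basic block is walked backwards from bb->end along op1 to bb->start, every control instruction is pinned to its block (pinned data such as PARAM/VAR/PHI/PI never reaches the late pass, as its assertions above require), the data inputs of pinned instructions seed queue_early, and then the early pass runs followed by the late pass over queue_late. A sketch of the block-pinning walk only; toy_insn/toy_bb are illustrative, and the real loop also pushes data inputs onto queue_early at each step:

#include <stdint.h>

typedef int32_t toy_ref;

typedef struct { toy_ref op1; } toy_insn;        /* op1 = control predecessor */
typedef struct { toy_ref start, end; } toy_bb;

/* Pin every control instruction of block `b` by walking the control chain
 * backwards from bb->end to bb->start via op1, as the loop above does. */
static void pin_block(const toy_insn *insns, uint32_t *blocks, const toy_bb *bb, uint32_t b)
{
    toy_ref ref = bb->end;
    while (ref != bb->start) {
        blocks[ref] = b;                 /* pin to block */
        ref = insns[ref].op1;            /* control predecessor */
    }
    blocks[ref] = b;                     /* finally pin bb->start itself */
}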
775 IR_ALWAYS_INLINE ir_ref ir_count_constant(ir_ref *_xlat, ir_ref ref) in ir_count_constant() argument
777 if (!_xlat[ref]) { in ir_count_constant()
778 _xlat[ref] = ref; /* this is only a "used constant" marker */ in ir_count_constant()
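
ir_count_constant() only records that a constant is reachable from the scheduled code: a non-zero _xlat[ref] entry acts as a "used constant" marker, and the real translation is filled in later in ir_schedule(). A tiny sketch of the idiom, assuming the caller accumulates the returned count; the negative-index convention mirrors the real _xlat table, which points into the middle of its buffer:

#include <stdint.h>

typedef int32_t toy_ref;

/* Constants have negative refs; `xlat` is assumed to allow negative indices.
 * Returns 1 the first time a constant is seen, 0 afterwards. */
static toy_ref count_constant(toy_ref *xlat, toy_ref ref)
{
    if (!xlat[ref]) {
        xlat[ref] = ref;   /* only a "used constant" marker; the real ref comes later */
        return 1;
    }
    return 0;
}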
787 ir_ref i, j, k, n, *p, *q, ref, new_ref, prev_ref, insns_count, consts_count, use_edges_count; in ir_schedule() local
1063 ref = 1 - consts_count; in ir_schedule()
1064 insn = &ctx->ir_base[ref]; in ir_schedule()
1065 new_insn = &new_ctx.ir_base[ref]; in ir_schedule()
1067 memcpy(new_insn, insn, sizeof(ir_insn) * (IR_TRUE - ref)); in ir_schedule()
1069 while (ref != IR_TRUE) { in ir_schedule()
1087 ref++; in ir_schedule()
1093 for (ref = IR_TRUE - 1, insn = &ctx->ir_base[ref]; ref > -ctx->consts_count; insn--, ref--) { in ir_schedule()
1094 if (!_xlat[ref]) { in ir_schedule()
1122 _xlat[ref] = new_ref; in ir_schedule()
1147 ref = ctx->use_edges[use_list->refs]; in ir_schedule()
1148 if (_xlat[ref]) { in ir_schedule()
1149 *edges = _xlat[ref]; in ir_schedule()
1156 ref = *p; in ir_schedule()
1157 if (_xlat[ref]) { in ir_schedule()
1158 *edges = _xlat[ref]; in ir_schedule()
1245 ref = insn->op1; in ir_schedule()
1246 if (ref) { in ir_schedule()
1247 insn->op1 = ref = _xlat[ref]; in ir_schedule()
1249 insn = &new_ctx.ir_base[ref]; in ir_schedule()
1250 ref = insn->op3; in ir_schedule()
1251 if (!ref) { in ir_schedule()
1254 insn->op3 = ref = _xlat[ref]; in ir_schedule()
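
ir_schedule() finally rebuilds the instruction stream: constants and instructions are copied into the new context, each surviving old ref gets its new ref recorded in _xlat, and operands as well as use-list edges are rewritten through that table (the _xlat[ref] lookups above). A sketch of the operand-remapping idiom only, with toy types; like _xlat, the table is assumed to accept negative (constant) refs:

#include <stdint.h>
#include <stddef.h>

typedef int32_t toy_ref;

typedef struct {
    toy_ref ops[3];   /* operands; 0 = none, negative = constant */
} toy_insn;

/* After the instructions have been copied in their new order, rewrite every
 * operand of every new instruction through the old->new translation table. */
static void remap_operands(toy_insn *insns, size_t insns_count, const toy_ref *xlat)
{
    for (size_t i = 1; i < insns_count; i++) {   /* ref 0 is reserved */
        for (int k = 0; k < 3; k++) {
            toy_ref old = insns[i].ops[k];
            if (old) {
                insns[i].ops[k] = xlat[old];
            }
        }
    }
}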