Lines Matching refs:code_buffer
63 static bool aarch64_may_use_b(ir_code_buffer *code_buffer, const void *addr)
65 if (code_buffer) {
66 if (addr >= code_buffer->start && (char*)addr < (char*)code_buffer->end) {
67 return (((char*)code_buffer->end - (char*)code_buffer->start) < B_IMM);
68 } else if ((char*)addr >= (char*)code_buffer->end) {
69 return (((char*)addr - (char*)code_buffer->start) < B_IMM);
70 } else if (addr < code_buffer->start) {
71 return (((char*)code_buffer->end - (char*)addr) < B_IMM);
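The check is deliberately conservative: the exact call site that will use the branch is not known yet, so the helper bounds the distance from the worst-placed end of the buffer instead. A minimal standalone illustration of the three cases, assuming B_IMM encodes the +/-128 MB reach of the AArch64 B/BL instruction; the struct, addresses, and sizes below are invented for the example:

    #include <stdbool.h>
    #include <stdio.h>

    #define B_IMM (1 << 27)                          /* +/-128 MB */

    typedef struct { char *start, *end; } buf_t;     /* stand-in for ir_code_buffer */

    static bool may_use_b(const buf_t *b, const char *a)
    {
        if (a >= b->start && a < b->end)             /* target inside the buffer */
            return (b->end - b->start) < B_IMM;
        if (a >= b->end)                             /* target above the buffer  */
            return (a - b->start) < B_IMM;
        return (b->end - a) < B_IMM;                 /* target below the buffer  */
    }

    int main(void)
    {
        char *base = (char*)0x100000000;             /* fake pointers, never dereferenced */
        buf_t b = { base, base + (16 << 20) };       /* a 16 MB JIT buffer */
        printf("%d\n", may_use_b(&b, base + (100 << 20))); /* 1: worst case 100 MB < 128 MB */
        printf("%d\n", may_use_b(&b, base + (200 << 20))); /* 0: worst case 200 MB > 128 MB */
        return 0;
    }
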
78 static bool aarch64_may_use_adr(ir_code_buffer *code_buffer, const void *addr)
80 if (code_buffer) {
81 if (addr >= code_buffer->start && (char*)addr < (char*)code_buffer->end) {
82 return (((char*)code_buffer->end - (char*)code_buffer->start) < ADR_IMM);
83 } else if ((char*)addr >= (char*)code_buffer->end) {
84 return (((char*)addr - (char*)code_buffer->start) < ADR_IMM);
85 } else if (addr < code_buffer->start) {
86 return (((char*)code_buffer->end - (char*)addr) < ADR_IMM);
92 static bool aarch64_may_use_adrp(ir_code_buffer *code_buffer, const void *addr)
94 if (code_buffer) {
95 if (addr >= code_buffer->start && (char*)addr < (char*)code_buffer->end) {
96 return (((char*)code_buffer->end - (char*)code_buffer->start) < ADRP_IMM);
97 } else if ((char*)addr >= (char*)code_buffer->end) {
98 return (((char*)addr - (char*)code_buffer->start) < ADRP_IMM);
99 } else if (addr < code_buffer->start) {
100 return (((char*)code_buffer->end - (char*)addr) < ADRP_IMM);
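The adr and adrp variants at 78-86 and 92-100 repeat the same three-way test verbatim, only swapping the limit for the +/-1 MB reach of adr and the +/-4 GB reach of adrp. A hypothetical consolidation, not present in the source, would factor the shared shape into one parameterized helper (the NULL-buffer tail is assumed, since the originals' closing lines do not match the search):

    static bool aarch64_may_use_rel(ir_code_buffer *code_buffer, const void *addr, int64_t limit)
    {
        if (code_buffer) {
            if (addr >= code_buffer->start && (char*)addr < (char*)code_buffer->end) {
                return ((char*)code_buffer->end - (char*)code_buffer->start) < limit;
            } else if ((char*)addr >= (char*)code_buffer->end) {
                return ((char*)addr - (char*)code_buffer->start) < limit;
            } else {
                return ((char*)code_buffer->end - (char*)addr) < limit;
            }
        }
        return 0;   /* assumed: no buffer, no range guarantee */
    }
    /* aarch64_may_use_b(cb, a)    -> aarch64_may_use_rel(cb, a, B_IMM)    */
    /* aarch64_may_use_adr(cb, a)  -> aarch64_may_use_rel(cb, a, ADR_IMM)  */
    /* aarch64_may_use_adrp(cb, a) -> aarch64_may_use_rel(cb, a, ADRP_IMM) */
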
4851 if (aarch64_may_use_b(ctx->code_buffer, addr)) {
4926 if (aarch64_may_use_b(ctx->code_buffer, addr)) {
4959 if (aarch64_may_use_b(ctx->code_buffer, addr)) {
4985 if (aarch64_may_use_b(ctx->code_buffer, addr)) {
5147 if (aarch64_may_use_b(ctx->code_buffer, addr)) {
5310 if (aarch64_may_use_b(ctx->code_buffer, addr)) {
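All six call sites (4851-5310) gate the cheap path on the same predicate: one 4-byte bl when the target is provably in range, otherwise the full 64-bit address must be materialized in a scratch register and called through blr. A self-contained sketch of the two shapes with hand-assembled encodings; the helper name, the choice of x17 as scratch, and the MOVZ/MOVK sequence are illustrative assumptions, not the emitter's actual output:

    #include <stdint.h>

    /* Sketch only: emit either a direct BL or MOVZ/MOVK+BLR at `code`.
     * Returns the number of 32-bit instructions written. */
    static int emit_call(uint32_t *code, uint64_t pc, uint64_t target, int in_range)
    {
        if (in_range) {                      /* aarch64_may_use_b() said yes */
            int64_t off = (int64_t)(target - pc) >> 2;
            code[0] = 0x94000000u | ((uint32_t)off & 0x03FFFFFFu);          /* BL target */
            return 1;
        }
        code[0] = 0xD2800011u | ((uint32_t)(target & 0xFFFF) << 5);         /* MOVZ x17,#a0         */
        code[1] = 0xF2A00011u | ((uint32_t)((target >> 16) & 0xFFFF) << 5); /* MOVK x17,#a1,lsl #16 */
        code[2] = 0xF2C00011u | ((uint32_t)((target >> 32) & 0xFFFF) << 5); /* MOVK x17,#a2,lsl #32 */
        code[3] = 0xF2E00011u | ((uint32_t)((target >> 48) & 0xFFFF) << 5); /* MOVK x17,#a3,lsl #48 */
        code[4] = 0xD63F0220u;                                              /* BLR x17              */
        return 5;
    }
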
6251 if (ctx->code_buffer) {
6252 entry = ctx->code_buffer->pos;
6254 if (size > (size_t)((char*)ctx->code_buffer->end - (char*)entry)) {
6259 ctx->code_buffer->pos = (char*)entry + size;
6282 if (ctx->code_buffer) {
6283 if (ctx->code_buffer->pos == (char*)entry + size) {
6285 ctx->code_buffer->pos = (char*)entry;
6320 if (ctx->code_buffer) {
6321 size = (char*)ctx->code_buffer->pos - (char*)entry;
6326 if (!ctx->code_buffer) {
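Lines 6251-6326 show the buffer being used as a bump allocator around a single emission: 6252-6259 reserve the bytes up front, 6282-6285 give them back if encoding fails and the reservation is still the topmost one, and 6320-6321 recompute the final size from how far pos actually advanced, with 6326 beginning the fallback path taken when no shared buffer is installed. A compact sketch of that reserve/rollback protocol, with invented names (any alignment adjustment the real code makes between lines 6252 and 6254 is omitted):

    #include <stddef.h>

    typedef struct { void *start, *end, *pos; } code_buf_t;  /* stand-in */

    /* Reserve `size` bytes at the current position, bumping pos past them. */
    static void *reserve(code_buf_t *cb, size_t size)
    {
        void *entry = cb->pos;
        if (size > (size_t)((char*)cb->end - (char*)entry)) {
            return NULL;                        /* buffer exhausted */
        }
        cb->pos = (char*)entry + size;
        return entry;
    }

    /* Undo a failed reservation, but only if it is still the topmost one. */
    static void rollback(code_buf_t *cb, void *entry, size_t size)
    {
        if (cb->pos == (char*)entry + size) {
            cb->pos = entry;                    /* give the bytes back */
        }
    }
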
6334 void *ir_emit_exitgroup(uint32_t first_exit_point, uint32_t exit_points_per_group, const void *exit_addr, ir_code_buffer *code_buffer, size_t *size_ptr)
6342 IR_ASSERT(code_buffer);
6343 IR_ASSERT(aarch64_may_use_b(code_buffer, exit_addr));
6372 entry = code_buffer->pos;
6374 if (size > (size_t)((char*)code_buffer->end - (char*)entry)) {
6377 code_buffer->pos = (char*)entry + size;
6384 if (code_buffer->pos == (char*)entry + size) {
6386 code_buffer->pos = (char*)entry;
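6334-6386 emit a whole group of side-exit stubs in one reservation, and the assertion at 6343 pins down the contract: the shared exit handler must be reachable with a plain b from anywhere in the buffer, so no per-stub range check is needed. One plausible layout that satisfies that contract (a guess at the shape, not a transcription of ir_emit_exitgroup): n one-instruction bl stubs falling into a common tail, which lets the handler recover the exit number from the link register:

    #include <stdint.h>
    #include <stddef.h>

    /* Hypothetical exit-group layout: stub i sits at base + i*4, so the
     * handler can compute (LR - base)/4 - 1 to identify the exit point. */
    static size_t emit_exit_group(uint32_t *code, uint64_t base,
                                  uint32_t n, uint64_t exit_addr)
    {
        uint32_t i;
        for (i = 0; i < n; i++) {
            uint32_t off = n - i;                            /* stub -> tail, in insns */
            code[i] = 0x94000000u | (off & 0x03FFFFFFu);     /* BL tail */
        }
        /* common tail: direct branch to the exit handler; the caller
         * already asserted aarch64_may_use_b(code_buffer, exit_addr) */
        {
            int64_t off = (int64_t)(exit_addr - (base + (uint64_t)n * 4)) >> 2;
            code[n] = 0x14000000u | ((uint32_t)off & 0x03FFFFFFu);  /* B exit_addr */
        }
        return ((size_t)n + 1) * 4;                          /* bytes written */
    }
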
6406 IR_ASSERT(ctx && ctx->code_buffer);
6444 veneer = ctx->code_buffer->pos;
6445 if ((char*)ctx->code_buffer->end - (char*)veneer < 4) {
6504 ctx->code_buffer->pos = (char*)ctx->code_buffer->pos + 4;
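6406-6504 are the veneer path: AArch64 conditional branches reach only +/-1 MB (and tbz/tbnz only +/-32 KB), so when an out-of-range branch is reported, a 4-byte island is appended at code_buffer->pos; 6445 checks that one instruction still fits and 6504 bumps pos past it. A standalone sketch of such a one-instruction veneer:

    #include <stdint.h>

    /* Sketch: the short-range branch is retargeted at this veneer, and the
     * veneer's unconditional B (+/-128 MB reach) carries the jump onward. */
    static uint32_t *emit_veneer(uint32_t *pos, uint64_t target)
    {
        int64_t off = (int64_t)(target - (uint64_t)(uintptr_t)pos) >> 2;
        *pos = 0x14000000u | ((uint32_t)off & 0x03FFFFFFu);  /* B target */
        return pos + 1;                                      /* pos += 4 */
    }
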
6509 bool ir_needs_thunk(ir_code_buffer *code_buffer, void *addr)
6511 return !aarch64_may_use_b(code_buffer, addr);
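6509-6511 state the thunk policy in one line: a thunk is needed exactly when a direct b cannot reach addr. A hypothetical caller-side wrapper for the pair (the function below is not part of the API, and the NULL result is assumed to signal a full buffer):

    /* Hypothetical usage: route a call through an out-of-line thunk only
     * when the direct branch cannot reach the real target. */
    static void *resolve_call_target(ir_code_buffer *code_buffer, void *addr)
    {
        if (ir_needs_thunk(code_buffer, addr)) {
            size_t thunk_size;
            return ir_emit_thunk(code_buffer, addr, &thunk_size);
        }
        return addr;
    }
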
6514 void *ir_emit_thunk(ir_code_buffer *code_buffer, void *addr, size_t *size_ptr)
6541 entry = code_buffer->pos;
6543 if (size > (size_t)((char*)code_buffer->end - (char*)entry)) {
6555 code_buffer->pos = (char*)code_buffer->pos + size;
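6514-6555 follow the same reservation discipline as ir_emit_code: check headroom at 6541-6543, advance pos by the emitted size at 6555. The thunk body itself must provide full 64-bit reach in a few bytes; a classic 16-byte layout, assumed here rather than taken from the source, is a PC-relative literal load plus an indirect branch:

    #include <stdint.h>
    #include <string.h>

    /* Hypothetical 16-byte thunk: LDR x17, .+8 ; BR x17 ; .quad target */
    static size_t emit_thunk_body(uint8_t *pos, uint64_t target)
    {
        uint32_t insn[2] = {
            0x58000051u,    /* LDR x17, .+8  (loads the literal below) */
            0xD61F0220u,    /* BR  x17                                 */
        };
        memcpy(pos, insn, sizeof(insn));
        memcpy(pos + 8, &target, sizeof(target));   /* the 8-byte literal */
        return 16;
    }
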