Lines matching refs: immb
65 sljit_s32 b, sljit_sw immb) in emit_x86_instruction() argument
102 if (!(b & OFFS_REG_MASK) && NOT_HALFWORD(immb)) { in emit_x86_instruction()
103 PTR_FAIL_IF(emit_load_imm64(compiler, TMP_REG2, immb)); in emit_x86_instruction()
104 immb = 0; in emit_x86_instruction()
114 if (immb != 0 && !(b & OFFS_REG_MASK)) { in emit_x86_instruction()
116 if (immb <= 127 && immb >= -128) in emit_x86_instruction()
122 if ((b & OFFS_REG_MASK) && (immb & 0x3) == 0 && reg_lmap[OFFS_REG(b)] != 5) in emit_x86_instruction()
232 if (immb != 0 || reg_lmap_b == 5) { in emit_x86_instruction()
233 if (immb <= 127 && immb >= -128) in emit_x86_instruction()
247 if (immb != 0 || reg_lmap_b == 5) { in emit_x86_instruction()
248 if (immb <= 127 && immb >= -128) in emit_x86_instruction()
249 *buf_ptr++ = U8(immb); /* 8 bit displacement. */ in emit_x86_instruction()
251 sljit_unaligned_store_s32(buf_ptr, (sljit_s32)immb); /* 32 bit displacement. */ in emit_x86_instruction()
260 buf_ptr[1] = U8(reg_lmap_b | (reg_lmap[OFFS_REG(b)] << 3) | (immb << 6)); in emit_x86_instruction()
270 sljit_unaligned_store_s32(buf_ptr, (sljit_s32)immb); /* 32 bit displacement. */ in emit_x86_instruction()
290 sljit_s32 b, sljit_sw immb) in emit_vex_instruction() argument
331 inst = emit_x86_instruction(compiler, size, a, 0, b, immb); in emit_vex_instruction()
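Taken together, these matches show how emit_x86_instruction() sizes the displacement immb of the memory operand b: an offset that fits in a signed byte is emitted as an 8-bit displacement, anything else as a 32-bit one, and an offset that does not fit in 32 bits (NOT_HALFWORD, lines 102-104) is first loaded into TMP_REG2 so the encoded displacement becomes 0. A base register whose low 3-bit encoding is 5 (RBP/R13, reg_lmap_b == 5 at lines 232 and 247) always needs at least an 8-bit displacement. The sketch below is only a minimal illustration of that decision, not sljit code; disp_size() and its argument names are hypothetical:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical helper, not part of sljit: number of displacement bytes an
       x86 ModR/M memory operand needs for a given offset.  Assumes the offset
       already fits in a signed 32-bit value; sljit handles larger offsets
       earlier by loading them into TMP_REG2 (lines 102-104 above).
         0 -> no displacement      (offset 0 and base encoding is not 5)
         1 -> 8-bit displacement   (offset fits in a signed byte)
         4 -> 32-bit displacement  (everything else) */
    static int disp_size(int64_t offset, int base_encodes_as_5)
    {
        if (offset == 0 && !base_encodes_as_5)
            return 0;
        if (offset <= 127 && offset >= -128)
            return 1;
        return 4;
    }

    int main(void)
    {
        printf("%d %d %d %d\n",
            disp_size(0, 0),     /* 0: [reg] needs no displacement */
            disp_size(0, 1),     /* 1: [rbp]/[r13] still need a zero disp8 */
            disp_size(100, 0),   /* 1: fits in a signed byte */
            disp_size(4096, 0)); /* 4: needs a full disp32 */
        return 0;
    }

For a scaled-index operand (line 260), immb does not hold an offset at all but the shift amount, which is placed directly in the SIB scale bits via immb << 6. emit_vex_instruction() (lines 290 and 331) passes b and immb through to emit_x86_instruction() unchanged, so the same displacement handling applies to VEX-encoded instructions.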