Searched refs:immb (Results 1 – 2 of 2) sorted by relevance
Result 1 of 2 (all hits in emit_x86_instruction()):
  65   sljit_s32 b, sljit_sw immb)    (function argument)
  102  if (!(b & OFFS_REG_MASK) && NOT_HALFWORD(immb)) {
  104  immb = 0;
  114  if (immb != 0 && !(b & OFFS_REG_MASK)) {
  116  if (immb <= 127 && immb >= -128)
  232  if (immb != 0 || reg_lmap_b == 5) {
  233  if (immb <= 127 && immb >= -128)
  247  if (immb != 0 || reg_lmap_b == 5) {
  248  if (immb <= 127 && immb >= -128)
  249  *buf_ptr++ = U8(immb); /* 8 bit displacement. */
  [all …]
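Judging by the reg_lmap_b and NOT_HALFWORD hits, this result appears to come from the x86-64 side of the emitter. The 64-bit-specific detail is the NOT_HALFWORD(immb) test at line 102: a ModRM memory operand can carry at most a sign-extended 32-bit displacement, so an offset that does not fit is cleared at line 104 and has to be handled outside the displacement field. The sketch below is a minimal standalone illustration of that test, not sljit's own code: the not_halfword() helper, the sljit_sw typedef, and the main() harness are assumptions made for the example; only the fits-in-32-bits rule is taken from the hits.

#include <stdint.h>
#include <stdio.h>

typedef int64_t sljit_sw;  /* machine-word-sized signed integer on x86-64 (illustrative typedef) */

/* Mirrors the idea behind NOT_HALFWORD in the hits above: true when the
   offset cannot be stored in a sign-extended 32-bit displacement field. */
static int not_halfword(sljit_sw imm)
{
	return imm > INT32_MAX || imm < INT32_MIN;
}

int main(void)
{
	sljit_sw offsets[] = { 0, 0x7fffffff, (sljit_sw)1 << 32, -((sljit_sw)1 << 33) };
	for (size_t i = 0; i < sizeof(offsets) / sizeof(offsets[0]); i++) {
		sljit_sw immb = offsets[i];
		if (not_halfword(immb)) {
			/* Like line 104 in the hits: the displacement field cannot hold the
			   offset, so the emitter would zero immb and deal with it elsewhere. */
			printf("%lld: too large for a 32-bit displacement, immb reset to 0\n", (long long)immb);
			immb = 0;
		} else {
			printf("%lld: encodable as a ModRM displacement\n", (long long)immb);
		}
	}
	return 0;
}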
Result 2 of 2:
  In emit_x86_instruction():
    50   sljit_s32 b, sljit_sw immb)    (function argument)
    82   if (immb != 0 && !(b & OFFS_REG_MASK)) {
    84   if (immb <= 127 && immb >= -128)
    170  if (immb != 0 || reg_map_b == 5) {
    171  if (immb <= 127 && immb >= -128)
    185  if (immb != 0 || reg_map_b == 5) {
    186  if (immb <= 127 && immb >= -128)
    187  *buf_ptr++ = U8(immb); /* 8 bit displacement. */
    206  sljit_unaligned_store_sw(buf_ptr, immb); /* 32 bit displacement. */
  In emit_vex_instruction():
    226  sljit_s32 b, sljit_sw immb)    (function argument)
  [all …]
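Both results apply the same displacement-size rule for immb: when the offset is zero, no displacement is emitted at all unless the base register maps to encoding 5 (EBP, or RBP/R13 on x86-64), which has no displacement-free ModRM form; an offset in the signed 8-bit range gets a single byte (lines 187 and 249); anything else gets a full 32-bit field (line 206). A hedged standalone sketch of that decision follows; the disp_kind enum and the choose_disp() helper are made-up names for illustration, and only the numeric thresholds and the register-5 special case are taken from the hits.

#include <stdint.h>
#include <stdio.h>

enum disp_kind { DISP_NONE, DISP_8, DISP_32 };

/* Displacement-size choice mirrored from the hits above. reg_map_b == 5
   corresponds to EBP (or RBP/R13 on x86-64), which cannot be encoded as a
   base register without an explicit displacement. */
static enum disp_kind choose_disp(int64_t immb, int reg_map_b)
{
	if (immb == 0 && reg_map_b != 5)
		return DISP_NONE;              /* no displacement byte emitted */
	if (immb <= 127 && immb >= -128)
		return DISP_8;                 /* 8 bit displacement */
	return DISP_32;                        /* 32 bit displacement */
}

int main(void)
{
	printf("%d %d %d %d\n",
		choose_disp(0, 3),     /* DISP_NONE: zero offset, base is not register 5 */
		choose_disp(0, 5),     /* DISP_8:   zero offset but base is EBP/RBP, byte still needed */
		choose_disp(100, 3),   /* DISP_8:   fits in a signed byte */
		choose_disp(4096, 3)); /* DISP_32:  needs the full field */
	return 0;
}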