Lines matching refs:dst (references to the destination operand dst, and its offset word dstw, in the SLJIT x86 back end). The leading number on each match is the source line number, the trailing note names the enclosing function, and matches tagged "argument" are parameter declarations of functions or macros.

570 sljit_si dst, sljit_sw dstw,
576 sljit_si dst, sljit_sw dstw,
581 sljit_si dst, sljit_sw dstw,
655 sljit_si dst, sljit_sw dstw, in emit_mov() argument
660 if (dst == SLJIT_UNUSED) { in emit_mov()
670 inst = emit_x86_instruction(compiler, 1, src, 0, dst, dstw); in emit_mov()
676 if (FAST_IS_REG(dst)) { in emit_mov()
678 return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw); in emit_mov()
682 return emit_load_imm64(compiler, dst, srcw); in emit_mov()
685 … return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, MOV_r_i32 + reg_lmap[dst], srcw); in emit_mov()
691 inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, dst, dstw); in emit_mov()
697 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw); in emit_mov()
702 if (FAST_IS_REG(dst)) { in emit_mov()
703 inst = emit_x86_instruction(compiler, 1, dst, 0, src, srcw); in emit_mov()
713 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw); in emit_mov()
719 #define EMIT_MOV(compiler, dst, dstw, src, srcw) \ argument
720 FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
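The emit_mov() matches above (source lines 655-713) trace out a dispatch on operand kinds: an SLJIT_UNUSED dst is special-cased up front, a register on either side allows a single mov, an immediate can usually be stored directly, and a memory-to-memory move has to be split across TMP_REG1 because x86 mov has no memory-to-memory form. A minimal standalone sketch of that dispatch, with made-up operand kinds and an emit() stub instead of the real emit_x86_instruction() machinery:

    /* Illustrative only: a toy version of the dispatch visible in emit_mov().
     * The enum, struct and emit() are hypothetical; the real code works
     * through SLJIT operand flags and emit_x86_instruction(). */
    #include <stdio.h>

    enum kind { OP_UNUSED, OP_REG, OP_IMM, OP_MEM };
    struct operand { enum kind kind; long value; };

    static void emit(const char *text) { puts(text); }

    static void toy_mov(struct operand dst, struct operand src)
    {
        if (dst.kind == OP_UNUSED)
            return;                        /* simplification; see line 660 */
        if (src.kind == OP_REG || dst.kind == OP_REG)
            emit("mov dst, src");          /* one instruction: reg<->mem, imm->reg */
        else if (src.kind == OP_IMM)
            emit("mov [dst], imm");        /* wide 64-bit immediates go via a temp instead */
        else {
            emit("mov tmp, [src]");        /* x86 mov has no mem-to-mem form */
            emit("mov [dst], tmp");
        }
    }

    int main(void)
    {
        struct operand reg = { OP_REG, 0 }, mem = { OP_MEM, 0 }, imm = { OP_IMM, 42 };
        toy_mov(reg, imm);
        toy_mov(mem, mem);
        return 0;
    }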
875 sljit_si dst, sljit_sw dstw, in emit_mov_byte() argument
888 if (dst == SLJIT_UNUSED && !(src & SLJIT_MEM)) in emit_mov_byte()
892 if (FAST_IS_REG(dst)) { in emit_mov_byte()
894 return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw); in emit_mov_byte()
896 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0); in emit_mov_byte()
902 …inst = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw… in emit_mov_byte()
908 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1; in emit_mov_byte()
910 if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) { in emit_mov_byte()
924 SLJIT_ASSERT(SLOW_IS_REG(dst)); in emit_mov_byte()
925 if (reg_map[dst] < 4) { in emit_mov_byte()
926 if (dst != src) in emit_mov_byte()
927 EMIT_MOV(compiler, dst, 0, src, 0); in emit_mov_byte()
928 inst = emit_x86_instruction(compiler, 2, dst, 0, dst, 0); in emit_mov_byte()
934 if (dst != src) in emit_mov_byte()
935 EMIT_MOV(compiler, dst, 0, src, 0); in emit_mov_byte()
938 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0); in emit_mov_byte()
942 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0); in emit_mov_byte()
947 inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0); in emit_mov_byte()
963 if (dst & SLJIT_MEM) { in emit_mov_byte()
967 if ((dst & REG_MASK) == SLJIT_R0) { in emit_mov_byte()
968 if ((dst & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_R1)) in emit_mov_byte()
974 if ((dst & OFFS_REG_MASK) != TO_OFFS_REG(SLJIT_R0)) in emit_mov_byte()
976 else if ((dst & REG_MASK) == SLJIT_R1) in emit_mov_byte()
991 inst = emit_x86_instruction(compiler, 1, work_r, 0, dst, dstw); in emit_mov_byte()
1005 inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw); in emit_mov_byte()
1010 inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw); in emit_mov_byte()
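Two details stand out in the emit_mov_byte() matches. The reg_map[dst] < 4 test at line 925 reflects the rule that only the first four 32-bit GPRs (EAX, ECX, EDX, EBX) have byte-addressable low halves, which also appears to be why lines 963-991 hunt for a spare work register around SLJIT_R0/SLJIT_R1 when storing a byte to memory, and why line 1010 forces a REX prefix on x86-64. The fallback at lines 938-947 then looks like a purely arithmetic extension: a shift-left/shift-right pair by 24 for sign extension and an AND with 0xff for zero extension. The arithmetic itself, checked in plain C (this mirrors what SHL/SAR/AND compute on x86, it is not emitter code):

    /* Byte extension the way the shift/mask fallback seems to do it.  The
     * cast keeps the left shift well defined; the arithmetic right shift
     * matches what SAR does on x86. */
    #include <assert.h>
    #include <stdint.h>

    static int32_t sign_extend_byte(uint32_t v)
    {
        return (int32_t)(v << 24) >> 24;   /* shl 24, then sar 24 */
    }

    static uint32_t zero_extend_byte(uint32_t v)
    {
        return v & 0xff;                   /* and 0xff */
    }

    int main(void)
    {
        assert(sign_extend_byte(0x80) == -128);
        assert(sign_extend_byte(0x7f) == 127);
        assert(zero_extend_byte(0x1ff) == 0xff);
        return 0;
    }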
1020 sljit_si dst, sljit_sw dstw, in emit_mov_half() argument
1030 if (dst == SLJIT_UNUSED && !(src & SLJIT_MEM)) in emit_mov_half()
1034 if (FAST_IS_REG(dst)) { in emit_mov_half()
1036 return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw); in emit_mov_half()
1038 inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0); in emit_mov_half()
1044 …instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw); in emit_mov_half()
1050 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1; in emit_mov_half()
1052 if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) in emit_mov_half()
1061 if (dst & SLJIT_MEM) { in emit_mov_half()
1062 inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw); in emit_mov_half()
1071 sljit_si dst, sljit_sw dstw, in emit_unary() argument
1076 if (dst == SLJIT_UNUSED) { in emit_unary()
1084 if (dst == src && dstw == srcw) { in emit_unary()
1086 inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw); in emit_unary()
1092 if (FAST_IS_REG(dst)) { in emit_unary()
1093 EMIT_MOV(compiler, dst, 0, src, srcw); in emit_unary()
1094 inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw); in emit_unary()
1105 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0); in emit_unary()
1110 sljit_si dst, sljit_sw dstw, in emit_not_with_flags() argument
1115 if (dst == SLJIT_UNUSED) { in emit_not_with_flags()
1126 if (FAST_IS_REG(dst)) { in emit_not_with_flags()
1127 EMIT_MOV(compiler, dst, 0, src, srcw); in emit_not_with_flags()
1128 inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw); in emit_not_with_flags()
1132 inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0); in emit_not_with_flags()
1145 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0); in emit_not_with_flags()
1150 sljit_si dst, sljit_sw dstw, in emit_clz() argument
1157 if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) { in emit_clz()
1186 if (FAST_IS_REG(dst)) in emit_clz()
1187 dst_r = dst; in emit_clz()
1190 if ((dst & REG_MASK) != SLJIT_R0 && (dst & OFFS_REG_MASK) != TO_OFFS_REG(SLJIT_R0)) in emit_clz()
1192 else if ((dst & REG_MASK) != SLJIT_R1 && (dst & OFFS_REG_MASK) != TO_OFFS_REG(SLJIT_R1)) in emit_clz()
1196 EMIT_MOV(compiler, dst, dstw, dst_r, 0); in emit_clz()
1200 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2; in emit_clz()
1246 if (dst & SLJIT_MEM) { in emit_clz()
1247 inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw); in emit_clz()
1252 if (dst & SLJIT_MEM) in emit_clz()
1253 EMIT_MOV(compiler, dst, dstw, TMP_REG2, 0); in emit_clz()
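The emit_clz() matches only show how the result register is chosen: dst itself when it is a register, otherwise a temporary, with lines 1190-1196 picking a scratch that does not collide with the base or index register of a memory dst. The count itself is presumably derived from the x86 BSR instruction, which returns the index of the highest set bit; for a nonzero 32-bit value the relation is clz(x) = 31 - bsr(x). A plain-C sanity check of that relation (bsr32 below is a stand-in loop, not generated code):

    /* Relation between "count leading zeros" and x86 BSR, for nonzero x. */
    #include <assert.h>
    #include <stdint.h>

    static int bsr32(uint32_t x)           /* index of highest set bit */
    {
        int i = 0;
        while (x >>= 1)
            i++;
        return i;
    }

    static int clz32(uint32_t x)
    {
        return 31 - bsr32(x);
    }

    int main(void)
    {
        assert(clz32(1) == 31);
        assert(clz32(0x00010000u) == 15);
        assert(clz32(0x80000000u) == 0);
        return 0;
    }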
1259 sljit_si dst, sljit_sw dstw, in sljit_emit_op1() argument
1273 CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw)); in sljit_emit_op1()
1274 ADJUST_LOCAL_OFFSET(dst, dstw); in sljit_emit_op1()
1277 CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1); in sljit_emit_op1()
1290 if (FAST_IS_REG(src) && src == dst) { in sljit_emit_op1()
1337 return emit_mov(compiler, dst, dstw, src, srcw); in sljit_emit_op1()
1351 SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP)); in sljit_emit_op1()
1352 dst = TMP_REG1; in sljit_emit_op1()
1363 FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw)); in sljit_emit_op1()
1366 FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw)); in sljit_emit_op1()
1369 FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw)); in sljit_emit_op1()
1372 FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw)); in sljit_emit_op1()
1375 FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw)); in sljit_emit_op1()
1379 FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw)); in sljit_emit_op1()
1382 FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw)); in sljit_emit_op1()
1388 if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1) in sljit_emit_op1()
1392 …if (SLJIT_UNLIKELY(update) && (dst & SLJIT_MEM) && (dst & REG_MASK) && (dstw != 0 || (dst & OFFS_R… in sljit_emit_op1()
1393 inst = emit_x86_instruction(compiler, 1, dst & REG_MASK, 0, dst, dstw); in sljit_emit_op1()
1406 return emit_not_with_flags(compiler, dst, dstw, src, srcw); in sljit_emit_op1()
1407 return emit_unary(compiler, NOT_rm, dst, dstw, src, srcw); in sljit_emit_op1()
1412 return emit_unary(compiler, NEG_rm, dst, dstw, src, srcw); in sljit_emit_op1()
1417 return emit_clz(compiler, op_flags, dst, dstw, src, srcw); in sljit_emit_op1()
1459 sljit_si dst, sljit_sw dstw, argument
1465 if (dst == SLJIT_UNUSED) {
1478 if (dst == src1 && dstw == src1w) {
1481 …if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))…
1483 if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
1488 BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
1491 else if (FAST_IS_REG(dst)) {
1492 inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
1498 inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
1504 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1512 if (dst == src2 && dstw == src2w) {
1515 …if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))…
1517 if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128)) {
1522 BINARY_IMM(op_imm, op_mr, src1w, dst, dstw);
1525 else if (FAST_IS_REG(dst)) {
1526 inst = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w);
1531 inst = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw);
1537 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1545 if (FAST_IS_REG(dst)) {
1546 EMIT_MOV(compiler, dst, 0, src1, src1w);
1548 BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
1551 inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
1567 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
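In the binary-operation emitter above (source lines 1459-1567), the dst == SLJIT_R0 tests combined with the src2w > 127 || src2w < -128 range check look like the classic x86 encoding trade-off: an immediate that fits in a sign-extended byte always takes the short 83 /r ib form, while a wider immediate is one byte cheaper when the destination is the accumulator, which has its own opcode. A size comparison using ADD, with opcode bytes as given in the architecture manuals (the arrays below are only for illustration):

    /* Why the accumulator path is only taken for immediates outside -128..127. */
    #include <stdio.h>

    static const unsigned char add_eax_imm32[] =        /* add eax, 0x12345678 */
        { 0x05, 0x78, 0x56, 0x34, 0x12 };                /* 5 bytes */
    static const unsigned char add_ecx_imm32[] =        /* add ecx, 0x12345678 */
        { 0x81, 0xC1, 0x78, 0x56, 0x34, 0x12 };          /* 6 bytes */
    static const unsigned char add_ecx_imm8[] =         /* add ecx, 0x12 */
        { 0x83, 0xC1, 0x12 };                            /* 3 bytes */

    int main(void)
    {
        printf("add eax, imm32: %zu bytes\n", sizeof add_eax_imm32);
        printf("add ecx, imm32: %zu bytes\n", sizeof add_ecx_imm32);
        printf("add ecx, imm8:  %zu bytes\n", sizeof add_ecx_imm8);
        return 0;
    }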
1575 sljit_si dst, sljit_sw dstw, argument
1581 if (dst == SLJIT_UNUSED) {
1594 if (dst == src1 && dstw == src1w) {
1597 …if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))…
1599 if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
1604 BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
1607 else if (FAST_IS_REG(dst)) {
1608 inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
1613 inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
1619 inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1627 if (FAST_IS_REG(dst) && dst != src2) {
1628 EMIT_MOV(compiler, dst, 0, src1, src1w);
1630 BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
1633 inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
1649 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1656 sljit_si dst, sljit_sw dstw, argument
1663 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1780 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1786 sljit_si dst, sljit_sw dstw, argument
1795 if (dst == src1 && dstw == src1w)
1797 if (dst == src2 && dstw == src2w)
1801 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1838 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2007 sljit_si dst, sljit_sw dstw, argument
2014 if (dst == src1 && dstw == src1w) {
2015 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw);
2020 if (dst == SLJIT_UNUSED) {
2027 if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
2035 if (FAST_IS_REG(dst)) {
2036 EMIT_MOV(compiler, dst, 0, src1, src1w);
2037 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0);
2047 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
2051 if (dst == SLJIT_PREF_SHIFT_REG) {
2059 else if (FAST_IS_REG(dst) && dst != src2 && !ADDRESSING_DEPENDS_ON(src2, dst)) {
2060 if (src1 != dst)
2061 EMIT_MOV(compiler, dst, 0, src1, src1w);
2064 inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0);
2088 EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
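The shuffling around SLJIT_PREF_SHIFT_REG in emit_shift() (lines 2027-2064) follows from the x86 rule that a variable shift count must sit in CL: when dst or src2 already occupies ECX, or the address of dst depends on it, the operands are rotated through temporaries first. The same constraint can be expressed at the C level with the "c" register class; a small GCC/Clang, x86-only illustration (not SLJIT code):

    /* x86-only, GCC/Clang inline asm: the "c" constraint forces the count
     * into ECX so the variable-count form "shl r/m32, cl" can be used. */
    #include <assert.h>

    static unsigned shl_var(unsigned value, unsigned count)
    {
        __asm__("shll %%cl, %0" : "+r"(value) : "c"(count) : "cc");
        return value;
    }

    int main(void)
    {
        assert(shl_var(3, 4) == 48);
        return 0;
    }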
2096 sljit_si dst, sljit_sw dstw, argument
2104 return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2107 return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2110 return emit_mov(compiler, dst, dstw, src1, src1w);
2113 dst, dstw, src1, src1w, SLJIT_IMM, 0);
2117 return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2119 if (!FAST_IS_REG(dst))
2122 FAIL_IF(emit_shift(compiler,mode, dst, dstw, src1, src1w, src2, src2w));
2124 if (FAST_IS_REG(dst))
2125 return emit_cmp_binary(compiler, dst, dstw, SLJIT_IMM, 0);
2130 sljit_si dst, sljit_sw dstw, argument
2135 CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
2136 ADJUST_LOCAL_OFFSET(dst, dstw);
2140 CHECK_EXTRA_REGS(dst, dstw, (void)0);
2157 …if (emit_lea_binary(compiler, op & SLJIT_KEEP_FLAGS, dst, dstw, src1, src1w, src2, src2w) != SLJIT…
2165 dst, dstw, src1, src1w, src2, src2w);
2174 dst, dstw, src1, src1w, src2, src2w);
2177 …if ((src2 & SLJIT_IMM) && emit_lea_binary(compiler, op & SLJIT_KEEP_FLAGS, dst, dstw, src1, src1w,…
2184 if (dst == SLJIT_UNUSED)
2187 dst, dstw, src1, src1w, src2, src2w);
2196 dst, dstw, src1, src1w, src2, src2w);
2198 return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w);
2200 if (dst == SLJIT_UNUSED)
2203 dst, dstw, src1, src1w, src2, src2w);
2206 dst, dstw, src1, src1w, src2, src2w);
2209 dst, dstw, src1, src1w, src2, src2w);
2212 dst, dstw, src1, src1w, src2, src2w);
2215 dst, dstw, src1, src1w, src2, src2w);
2218 dst, dstw, src1, src1w, src2, src2w);
2314 sljit_si single, sljit_si dst, sljit_si src, sljit_sw srcw) argument
2316 return emit_sse2(compiler, MOVSD_x_xm, single, dst, src, srcw);
2320 sljit_si single, sljit_si dst, sljit_sw dstw, sljit_si src) argument
2322 return emit_sse2(compiler, MOVSD_xm_x, single, src, dst, dstw);
2326 sljit_si dst, sljit_sw dstw, argument
2329 sljit_si dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
2342 if (dst_r == TMP_REG1 && dst != SLJIT_UNUSED)
2343 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2348 sljit_si dst, sljit_sw dstw, argument
2351 sljit_si dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
2378 return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
2395 sljit_si dst, sljit_sw dstw, argument
2405 SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
2408 if (FAST_IS_REG(dst))
2409 return emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst, src, srcw);
2411 return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, src);
2413 return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
2417 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
2431 return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
2435 if (SLOW_IS_REG(dst)) {
2436 dst_r = dst;
2437 if (dst != src)
2456 return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
2461 sljit_si dst, sljit_sw dstw, argument
2468 CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
2469 ADJUST_LOCAL_OFFSET(dst, dstw);
2477 if (FAST_IS_REG(dst)) {
2478 dst_r = dst;
2479 if (dst == src1)
2481 else if (dst == src2 && (op == SLJIT_DADD || op == SLJIT_DMUL)) {
2486 else if (dst != src2)
2517 return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG);
2661 sljit_si dst, sljit_sw dstw, argument
2671 sljit_si dst_save = dst;
2676 CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type));
2679 if (dst == SLJIT_UNUSED)
2682 ADJUST_LOCAL_OFFSET(dst, dstw);
2683 CHECK_EXTRA_REGS(dst, dstw, (void)0);
2692 if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && dst == src) {
2701 *inst++ = REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B);
2703 *inst++ = MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst];
2707 reg = (op == SLJIT_MOV && FAST_IS_REG(dst)) ? dst : TMP_REG1;
2727 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2733 return sljit_emit_op2(compiler, op, dst, dstw, dst, dstw, TMP_REG1, 0);
2735 if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) {
2736 if (reg_map[dst] <= 4) {
2744 *inst++ = MOD_REG | reg_map[dst];
2748 *inst = MOD_REG | (reg_map[dst] << 3) | reg_map[dst];
2759 EMIT_MOV(compiler, dst, 0, SLJIT_IMM, 0);
2768 *inst++ = MOD_REG | (reg_map[dst] << 3) | reg_map[TMP_REG1];
2783 *inst++ = MOD_REG | (reg_map[dst] << 3) | 0 /* eax */;
2788 …OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && dst == src && reg_map[dst] <= …
2790 if (dst != SLJIT_R0) {
2800 *inst++ = MOD_REG | (0 /* eax */ << 3) | reg_map[dst];
2838 return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
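sljit_emit_op_flags() assembles its SETcc sequences byte by byte, which is what the raw *inst++ = MOD_REG | ... stores above are doing: a 0F 9x opcode followed by a register-direct ModRM byte, usually paired with a preceding clear of dst or a following widening move so the whole register ends up holding 0 or 1, and guarded by reg_map[dst] checks because SETcc can only target a byte-addressable register on 32-bit x86. A small standalone sketch of that encoding step (MOD_REG and the 0F 9x opcodes are standard x86; the helper and its name are made up):

    /* Encode "setcc r8" into a buffer by hand, as the op_flags code does.
     * cond is the low nibble of the 0F 9x opcode, e.g. 0x5 for SETNE. */
    #include <stdio.h>
    #include <stdint.h>
    #include <stddef.h>

    #define MOD_REG 0xC0                         /* mod = 11: register-direct */

    static size_t encode_setcc_reg(uint8_t *out, unsigned cond, unsigned rm)
    {
        out[0] = 0x0F;                           /* two-byte opcode escape */
        out[1] = (uint8_t)(0x90 | (cond & 0x0F));
        out[2] = (uint8_t)(MOD_REG | (rm & 7));  /* reg field unused by SETcc */
        return 3;
    }

    int main(void)
    {
        uint8_t buf[3];
        size_t len = encode_setcc_reg(buf, 0x5, 0);      /* setne al */
        for (size_t i = 0; i < len; i++)
            printf("%02X ", buf[i]);                     /* prints: 0F 95 C0 */
        putchar('\n');
        return 0;
    }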
2848 …TE sljit_si sljit_get_local_base(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, slj… argument
2851 CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
2852 ADJUST_LOCAL_OFFSET(dst, dstw);
2854 CHECK_EXTRA_REGS(dst, dstw, (void)0);
2866 …SLJIT_ASSERT(emit_lea_binary(compiler, SLJIT_KEEP_FLAGS, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0) != S…
2869 return emit_lea_binary(compiler, SLJIT_KEEP_FLAGS, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0);
2875 return emit_lea_binary(compiler, SLJIT_KEEP_FLAGS, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
2876 return emit_mov(compiler, dst, dstw, SLJIT_SP, 0);
2879 …ct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, slj… argument
2888 CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
2889 ADJUST_LOCAL_OFFSET(dst, dstw);
2891 CHECK_EXTRA_REGS(dst, dstw, (void)0);
2899 reg = SLOW_IS_REG(dst) ? dst : TMP_REG1;
2904 if (dst == SLJIT_UNUSED)
2905 dst = TMP_REG1;
2907 if (emit_mov(compiler, dst, dstw, SLJIT_IMM, init_value))
2918 if (dst & SLJIT_MEM)
2919 if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
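Finally, sljit_emit_const() (line 2879 onward) follows the usual patchable-constant pattern: materialize init_value with an ordinary mov into dst or TMP_REG1, store it out if dst is memory, and remember where the immediate landed so it can be rewritten later without regenerating code (the sljit_set_const entry point elsewhere in the library). A toy, self-contained version of that record-then-patch idea, with a little-endian byte buffer standing in for generated code and none of the names taken from SLJIT:

    /* Emit "mov eax, imm32" with a placeholder, remember the immediate's
     * offset, patch it afterwards.  Little-endian layout, as on x86. */
    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    struct const_ref { size_t imm_offset; };     /* where the 32-bit immediate lives */

    static size_t emit_mov_eax_imm32(uint8_t *code, size_t at, uint32_t value,
                                     struct const_ref *ref)
    {
        code[at] = 0xB8;                          /* mov eax, imm32 */
        memcpy(code + at + 1, &value, 4);
        ref->imm_offset = at + 1;                 /* remember the slot to patch */
        return at + 5;
    }

    static void patch_const(uint8_t *code, const struct const_ref *ref, uint32_t value)
    {
        memcpy(code + ref->imm_offset, &value, 4);
    }

    int main(void)
    {
        uint8_t code[16];
        struct const_ref ref;
        size_t end = emit_mov_eax_imm32(code, 0, 0, &ref);
        patch_const(code, &ref, 0x12345678);
        assert(end == 5 && code[1] == 0x78);      /* low byte first */
        return 0;
    }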