1 /*
2  *    Stack-less Just-In-Time compiler
3  *
4  *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without modification, are
7  * permitted provided that the following conditions are met:
8  *
9  *   1. Redistributions of source code must retain the above copyright notice, this list of
10  *      conditions and the following disclaimer.
11  *
12  *   2. Redistributions in binary form must reproduce the above copyright notice, this list
13  *      of conditions and the following disclaimer in the documentation and/or other materials
14  *      provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19  * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21  * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22  * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  */
26 
27 SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
28 {
29 #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
30 	return "x86" SLJIT_CPUINFO " ABI:fastcall";
31 #else
32 	return "x86" SLJIT_CPUINFO;
33 #endif
34 }
35 
36 /*
37    32b register indexes:
38      0 - EAX
39      1 - ECX
40      2 - EDX
41      3 - EBX
42      4 - ESP
43      5 - EBP
44      6 - ESI
45      7 - EDI
46 */
47 
48 /*
49    64b register indexes:
50      0 - RAX
51      1 - RCX
52      2 - RDX
53      3 - RBX
54      4 - RSP
55      5 - RBP
56      6 - RSI
57      7 - RDI
58      8 - R8   - From now on REX prefix is required
59      9 - R9
60     10 - R10
61     11 - R11
62     12 - R12
63     13 - R13
64     14 - R14
65     15 - R15
66 */
67 
68 #define TMP_FREG	(0)
69 
70 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
71 
72 /* Last register + 1. */
73 #define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
74 
75 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
76 	0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 7, 6, 3, 4, 5
77 };
78 
79 #define CHECK_EXTRA_REGS(p, w, do) \
80 	if (p >= SLJIT_R3 && p <= SLJIT_S3) { \
81 		if (p <= compiler->scratches) \
82 			w = compiler->scratches_offset + ((p) - SLJIT_R3) * SSIZE_OF(sw); \
83 		else \
84 			w = compiler->locals_offset + ((p) - SLJIT_S2) * SSIZE_OF(sw); \
85 		p = SLJIT_MEM1(SLJIT_SP); \
86 		do; \
87 	}
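/* Illustrative note: on x86-32 the SLJIT_R3 .. SLJIT_S3 registers are virtual and live in
   stack slots.  The macro above rewrites such operands in place; e.g. an SLJIT_R3 operand
   that is still within compiler->scratches becomes SLJIT_MEM1(SLJIT_SP) with the offset
   compiler->scratches_offset + (SLJIT_R3 - SLJIT_R3) * SSIZE_OF(sw). */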
88 
89 #else /* SLJIT_CONFIG_X86_32 */
90 
91 /* Last register + 1. */
92 #define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
93 #define TMP_REG2	(SLJIT_NUMBER_OF_REGISTERS + 3)
94 
95 /* Note: r12 & 0x7 == 0b100, which is decoded as "SIB byte present"
96    Note: avoid using r12 and r13 for memory addressing;
97    therefore r12 is better used as a higher saved register. */
98 #ifndef _WIN64
99 /* Args: rdi(=7), rsi(=6), rdx(=2), rcx(=1), r8, r9. Scratches: rax(=0), r10, r11 */
100 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
101 	0, 0, 6, 7, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 9
102 };
103 /* low-map. reg_map & 0x7. */
104 static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
105 	0, 0, 6, 7, 1, 0, 3,  2,  4,  5,  5,  6,  7, 3, 4, 2, 1
106 };
107 #else
108 /* Args: rcx(=1), rdx(=2), r8, r9. Scratches: rax(=0), r10, r11 */
109 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
110 	0, 0, 2, 8, 1, 11, 12, 5, 13, 14, 15, 7, 6, 3, 4, 9, 10
111 };
112 /* low-map. reg_map & 0x7. */
113 static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
114 	0, 0, 2, 0, 1,  3,  4, 5,  5,  6,  7, 7, 6, 3, 4, 1,  2
115 };
116 #endif
117 
118 /* Args: xmm0-xmm3 */
119 static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
120 	4, 0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
121 };
122 /* low-map. freg_map & 0x7. */
123 static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
124 	4, 0, 1, 2, 3, 5, 6, 7, 0, 1, 2,  3,  4,  5,  6,  7
125 };
126 
127 #define REX_W		0x48
128 #define REX_R		0x44
129 #define REX_X		0x42
130 #define REX_B		0x41
131 #define REX		0x40
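/* Encoding example (for reference only): the REX bits extend the register fields of the
   ModRM/SIB bytes.  "mov rax, r9" is emitted as REX_W | REX_R (0x4c), MOV_rm_r (0x89) and
   the ModRM byte 0xc8, i.e. 0xc0 | (lower three bits of R9 << 3) | lower three bits of RAX. */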
132 
133 #ifndef _WIN64
134 #define HALFWORD_MAX 0x7fffffffl
135 #define HALFWORD_MIN -0x80000000l
136 #else
137 #define HALFWORD_MAX 0x7fffffffll
138 #define HALFWORD_MIN -0x80000000ll
139 #endif
140 
141 #define IS_HALFWORD(x)		((x) <= HALFWORD_MAX && (x) >= HALFWORD_MIN)
142 #define NOT_HALFWORD(x)		((x) > HALFWORD_MAX || (x) < HALFWORD_MIN)
143 
144 #define CHECK_EXTRA_REGS(p, w, do)
145 
146 #endif /* SLJIT_CONFIG_X86_32 */
147 
148 #define U8(v)			((sljit_u8)(v))
149 
150 
151 /* Size flags for emit_x86_instruction: */
152 #define EX86_BIN_INS		0x0010
153 #define EX86_SHIFT_INS		0x0020
154 #define EX86_REX		0x0040
155 #define EX86_NO_REXW		0x0080
156 #define EX86_BYTE_ARG		0x0100
157 #define EX86_HALF_ARG		0x0200
158 #define EX86_PREF_66		0x0400
159 #define EX86_PREF_F2		0x0800
160 #define EX86_PREF_F3		0x1000
161 #define EX86_SSE2_OP1		0x2000
162 #define EX86_SSE2_OP2		0x4000
163 #define EX86_SSE2		(EX86_SSE2_OP1 | EX86_SSE2_OP2)
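/* As used by the call sites below, the low bits of the size/flags word passed to
   emit_x86_instruction() appear to hold the opcode length in bytes (1, or 2 for
   0x0f-prefixed opcodes); the EX86_* flags are simply OR-ed on top of it. */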
164 
165 /* --------------------------------------------------------------------- */
166 /*  Instruction forms                                                    */
167 /* --------------------------------------------------------------------- */
168 
169 #define ADD		(/* BINARY */ 0 << 3)
170 #define ADD_EAX_i32	0x05
171 #define ADD_r_rm	0x03
172 #define ADD_rm_r	0x01
173 #define ADDSD_x_xm	0x58
174 #define ADC		(/* BINARY */ 2 << 3)
175 #define ADC_EAX_i32	0x15
176 #define ADC_r_rm	0x13
177 #define ADC_rm_r	0x11
178 #define AND		(/* BINARY */ 4 << 3)
179 #define AND_EAX_i32	0x25
180 #define AND_r_rm	0x23
181 #define AND_rm_r	0x21
182 #define ANDPD_x_xm	0x54
183 #define BSR_r_rm	(/* GROUP_0F */ 0xbd)
184 #define CALL_i32	0xe8
185 #define CALL_rm		(/* GROUP_FF */ 2 << 3)
186 #define CDQ		0x99
187 #define CMOVE_r_rm	(/* GROUP_0F */ 0x44)
188 #define CMP		(/* BINARY */ 7 << 3)
189 #define CMP_EAX_i32	0x3d
190 #define CMP_r_rm	0x3b
191 #define CMP_rm_r	0x39
192 #define CVTPD2PS_x_xm	0x5a
193 #define CVTSI2SD_x_rm	0x2a
194 #define CVTTSD2SI_r_xm	0x2c
195 #define DIV		(/* GROUP_F7 */ 6 << 3)
196 #define DIVSD_x_xm	0x5e
197 #define FSTPS		0xd9
198 #define FSTPD		0xdd
199 #define INT3		0xcc
200 #define IDIV		(/* GROUP_F7 */ 7 << 3)
201 #define IMUL		(/* GROUP_F7 */ 5 << 3)
202 #define IMUL_r_rm	(/* GROUP_0F */ 0xaf)
203 #define IMUL_r_rm_i8	0x6b
204 #define IMUL_r_rm_i32	0x69
205 #define JE_i8		0x74
206 #define JNE_i8		0x75
207 #define JMP_i8		0xeb
208 #define JMP_i32		0xe9
209 #define JMP_rm		(/* GROUP_FF */ 4 << 3)
210 #define LEA_r_m		0x8d
211 #define LOOP_i8		0xe2
212 #define MOV_r_rm	0x8b
213 #define MOV_r_i32	0xb8
214 #define MOV_rm_r	0x89
215 #define MOV_rm_i32	0xc7
216 #define MOV_rm8_i8	0xc6
217 #define MOV_rm8_r8	0x88
218 #define MOVAPS_x_xm	0x28
219 #define MOVAPS_xm_x	0x29
220 #define MOVSD_x_xm	0x10
221 #define MOVSD_xm_x	0x11
222 #define MOVSXD_r_rm	0x63
223 #define MOVSX_r_rm8	(/* GROUP_0F */ 0xbe)
224 #define MOVSX_r_rm16	(/* GROUP_0F */ 0xbf)
225 #define MOVZX_r_rm8	(/* GROUP_0F */ 0xb6)
226 #define MOVZX_r_rm16	(/* GROUP_0F */ 0xb7)
227 #define MUL		(/* GROUP_F7 */ 4 << 3)
228 #define MULSD_x_xm	0x59
229 #define NEG_rm		(/* GROUP_F7 */ 3 << 3)
230 #define NOP		0x90
231 #define NOT_rm		(/* GROUP_F7 */ 2 << 3)
232 #define OR		(/* BINARY */ 1 << 3)
233 #define OR_r_rm		0x0b
234 #define OR_EAX_i32	0x0d
235 #define OR_rm_r		0x09
236 #define OR_rm8_r8	0x08
237 #define POP_r		0x58
238 #define POP_rm		0x8f
239 #define POPF		0x9d
240 #define PREFETCH	0x18
241 #define PUSH_i32	0x68
242 #define PUSH_r		0x50
243 #define PUSH_rm		(/* GROUP_FF */ 6 << 3)
244 #define PUSHF		0x9c
245 #define RET_near	0xc3
246 #define RET_i16		0xc2
247 #define SBB		(/* BINARY */ 3 << 3)
248 #define SBB_EAX_i32	0x1d
249 #define SBB_r_rm	0x1b
250 #define SBB_rm_r	0x19
251 #define SAR		(/* SHIFT */ 7 << 3)
252 #define SHL		(/* SHIFT */ 4 << 3)
253 #define SHR		(/* SHIFT */ 5 << 3)
254 #define SUB		(/* BINARY */ 5 << 3)
255 #define SUB_EAX_i32	0x2d
256 #define SUB_r_rm	0x2b
257 #define SUB_rm_r	0x29
258 #define SUBSD_x_xm	0x5c
259 #define TEST_EAX_i32	0xa9
260 #define TEST_rm_r	0x85
261 #define UCOMISD_x_xm	0x2e
262 #define UNPCKLPD_x_xm	0x14
263 #define XCHG_EAX_r	0x90
264 #define XCHG_r_rm	0x87
265 #define XOR		(/* BINARY */ 6 << 3)
266 #define XOR_EAX_i32	0x35
267 #define XOR_r_rm	0x33
268 #define XOR_rm_r	0x31
269 #define XORPD_x_xm	0x57
270 
271 #define GROUP_0F	0x0f
272 #define GROUP_F7	0xf7
273 #define GROUP_FF	0xff
274 #define GROUP_BINARY_81	0x81
275 #define GROUP_BINARY_83	0x83
276 #define GROUP_SHIFT_1	0xd1
277 #define GROUP_SHIFT_N	0xc1
278 #define GROUP_SHIFT_CL	0xd3
279 
280 #define MOD_REG		0xc0
281 #define MOD_DISP8	0x40
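/* A ModRM byte is laid out as mod(2):reg(3):rm(3).  MOD_REG selects register-direct operands
   and MOD_DISP8 selects [base + 8-bit displacement]; a register-to-register form is therefore
   built as MOD_REG | (reg << 3) | rm, with the fields taken from reg_map (reg_lmap when a
   REX prefix already carries the high bit). */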
282 
283 #define INC_SIZE(s)			(*inst++ = U8(s), compiler->size += (s))
284 
285 #define PUSH_REG(r)			(*inst++ = U8(PUSH_r + (r)))
286 #define POP_REG(r)			(*inst++ = U8(POP_r + (r)))
287 #define RET()				(*inst++ = RET_near)
288 #define RET_I16(n)			(*inst++ = RET_i16, *inst++ = U8(n), *inst++ = 0)
289 
290 /* Multithreading does not affect these static variables, since they store
291    built-in CPU features. Therefore they can safely be overwritten by different
292    threads if they detect the CPU features at the same time. */
293 #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
294 static sljit_s32 cpu_has_sse2 = -1;
295 #endif
296 static sljit_s32 cpu_has_cmov = -1;
297 
298 #ifdef _WIN32_WCE
299 #include <cmnintrin.h>
300 #elif defined(_MSC_VER) && _MSC_VER >= 1400
301 #include <intrin.h>
302 #endif
303 
304 /******************************************************/
305 /*    Unaligned-store functions                       */
306 /******************************************************/
307 
308 static SLJIT_INLINE void sljit_unaligned_store_s16(void *addr, sljit_s16 value)
309 {
310 	SLJIT_MEMCPY(addr, &value, sizeof(value));
311 }
312 
313 static SLJIT_INLINE void sljit_unaligned_store_s32(void *addr, sljit_s32 value)
314 {
315 	SLJIT_MEMCPY(addr, &value, sizeof(value));
316 }
317 
318 static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value)
319 {
320 	SLJIT_MEMCPY(addr, &value, sizeof(value));
321 }
322 
323 /******************************************************/
324 /*    Utility functions                               */
325 /******************************************************/
326 
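/* Note: CPUID leaf 1 returns the feature bits in EDX; bit 15 is CMOV/FCMOV and bit 26 is
   SSE2, which is what the shifts at the end of get_cpu_features() extract. */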
327 static void get_cpu_features(void)
328 {
329 	sljit_u32 features;
330 
331 #if defined(_MSC_VER) && _MSC_VER >= 1400
332 
333 	int CPUInfo[4];
334 	__cpuid(CPUInfo, 1);
335 	features = (sljit_u32)CPUInfo[3];
336 
337 #elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C)
338 
339 	/* AT&T syntax. */
340 	__asm__ (
341 		"movl $0x1, %%eax\n"
342 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
343 		/* On x86-32, there is no red zone, so this
344 		   should work (no need for a local variable). */
345 		"push %%ebx\n"
346 #endif
347 		"cpuid\n"
348 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
349 		"pop %%ebx\n"
350 #endif
351 		"movl %%edx, %0\n"
352 		: "=g" (features)
353 		:
354 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
355 		: "%eax", "%ecx", "%edx"
356 #else
357 		: "%rax", "%rbx", "%rcx", "%rdx"
358 #endif
359 	);
360 
361 #else /* _MSC_VER && _MSC_VER >= 1400 */
362 
363 	/* Intel syntax. */
364 	__asm {
365 		mov eax, 1
366 		cpuid
367 		mov features, edx
368 	}
369 
370 #endif /* _MSC_VER && _MSC_VER >= 1400 */
371 
372 #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
373 	cpu_has_sse2 = (features >> 26) & 0x1;
374 #endif
375 	cpu_has_cmov = (features >> 15) & 0x1;
376 }
377 
378 static sljit_u8 get_jump_code(sljit_uw type)
379 {
380 	switch (type) {
381 	case SLJIT_EQUAL:
382 	case SLJIT_EQUAL_F64:
383 		return 0x84 /* je */;
384 
385 	case SLJIT_NOT_EQUAL:
386 	case SLJIT_NOT_EQUAL_F64:
387 		return 0x85 /* jne */;
388 
389 	case SLJIT_LESS:
390 	case SLJIT_CARRY:
391 	case SLJIT_LESS_F64:
392 		return 0x82 /* jc */;
393 
394 	case SLJIT_GREATER_EQUAL:
395 	case SLJIT_NOT_CARRY:
396 	case SLJIT_GREATER_EQUAL_F64:
397 		return 0x83 /* jae */;
398 
399 	case SLJIT_GREATER:
400 	case SLJIT_GREATER_F64:
401 		return 0x87 /* jnbe */;
402 
403 	case SLJIT_LESS_EQUAL:
404 	case SLJIT_LESS_EQUAL_F64:
405 		return 0x86 /* jbe */;
406 
407 	case SLJIT_SIG_LESS:
408 		return 0x8c /* jl */;
409 
410 	case SLJIT_SIG_GREATER_EQUAL:
411 		return 0x8d /* jnl */;
412 
413 	case SLJIT_SIG_GREATER:
414 		return 0x8f /* jnle */;
415 
416 	case SLJIT_SIG_LESS_EQUAL:
417 		return 0x8e /* jle */;
418 
419 	case SLJIT_OVERFLOW:
420 		return 0x80 /* jo */;
421 
422 	case SLJIT_NOT_OVERFLOW:
423 		return 0x81 /* jno */;
424 
425 	case SLJIT_UNORDERED_F64:
426 		return 0x8a /* jp */;
427 
428 	case SLJIT_ORDERED_F64:
429 		return 0x8b /* jpo */;
430 	}
431 	return 0;
432 }
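/* The opcodes above are the second byte of the two-byte 0x0f 0x8X near (rel32) forms.  The
   short (rel8) forms are 0x7X, i.e. the near opcode minus 0x10, which is what
   generate_near_jump_code() below relies on when it emits a short conditional jump. */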
433 
434 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
435 static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw executable_offset);
436 #else
437 static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr);
438 static sljit_u8* generate_put_label_code(struct sljit_put_label *put_label, sljit_u8 *code_ptr, sljit_uw max_label);
439 #endif
440 
441 static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)
442 {
443 	sljit_uw type = jump->flags >> TYPE_SHIFT;
444 	sljit_s32 short_jump;
445 	sljit_uw label_addr;
446 
447 	if (jump->flags & JUMP_LABEL)
448 		label_addr = (sljit_uw)(code + jump->u.label->size);
449 	else
450 		label_addr = jump->u.target - (sljit_uw)executable_offset;
451 
452 	short_jump = (sljit_sw)(label_addr - (jump->addr + 2)) >= -128 && (sljit_sw)(label_addr - (jump->addr + 2)) <= 127;
453 
454 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
455 	if ((sljit_sw)(label_addr - (jump->addr + 1)) > HALFWORD_MAX || (sljit_sw)(label_addr - (jump->addr + 1)) < HALFWORD_MIN)
456 		return generate_far_jump_code(jump, code_ptr);
457 #endif
458 
459 	if (type == SLJIT_JUMP) {
460 		if (short_jump)
461 			*code_ptr++ = JMP_i8;
462 		else
463 			*code_ptr++ = JMP_i32;
464 		jump->addr++;
465 	}
466 	else if (type >= SLJIT_FAST_CALL) {
467 		short_jump = 0;
468 		*code_ptr++ = CALL_i32;
469 		jump->addr++;
470 	}
471 	else if (short_jump) {
472 		*code_ptr++ = U8(get_jump_code(type) - 0x10);
473 		jump->addr++;
474 	}
475 	else {
476 		*code_ptr++ = GROUP_0F;
477 		*code_ptr++ = get_jump_code(type);
478 		jump->addr += 2;
479 	}
480 
481 	if (short_jump) {
482 		jump->flags |= PATCH_MB;
483 		code_ptr += sizeof(sljit_s8);
484 	} else {
485 		jump->flags |= PATCH_MW;
486 		code_ptr += sizeof(sljit_s32);
487 	}
488 
489 	return code_ptr;
490 }
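/* Only the opcode bytes are emitted above; the 8- or 32-bit displacement is left as a hole
   (marked with PATCH_MB or PATCH_MW) and is filled in by the fix-up loop at the end of
   sljit_generate_code() once the final label addresses are known. */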
491 
492 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
493 {
494 	struct sljit_memory_fragment *buf;
495 	sljit_u8 *code;
496 	sljit_u8 *code_ptr;
497 	sljit_u8 *buf_ptr;
498 	sljit_u8 *buf_end;
499 	sljit_u8 len;
500 	sljit_sw executable_offset;
501 	sljit_uw jump_addr;
502 
503 	struct sljit_label *label;
504 	struct sljit_jump *jump;
505 	struct sljit_const *const_;
506 	struct sljit_put_label *put_label;
507 
508 	CHECK_ERROR_PTR();
509 	CHECK_PTR(check_sljit_generate_code(compiler));
510 	reverse_buf(compiler);
511 
512 	/* Second code generation pass. */
513 	code = (sljit_u8*)SLJIT_MALLOC_EXEC(compiler->size, compiler->exec_allocator_data);
514 	PTR_FAIL_WITH_EXEC_IF(code);
515 	buf = compiler->buf;
516 
517 	code_ptr = code;
518 	label = compiler->labels;
519 	jump = compiler->jumps;
520 	const_ = compiler->consts;
521 	put_label = compiler->put_labels;
522 	executable_offset = SLJIT_EXEC_OFFSET(code);
523 
524 	do {
525 		buf_ptr = buf->memory;
526 		buf_end = buf_ptr + buf->used_size;
527 		do {
528 			len = *buf_ptr++;
529 			if (len > 0) {
530 				/* The code is already generated. */
531 				SLJIT_MEMCPY(code_ptr, buf_ptr, len);
532 				code_ptr += len;
533 				buf_ptr += len;
534 			}
535 			else {
536 				switch (*buf_ptr) {
537 				case 0:
538 					label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
539 					label->size = (sljit_uw)(code_ptr - code);
540 					label = label->next;
541 					break;
542 				case 1:
543 					jump->addr = (sljit_uw)code_ptr;
544 					if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
545 						code_ptr = generate_near_jump_code(jump, code_ptr, code, executable_offset);
546 					else {
547 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
548 						code_ptr = generate_far_jump_code(jump, code_ptr, executable_offset);
549 #else
550 						code_ptr = generate_far_jump_code(jump, code_ptr);
551 #endif
552 					}
553 					jump = jump->next;
554 					break;
555 				case 2:
556 					const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_sw);
557 					const_ = const_->next;
558 					break;
559 				default:
560 					SLJIT_ASSERT(*buf_ptr == 3);
561 					SLJIT_ASSERT(put_label->label);
562 					put_label->addr = (sljit_uw)code_ptr;
563 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
564 					code_ptr = generate_put_label_code(put_label, code_ptr, (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size);
565 #endif
566 					put_label = put_label->next;
567 					break;
568 				}
569 				buf_ptr++;
570 			}
571 		} while (buf_ptr < buf_end);
572 		SLJIT_ASSERT(buf_ptr == buf_end);
573 		buf = buf->next;
574 	} while (buf);
575 
576 	SLJIT_ASSERT(!label);
577 	SLJIT_ASSERT(!jump);
578 	SLJIT_ASSERT(!const_);
579 	SLJIT_ASSERT(!put_label);
580 	SLJIT_ASSERT(code_ptr <= code + compiler->size);
581 
582 	jump = compiler->jumps;
583 	while (jump) {
584 		jump_addr = jump->addr + (sljit_uw)executable_offset;
585 
586 		if (jump->flags & PATCH_MB) {
587 			SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8))) >= -128 && (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8))) <= 127);
588 			*(sljit_u8*)jump->addr = U8(jump->u.label->addr - (jump_addr + sizeof(sljit_s8)));
589 		} else if (jump->flags & PATCH_MW) {
590 			if (jump->flags & JUMP_LABEL) {
591 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
592 				sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_sw))));
593 #else
594 				SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
595 				sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))));
596 #endif
597 			}
598 			else {
599 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
600 				sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_sw))));
601 #else
602 				SLJIT_ASSERT((sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
603 				sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)(jump->u.target - (jump_addr + sizeof(sljit_s32))));
604 #endif
605 			}
606 		}
607 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
608 		else if (jump->flags & PATCH_MD)
609 			sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)jump->u.label->addr);
610 #endif
611 
612 		jump = jump->next;
613 	}
614 
615 	put_label = compiler->put_labels;
616 	while (put_label) {
617 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
618 		sljit_unaligned_store_sw((void*)(put_label->addr - sizeof(sljit_sw)), (sljit_sw)put_label->label->addr);
619 #else
620 		if (put_label->flags & PATCH_MD) {
621 			SLJIT_ASSERT(put_label->label->addr > HALFWORD_MAX);
622 			sljit_unaligned_store_sw((void*)(put_label->addr - sizeof(sljit_sw)), (sljit_sw)put_label->label->addr);
623 		}
624 		else {
625 			SLJIT_ASSERT(put_label->label->addr <= HALFWORD_MAX);
626 			sljit_unaligned_store_s32((void*)(put_label->addr - sizeof(sljit_s32)), (sljit_s32)put_label->label->addr);
627 		}
628 #endif
629 
630 		put_label = put_label->next;
631 	}
632 
633 	compiler->error = SLJIT_ERR_COMPILED;
634 	compiler->executable_offset = executable_offset;
635 	compiler->executable_size = (sljit_uw)(code_ptr - code);
636 
637 	code = (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
638 
639 	SLJIT_UPDATE_WX_FLAGS(code, (sljit_u8*)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset), 1);
640 	return (void*)code;
641 }
642 
643 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
644 {
645 	switch (feature_type) {
646 	case SLJIT_HAS_FPU:
647 #ifdef SLJIT_IS_FPU_AVAILABLE
648 		return SLJIT_IS_FPU_AVAILABLE;
649 #elif (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
650 		if (cpu_has_sse2 == -1)
651 			get_cpu_features();
652 		return cpu_has_sse2;
653 #else /* SLJIT_DETECT_SSE2 */
654 		return 1;
655 #endif /* SLJIT_DETECT_SSE2 */
656 
657 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
658 	case SLJIT_HAS_VIRTUAL_REGISTERS:
659 		return 1;
660 #endif
661 
662 	case SLJIT_HAS_CLZ:
663 	case SLJIT_HAS_CMOV:
664 		if (cpu_has_cmov == -1)
665 			get_cpu_features();
666 		return cpu_has_cmov;
667 
668 	case SLJIT_HAS_PREFETCH:
669 		return 1;
670 
671 	case SLJIT_HAS_SSE2:
672 #if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
673 		if (cpu_has_sse2 == -1)
674 			get_cpu_features();
675 		return cpu_has_sse2;
676 #else
677 		return 1;
678 #endif
679 
680 	default:
681 		return 0;
682 	}
683 }
684 
685 /* --------------------------------------------------------------------- */
686 /*  Operators                                                            */
687 /* --------------------------------------------------------------------- */
688 
689 #define BINARY_OPCODE(opcode) (((opcode ## _EAX_i32) << 24) | ((opcode ## _r_rm) << 16) | ((opcode ## _rm_r) << 8) | (opcode))
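/* For example, BINARY_OPCODE(ADD) packs to 0x05030100: ADD_EAX_i32 in the top byte, then
   ADD_r_rm, ADD_rm_r, and the pre-shifted /digit of the 0x81/0x83 immediate group in the
   low byte.  emit_cum_binary() and emit_non_cum_binary() unpack the word again with shifts. */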
690 
691 #define BINARY_IMM32(op_imm, immw, arg, argw) \
692 	do { \
693 		inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
694 		FAIL_IF(!inst); \
695 		*(inst + 1) |= (op_imm); \
696 	} while (0)
697 
698 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
699 
700 #define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
701 	do { \
702 		if (IS_HALFWORD(immw) || compiler->mode32) { \
703 			BINARY_IMM32(op_imm, immw, arg, argw); \
704 		} \
705 		else { \
706 			FAIL_IF(emit_load_imm64(compiler, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, immw)); \
707 			inst = emit_x86_instruction(compiler, 1, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
708 			FAIL_IF(!inst); \
709 			*inst = (op_mr); \
710 		} \
711 	} while (0)
712 
713 #define BINARY_EAX_IMM(op_eax_imm, immw) \
714 	FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (op_eax_imm), immw))
715 
716 #else /* !SLJIT_CONFIG_X86_64 */
717 
718 #define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
719 	BINARY_IMM32(op_imm, immw, arg, argw)
720 
721 #define BINARY_EAX_IMM(op_eax_imm, immw) \
722 	FAIL_IF(emit_do_imm(compiler, (op_eax_imm), immw))
723 
724 #endif /* SLJIT_CONFIG_X86_64 */
725 
726 static sljit_s32 emit_mov(struct sljit_compiler *compiler,
727 	sljit_s32 dst, sljit_sw dstw,
728 	sljit_s32 src, sljit_sw srcw);
729 
730 #define EMIT_MOV(compiler, dst, dstw, src, srcw) \
731 	FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
732 
733 static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
734 	sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src);
735 
736 static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
737 	sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw);
738 
739 static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
740 	sljit_s32 src1, sljit_sw src1w,
741 	sljit_s32 src2, sljit_sw src2w);
742 
743 static SLJIT_INLINE sljit_s32 emit_endbranch(struct sljit_compiler *compiler)
744 {
745 #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET)
746 	/* Emit endbr32/endbr64 when CET is enabled.  */
747 	sljit_u8 *inst;
748 	inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
749 	FAIL_IF(!inst);
750 	INC_SIZE(4);
751 	*inst++ = 0xf3;
752 	*inst++ = 0x0f;
753 	*inst++ = 0x1e;
754 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
755 	*inst = 0xfb;
756 #else
757 	*inst = 0xfa;
758 #endif
759 #else /* !SLJIT_CONFIG_X86_CET */
760 	SLJIT_UNUSED_ARG(compiler);
761 #endif /* SLJIT_CONFIG_X86_CET */
762 	return SLJIT_SUCCESS;
763 }
764 
765 #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
766 
767 static SLJIT_INLINE sljit_s32 emit_rdssp(struct sljit_compiler *compiler, sljit_s32 reg)
768 {
769 	sljit_u8 *inst;
770 	sljit_s32 size;
771 
772 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
773 	size = 5;
774 #else
775 	size = 4;
776 #endif
777 
778 	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
779 	FAIL_IF(!inst);
780 	INC_SIZE(size);
781 	*inst++ = 0xf3;
782 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
783 	*inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
784 #endif
785 	*inst++ = 0x0f;
786 	*inst++ = 0x1e;
787 	*inst = (0x3 << 6) | (0x1 << 3) | (reg_map[reg] & 0x7);
788 	return SLJIT_SUCCESS;
789 }
790 
791 static SLJIT_INLINE sljit_s32 emit_incssp(struct sljit_compiler *compiler, sljit_s32 reg)
792 {
793 	sljit_u8 *inst;
794 	sljit_s32 size;
795 
796 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
797 	size = 5;
798 #else
799 	size = 4;
800 #endif
801 
802 	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
803 	FAIL_IF(!inst);
804 	INC_SIZE(size);
805 	*inst++ = 0xf3;
806 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
807 	*inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : REX_B);
808 #endif
809 	*inst++ = 0x0f;
810 	*inst++ = 0xae;
811 	*inst = (0x3 << 6) | (0x5 << 3) | (reg_map[reg] & 0x7);
812 	return SLJIT_SUCCESS;
813 }
814 
815 #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
816 
817 static SLJIT_INLINE sljit_s32 cpu_has_shadow_stack(void)
818 {
819 #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
820 	return _get_ssp() != 0;
821 #else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
822 	return 0;
823 #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
824 }
825 
826 static SLJIT_INLINE sljit_s32 adjust_shadow_stack(struct sljit_compiler *compiler,
827 	sljit_s32 src, sljit_sw srcw)
828 {
829 #if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
830 	sljit_u8 *inst, *jz_after_cmp_inst;
831 	sljit_uw size_jz_after_cmp_inst;
832 
833 	sljit_uw size_before_rdssp_inst = compiler->size;
834 
835 	/* Generate "RDSSP TMP_REG1". */
836 	FAIL_IF(emit_rdssp(compiler, TMP_REG1));
837 
838 	/* Load return address on shadow stack into TMP_REG1. */
839 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
840 	SLJIT_ASSERT(reg_map[TMP_REG1] == 5);
841 
842 	/* Hand-code the otherwise unsupported "mov 0x0(%ebp),%ebp". */
843 	inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
844 	FAIL_IF(!inst);
845 	INC_SIZE(3);
846 	*inst++ = 0x8b;
847 	*inst++ = 0x6d;
848 	*inst = 0;
849 #else /* !SLJIT_CONFIG_X86_32 */
850 	EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(TMP_REG1), 0);
851 #endif /* SLJIT_CONFIG_X86_32 */
852 
853 	/* Compare return address against TMP_REG1. */
854 	FAIL_IF(emit_cmp_binary (compiler, TMP_REG1, 0, src, srcw));
855 
856 	/* Generate JZ to skip the shadow stack adjustment when the shadow
857 	   stack matches the normal stack. */
858 	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
859 	FAIL_IF(!inst);
860 	INC_SIZE(2);
861 	*inst++ = get_jump_code(SLJIT_EQUAL) - 0x10;
862 	size_jz_after_cmp_inst = compiler->size;
863 	jz_after_cmp_inst = inst;
864 
865 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
866 	/* REX_W is not necessary. */
867 	compiler->mode32 = 1;
868 #endif
869 	/* Load 1 into TMP_REG1. */
870 	EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);
871 
872 	/* Generate "INCSSP TMP_REG1". */
873 	FAIL_IF(emit_incssp(compiler, TMP_REG1));
874 
875 	/* Jump back to "RDSSP TMP_REG1" to check shadow stack again. */
876 	inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
877 	FAIL_IF(!inst);
878 	INC_SIZE(2);
879 	*inst++ = JMP_i8;
880 	*inst = size_before_rdssp_inst - compiler->size;
881 
882 	*jz_after_cmp_inst = compiler->size - size_jz_after_cmp_inst;
883 #else /* !SLJIT_CONFIG_X86_CET || !__SHSTK__ */
884 	SLJIT_UNUSED_ARG(compiler);
885 	SLJIT_UNUSED_ARG(src);
886 	SLJIT_UNUSED_ARG(srcw);
887 #endif /* SLJIT_CONFIG_X86_CET && __SHSTK__ */
888 	return SLJIT_SUCCESS;
889 }
890 
891 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
892 #include "sljitNativeX86_32.c"
893 #else
894 #include "sljitNativeX86_64.c"
895 #endif
896 
897 static sljit_s32 emit_mov(struct sljit_compiler *compiler,
898 	sljit_s32 dst, sljit_sw dstw,
899 	sljit_s32 src, sljit_sw srcw)
900 {
901 	sljit_u8* inst;
902 
903 	if (FAST_IS_REG(src)) {
904 		inst = emit_x86_instruction(compiler, 1, src, 0, dst, dstw);
905 		FAIL_IF(!inst);
906 		*inst = MOV_rm_r;
907 		return SLJIT_SUCCESS;
908 	}
909 	if (src & SLJIT_IMM) {
910 		if (FAST_IS_REG(dst)) {
911 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
912 			return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
913 #else
914 			if (!compiler->mode32) {
915 				if (NOT_HALFWORD(srcw))
916 					return emit_load_imm64(compiler, dst, srcw);
917 			}
918 			else
919 				return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, U8(MOV_r_i32 | reg_lmap[dst]), srcw);
920 #endif
921 		}
922 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
923 		if (!compiler->mode32 && NOT_HALFWORD(srcw)) {
924 			/* Immediate to memory move. Only the SLJIT_MOV operation copies
925 			   an immediate directly into memory, so TMP_REG1 can be used. */
926 			FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
927 			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
928 			FAIL_IF(!inst);
929 			*inst = MOV_rm_r;
930 			return SLJIT_SUCCESS;
931 		}
932 #endif
933 		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw);
934 		FAIL_IF(!inst);
935 		*inst = MOV_rm_i32;
936 		return SLJIT_SUCCESS;
937 	}
938 	if (FAST_IS_REG(dst)) {
939 		inst = emit_x86_instruction(compiler, 1, dst, 0, src, srcw);
940 		FAIL_IF(!inst);
941 		*inst = MOV_r_rm;
942 		return SLJIT_SUCCESS;
943 	}
944 
945 	/* Memory to memory move. Only the SLJIT_MOV operation copies
946 	   data from memory to memory, so TMP_REG1 can be used. */
947 	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src, srcw);
948 	FAIL_IF(!inst);
949 	*inst = MOV_r_rm;
950 	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
951 	FAIL_IF(!inst);
952 	*inst = MOV_rm_r;
953 	return SLJIT_SUCCESS;
954 }
955 
956 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
957 {
958 	sljit_u8 *inst;
959 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
960 	sljit_uw size;
961 #endif
962 
963 	CHECK_ERROR();
964 	CHECK(check_sljit_emit_op0(compiler, op));
965 
966 	switch (GET_OPCODE(op)) {
967 	case SLJIT_BREAKPOINT:
968 		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
969 		FAIL_IF(!inst);
970 		INC_SIZE(1);
971 		*inst = INT3;
972 		break;
973 	case SLJIT_NOP:
974 		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
975 		FAIL_IF(!inst);
976 		INC_SIZE(1);
977 		*inst = NOP;
978 		break;
979 	case SLJIT_LMUL_UW:
980 	case SLJIT_LMUL_SW:
981 	case SLJIT_DIVMOD_UW:
982 	case SLJIT_DIVMOD_SW:
983 	case SLJIT_DIV_UW:
984 	case SLJIT_DIV_SW:
985 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
986 #ifdef _WIN64
987 		SLJIT_ASSERT(
988 			reg_map[SLJIT_R0] == 0
989 			&& reg_map[SLJIT_R1] == 2
990 			&& reg_map[TMP_REG1] > 7);
991 #else
992 		SLJIT_ASSERT(
993 			reg_map[SLJIT_R0] == 0
994 			&& reg_map[SLJIT_R1] < 7
995 			&& reg_map[TMP_REG1] == 2);
996 #endif
997 		compiler->mode32 = op & SLJIT_32;
998 #endif
999 		SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
1000 
1001 		op = GET_OPCODE(op);
1002 		if ((op | 0x2) == SLJIT_DIV_UW) {
1003 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
1004 			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
1005 			inst = emit_x86_instruction(compiler, 1, SLJIT_R1, 0, SLJIT_R1, 0);
1006 #else
1007 			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
1008 #endif
1009 			FAIL_IF(!inst);
1010 			*inst = XOR_r_rm;
1011 		}
1012 
1013 		if ((op | 0x2) == SLJIT_DIV_SW) {
1014 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
1015 			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
1016 #endif
1017 
1018 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1019 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1020 			FAIL_IF(!inst);
1021 			INC_SIZE(1);
1022 			*inst = CDQ;
1023 #else
1024 			if (compiler->mode32) {
1025 				inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1026 				FAIL_IF(!inst);
1027 				INC_SIZE(1);
1028 				*inst = CDQ;
1029 			} else {
1030 				inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
1031 				FAIL_IF(!inst);
1032 				INC_SIZE(2);
1033 				*inst++ = REX_W;
1034 				*inst = CDQ;
1035 			}
1036 #endif
1037 		}
1038 
1039 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1040 		inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
1041 		FAIL_IF(!inst);
1042 		INC_SIZE(2);
1043 		*inst++ = GROUP_F7;
1044 		*inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
1045 #else
1046 #ifdef _WIN64
1047 		size = (!compiler->mode32 || op >= SLJIT_DIVMOD_UW) ? 3 : 2;
1048 #else
1049 		size = (!compiler->mode32) ? 3 : 2;
1050 #endif
1051 		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
1052 		FAIL_IF(!inst);
1053 		INC_SIZE(size);
1054 #ifdef _WIN64
1055 		if (!compiler->mode32)
1056 			*inst++ = REX_W | ((op >= SLJIT_DIVMOD_UW) ? REX_B : 0);
1057 		else if (op >= SLJIT_DIVMOD_UW)
1058 			*inst++ = REX_B;
1059 		*inst++ = GROUP_F7;
1060 		*inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
1061 #else
1062 		if (!compiler->mode32)
1063 			*inst++ = REX_W;
1064 		*inst++ = GROUP_F7;
1065 		*inst = MOD_REG | reg_map[SLJIT_R1];
1066 #endif
1067 #endif
1068 		switch (op) {
1069 		case SLJIT_LMUL_UW:
1070 			*inst |= MUL;
1071 			break;
1072 		case SLJIT_LMUL_SW:
1073 			*inst |= IMUL;
1074 			break;
1075 		case SLJIT_DIVMOD_UW:
1076 		case SLJIT_DIV_UW:
1077 			*inst |= DIV;
1078 			break;
1079 		case SLJIT_DIVMOD_SW:
1080 		case SLJIT_DIV_SW:
1081 			*inst |= IDIV;
1082 			break;
1083 		}
1084 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
1085 		if (op <= SLJIT_DIVMOD_SW)
1086 			EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
1087 #else
1088 		if (op >= SLJIT_DIV_UW)
1089 			EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
1090 #endif
1091 		break;
1092 	case SLJIT_ENDBR:
1093 		return emit_endbranch(compiler);
1094 	case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
1095 		return skip_frames_before_return(compiler);
1096 	}
1097 
1098 	return SLJIT_SUCCESS;
1099 }
1100 
1101 #define ENCODE_PREFIX(prefix) \
1102 	do { \
1103 		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1); \
1104 		FAIL_IF(!inst); \
1105 		INC_SIZE(1); \
1106 		*inst = U8(prefix); \
1107 	} while (0)
1108 
1109 static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
1110 	sljit_s32 dst, sljit_sw dstw,
1111 	sljit_s32 src, sljit_sw srcw)
1112 {
1113 	sljit_u8* inst;
1114 	sljit_s32 dst_r;
1115 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1116 	sljit_s32 work_r;
1117 #endif
1118 
1119 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1120 	compiler->mode32 = 0;
1121 #endif
1122 
1123 	if (src & SLJIT_IMM) {
1124 		if (FAST_IS_REG(dst)) {
1125 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1126 			return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
1127 #else
1128 			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
1129 			FAIL_IF(!inst);
1130 			*inst = MOV_rm_i32;
1131 			return SLJIT_SUCCESS;
1132 #endif
1133 		}
1134 		inst = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw);
1135 		FAIL_IF(!inst);
1136 		*inst = MOV_rm8_i8;
1137 		return SLJIT_SUCCESS;
1138 	}
1139 
1140 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1141 
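	/* Without a REX prefix only AL, CL, DL and BL are byte addressable, i.e. only registers
	   with reg_map value < 4 on x86-32; the cases below either pick such a register or fall
	   back to the shl/sar and xchg based sequences. */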
1142 	if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) {
1143 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1144 		if (reg_map[src] >= 4) {
1145 			SLJIT_ASSERT(dst_r == TMP_REG1);
1146 			EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
1147 		} else
1148 			dst_r = src;
1149 #else
1150 		dst_r = src;
1151 #endif
1152 	}
1153 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1154 	else if (FAST_IS_REG(src) && reg_map[src] >= 4) {
1155 		/* src, dst are registers. */
1156 		SLJIT_ASSERT(FAST_IS_REG(dst));
1157 		if (reg_map[dst] < 4) {
1158 			if (dst != src)
1159 				EMIT_MOV(compiler, dst, 0, src, 0);
1160 			inst = emit_x86_instruction(compiler, 2, dst, 0, dst, 0);
1161 			FAIL_IF(!inst);
1162 			*inst++ = GROUP_0F;
1163 			*inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
1164 		}
1165 		else {
1166 			if (dst != src)
1167 				EMIT_MOV(compiler, dst, 0, src, 0);
1168 			if (sign) {
1169 				/* shl reg, 24 */
1170 				inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
1171 				FAIL_IF(!inst);
1172 				*inst |= SHL;
1173 				/* sar reg, 24 */
1174 				inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
1175 				FAIL_IF(!inst);
1176 				*inst |= SAR;
1177 			}
1178 			else {
1179 				inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0);
1180 				FAIL_IF(!inst);
1181 				*(inst + 1) |= AND;
1182 			}
1183 		}
1184 		return SLJIT_SUCCESS;
1185 	}
1186 #endif
1187 	else {
1188 		/* src is either a memory operand or a register with reg_map[src] < 4 on x86-32. */
1189 		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
1190 		FAIL_IF(!inst);
1191 		*inst++ = GROUP_0F;
1192 		*inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
1193 	}
1194 
1195 	if (dst & SLJIT_MEM) {
1196 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1197 		if (dst_r == TMP_REG1) {
1198 			/* Find an unused register whose reg_map value is below 4. */
1199 			if ((dst & REG_MASK) == SLJIT_R0) {
1200 				if ((dst & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_R1))
1201 					work_r = SLJIT_R2;
1202 				else
1203 					work_r = SLJIT_R1;
1204 			}
1205 			else {
1206 				if ((dst & OFFS_REG_MASK) != TO_OFFS_REG(SLJIT_R0))
1207 					work_r = SLJIT_R0;
1208 				else if ((dst & REG_MASK) == SLJIT_R1)
1209 					work_r = SLJIT_R2;
1210 				else
1211 					work_r = SLJIT_R1;
1212 			}
1213 
1214 			if (work_r == SLJIT_R0) {
1215 				ENCODE_PREFIX(XCHG_EAX_r | reg_map[TMP_REG1]);
1216 			}
1217 			else {
1218 				inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
1219 				FAIL_IF(!inst);
1220 				*inst = XCHG_r_rm;
1221 			}
1222 
1223 			inst = emit_x86_instruction(compiler, 1, work_r, 0, dst, dstw);
1224 			FAIL_IF(!inst);
1225 			*inst = MOV_rm8_r8;
1226 
1227 			if (work_r == SLJIT_R0) {
1228 				ENCODE_PREFIX(XCHG_EAX_r | reg_map[TMP_REG1]);
1229 			}
1230 			else {
1231 				inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
1232 				FAIL_IF(!inst);
1233 				*inst = XCHG_r_rm;
1234 			}
1235 		}
1236 		else {
1237 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
1238 			FAIL_IF(!inst);
1239 			*inst = MOV_rm8_r8;
1240 		}
1241 #else
1242 		inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw);
1243 		FAIL_IF(!inst);
1244 		*inst = MOV_rm8_r8;
1245 #endif
1246 	}
1247 
1248 	return SLJIT_SUCCESS;
1249 }
1250 
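/* Illustrative note for emit_prefetch() below: 0x0f 0x18 selects the prefetch group and the
   /digit OR-ed into the ModRM byte picks the hint: /1 prefetcht0, /2 prefetcht1, /3 prefetcht2,
   and /0 (any other op value) prefetchnta. */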
1251 static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, sljit_s32 op,
1252 	sljit_s32 src, sljit_sw srcw)
1253 {
1254 	sljit_u8* inst;
1255 
1256 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1257 	compiler->mode32 = 1;
1258 #endif
1259 
1260 	inst = emit_x86_instruction(compiler, 2, 0, 0, src, srcw);
1261 	FAIL_IF(!inst);
1262 	*inst++ = GROUP_0F;
1263 	*inst++ = PREFETCH;
1264 
1265 	if (op == SLJIT_PREFETCH_L1)
1266 		*inst |= (1 << 3);
1267 	else if (op == SLJIT_PREFETCH_L2)
1268 		*inst |= (2 << 3);
1269 	else if (op == SLJIT_PREFETCH_L3)
1270 		*inst |= (3 << 3);
1271 
1272 	return SLJIT_SUCCESS;
1273 }
1274 
1275 static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
1276 	sljit_s32 dst, sljit_sw dstw,
1277 	sljit_s32 src, sljit_sw srcw)
1278 {
1279 	sljit_u8* inst;
1280 	sljit_s32 dst_r;
1281 
1282 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1283 	compiler->mode32 = 0;
1284 #endif
1285 
1286 	if (src & SLJIT_IMM) {
1287 		if (FAST_IS_REG(dst)) {
1288 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1289 			return emit_do_imm(compiler, MOV_r_i32 | reg_map[dst], srcw);
1290 #else
1291 			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
1292 			FAIL_IF(!inst);
1293 			*inst = MOV_rm_i32;
1294 			return SLJIT_SUCCESS;
1295 #endif
1296 		}
1297 		inst = emit_x86_instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw);
1298 		FAIL_IF(!inst);
1299 		*inst = MOV_rm_i32;
1300 		return SLJIT_SUCCESS;
1301 	}
1302 
1303 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1304 
1305 	if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
1306 		dst_r = src;
1307 	else {
1308 		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
1309 		FAIL_IF(!inst);
1310 		*inst++ = GROUP_0F;
1311 		*inst = sign ? MOVSX_r_rm16 : MOVZX_r_rm16;
1312 	}
1313 
1314 	if (dst & SLJIT_MEM) {
1315 		inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw);
1316 		FAIL_IF(!inst);
1317 		*inst = MOV_rm_r;
1318 	}
1319 
1320 	return SLJIT_SUCCESS;
1321 }
1322 
1323 static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode,
1324 	sljit_s32 dst, sljit_sw dstw,
1325 	sljit_s32 src, sljit_sw srcw)
1326 {
1327 	sljit_u8* inst;
1328 
1329 	if (dst == src && dstw == srcw) {
1330 		/* Same input and output */
1331 		inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
1332 		FAIL_IF(!inst);
1333 		*inst++ = GROUP_F7;
1334 		*inst |= opcode;
1335 		return SLJIT_SUCCESS;
1336 	}
1337 
1338 	if (FAST_IS_REG(dst)) {
1339 		EMIT_MOV(compiler, dst, 0, src, srcw);
1340 		inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
1341 		FAIL_IF(!inst);
1342 		*inst++ = GROUP_F7;
1343 		*inst |= opcode;
1344 		return SLJIT_SUCCESS;
1345 	}
1346 
1347 	EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
1348 	inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
1349 	FAIL_IF(!inst);
1350 	*inst++ = GROUP_F7;
1351 	*inst |= opcode;
1352 	EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1353 	return SLJIT_SUCCESS;
1354 }
1355 
1356 static sljit_s32 emit_not_with_flags(struct sljit_compiler *compiler,
1357 	sljit_s32 dst, sljit_sw dstw,
1358 	sljit_s32 src, sljit_sw srcw)
1359 {
1360 	sljit_u8* inst;
1361 
1362 	if (FAST_IS_REG(dst)) {
1363 		EMIT_MOV(compiler, dst, 0, src, srcw);
1364 		inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
1365 		FAIL_IF(!inst);
1366 		*inst++ = GROUP_F7;
1367 		*inst |= NOT_rm;
1368 		inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
1369 		FAIL_IF(!inst);
1370 		*inst = OR_r_rm;
1371 		return SLJIT_SUCCESS;
1372 	}
1373 
1374 	EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
1375 	inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
1376 	FAIL_IF(!inst);
1377 	*inst++ = GROUP_F7;
1378 	*inst |= NOT_rm;
1379 	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
1380 	FAIL_IF(!inst);
1381 	*inst = OR_r_rm;
1382 	EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1383 	return SLJIT_SUCCESS;
1384 }
1385 
1386 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1387 static const sljit_sw emit_clz_arg = 32 + 31;
1388 #endif
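/* emit_clz() below derives CLZ from BSR: BSR returns the index of the highest set bit and,
   for a zero input, sets ZF and leaves its destination undefined, so the 32 + 31 (or 64 + 63)
   constant is CMOV-ed in for that case.  XOR-ing the result with 31 (or 63) then yields
   31 - bsr(x), the leading-zero count, and 32 (or 64) for a zero input. */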
1389 
1390 static sljit_s32 emit_clz(struct sljit_compiler *compiler, sljit_s32 op_flags,
1391 	sljit_s32 dst, sljit_sw dstw,
1392 	sljit_s32 src, sljit_sw srcw)
1393 {
1394 	sljit_u8* inst;
1395 	sljit_s32 dst_r;
1396 
1397 	SLJIT_UNUSED_ARG(op_flags);
1398 
1399 	if (cpu_has_cmov == -1)
1400 		get_cpu_features();
1401 
1402 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1403 
1404 	inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
1405 	FAIL_IF(!inst);
1406 	*inst++ = GROUP_0F;
1407 	*inst = BSR_r_rm;
1408 
1409 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1410 	if (cpu_has_cmov) {
1411 		if (dst_r != TMP_REG1) {
1412 			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 32 + 31);
1413 			inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG1, 0);
1414 		}
1415 		else
1416 			inst = emit_x86_instruction(compiler, 2, dst_r, 0, SLJIT_MEM0(), (sljit_sw)&emit_clz_arg);
1417 
1418 		FAIL_IF(!inst);
1419 		*inst++ = GROUP_0F;
1420 		*inst = CMOVE_r_rm;
1421 	}
1422 	else
1423 		FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, 32 + 31));
1424 
1425 	inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
1426 #else
1427 	if (cpu_has_cmov) {
1428 		EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, !(op_flags & SLJIT_32) ? (64 + 63) : (32 + 31));
1429 
1430 		inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
1431 		FAIL_IF(!inst);
1432 		*inst++ = GROUP_0F;
1433 		*inst = CMOVE_r_rm;
1434 	}
1435 	else
1436 		FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, !(op_flags & SLJIT_32) ? (64 + 63) : (32 + 31)));
1437 
1438 	inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, !(op_flags & SLJIT_32) ? 63 : 31, dst_r, 0);
1439 #endif
1440 
1441 	FAIL_IF(!inst);
1442 	*(inst + 1) |= XOR;
1443 
1444 	if (dst & SLJIT_MEM)
1445 		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1446 	return SLJIT_SUCCESS;
1447 }
1448 
1449 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
1450 	sljit_s32 dst, sljit_sw dstw,
1451 	sljit_s32 src, sljit_sw srcw)
1452 {
1453 	sljit_s32 op_flags = GET_ALL_FLAGS(op);
1454 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1455 	sljit_s32 dst_is_ereg = 0;
1456 #endif
1457 
1458 	CHECK_ERROR();
1459 	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
1460 	ADJUST_LOCAL_OFFSET(dst, dstw);
1461 	ADJUST_LOCAL_OFFSET(src, srcw);
1462 
1463 	CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);
1464 	CHECK_EXTRA_REGS(src, srcw, (void)0);
1465 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1466 	compiler->mode32 = op_flags & SLJIT_32;
1467 #endif
1468 
1469 	op = GET_OPCODE(op);
1470 
1471 	if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
1472 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1473 		compiler->mode32 = 0;
1474 #endif
1475 
1476 		if (FAST_IS_REG(src) && src == dst) {
1477 			if (!TYPE_CAST_NEEDED(op))
1478 				return SLJIT_SUCCESS;
1479 		}
1480 
1481 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1482 		if (op_flags & SLJIT_32) {
1483 			if (src & SLJIT_MEM) {
1484 				if (op == SLJIT_MOV_S32)
1485 					op = SLJIT_MOV_U32;
1486 			}
1487 			else if (src & SLJIT_IMM) {
1488 				if (op == SLJIT_MOV_U32)
1489 					op = SLJIT_MOV_S32;
1490 			}
1491 		}
1492 #endif
1493 
1494 		if (src & SLJIT_IMM) {
1495 			switch (op) {
1496 			case SLJIT_MOV_U8:
1497 				srcw = (sljit_u8)srcw;
1498 				break;
1499 			case SLJIT_MOV_S8:
1500 				srcw = (sljit_s8)srcw;
1501 				break;
1502 			case SLJIT_MOV_U16:
1503 				srcw = (sljit_u16)srcw;
1504 				break;
1505 			case SLJIT_MOV_S16:
1506 				srcw = (sljit_s16)srcw;
1507 				break;
1508 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1509 			case SLJIT_MOV_U32:
1510 				srcw = (sljit_u32)srcw;
1511 				break;
1512 			case SLJIT_MOV_S32:
1513 				srcw = (sljit_s32)srcw;
1514 				break;
1515 #endif
1516 			}
1517 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1518 			if (SLJIT_UNLIKELY(dst_is_ereg))
1519 				return emit_mov(compiler, dst, dstw, src, srcw);
1520 #endif
1521 		}
1522 
1523 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1524 		if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
1525 			SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
1526 			dst = TMP_REG1;
1527 		}
1528 #endif
1529 
1530 		switch (op) {
1531 		case SLJIT_MOV:
1532 		case SLJIT_MOV_P:
1533 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1534 		case SLJIT_MOV_U32:
1535 		case SLJIT_MOV_S32:
1536 		case SLJIT_MOV32:
1537 #endif
1538 			EMIT_MOV(compiler, dst, dstw, src, srcw);
1539 			break;
1540 		case SLJIT_MOV_U8:
1541 			FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw));
1542 			break;
1543 		case SLJIT_MOV_S8:
1544 			FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw));
1545 			break;
1546 		case SLJIT_MOV_U16:
1547 			FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw));
1548 			break;
1549 		case SLJIT_MOV_S16:
1550 			FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw));
1551 			break;
1552 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1553 		case SLJIT_MOV_U32:
1554 			FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw));
1555 			break;
1556 		case SLJIT_MOV_S32:
1557 			FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw));
1558 			break;
1559 		case SLJIT_MOV32:
1560 			compiler->mode32 = 1;
1561 			EMIT_MOV(compiler, dst, dstw, src, srcw);
1562 			compiler->mode32 = 0;
1563 			break;
1564 #endif
1565 		}
1566 
1567 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1568 		if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1)
1569 			return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), dstw, TMP_REG1, 0);
1570 #endif
1571 		return SLJIT_SUCCESS;
1572 	}
1573 
1574 	switch (op) {
1575 	case SLJIT_NOT:
1576 		if (SLJIT_UNLIKELY(op_flags & SLJIT_SET_Z))
1577 			return emit_not_with_flags(compiler, dst, dstw, src, srcw);
1578 		return emit_unary(compiler, NOT_rm, dst, dstw, src, srcw);
1579 
1580 	case SLJIT_CLZ:
1581 		return emit_clz(compiler, op_flags, dst, dstw, src, srcw);
1582 	}
1583 
1584 	return SLJIT_SUCCESS;
1585 }
1586 
1587 static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
1588 	sljit_u32 op_types,
1589 	sljit_s32 dst, sljit_sw dstw,
1590 	sljit_s32 src1, sljit_sw src1w,
1591 	sljit_s32 src2, sljit_sw src2w)
1592 {
1593 	sljit_u8* inst;
1594 	sljit_u8 op_eax_imm = U8(op_types >> 24);
1595 	sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
1596 	sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
1597 	sljit_u8 op_imm = U8(op_types & 0xff);
1598 
1599 	if (dst == src1 && dstw == src1w) {
1600 		if (src2 & SLJIT_IMM) {
1601 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1602 			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
1603 #else
1604 			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
1605 #endif
1606 				BINARY_EAX_IMM(op_eax_imm, src2w);
1607 			}
1608 			else {
1609 				BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
1610 			}
1611 		}
1612 		else if (FAST_IS_REG(dst)) {
1613 			inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
1614 			FAIL_IF(!inst);
1615 			*inst = op_rm;
1616 		}
1617 		else if (FAST_IS_REG(src2)) {
1618 			/* Special exception for sljit_emit_op_flags. */
1619 			inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
1620 			FAIL_IF(!inst);
1621 			*inst = op_mr;
1622 		}
1623 		else {
1624 			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
1625 			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1626 			FAIL_IF(!inst);
1627 			*inst = op_mr;
1628 		}
1629 		return SLJIT_SUCCESS;
1630 	}
1631 
1632 	/* Only for cumulative operations. */
1633 	if (dst == src2 && dstw == src2w) {
1634 		if (src1 & SLJIT_IMM) {
1635 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1636 			if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
1637 #else
1638 			if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128)) {
1639 #endif
1640 				BINARY_EAX_IMM(op_eax_imm, src1w);
1641 			}
1642 			else {
1643 				BINARY_IMM(op_imm, op_mr, src1w, dst, dstw);
1644 			}
1645 		}
1646 		else if (FAST_IS_REG(dst)) {
1647 			inst = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w);
1648 			FAIL_IF(!inst);
1649 			*inst = op_rm;
1650 		}
1651 		else if (FAST_IS_REG(src1)) {
1652 			inst = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw);
1653 			FAIL_IF(!inst);
1654 			*inst = op_mr;
1655 		}
1656 		else {
1657 			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
1658 			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1659 			FAIL_IF(!inst);
1660 			*inst = op_mr;
1661 		}
1662 		return SLJIT_SUCCESS;
1663 	}
1664 
1665 	/* General version. */
1666 	if (FAST_IS_REG(dst)) {
1667 		EMIT_MOV(compiler, dst, 0, src1, src1w);
1668 		if (src2 & SLJIT_IMM) {
1669 			BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
1670 		}
1671 		else {
1672 			inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
1673 			FAIL_IF(!inst);
1674 			*inst = op_rm;
1675 		}
1676 	}
1677 	else {
1678 		/* This version requires fewer memory writes. */
1679 		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
1680 		if (src2 & SLJIT_IMM) {
1681 			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
1682 		}
1683 		else {
1684 			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
1685 			FAIL_IF(!inst);
1686 			*inst = op_rm;
1687 		}
1688 		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1689 	}
1690 
1691 	return SLJIT_SUCCESS;
1692 }
1693 
1694 static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
1695 	sljit_u32 op_types,
1696 	sljit_s32 dst, sljit_sw dstw,
1697 	sljit_s32 src1, sljit_sw src1w,
1698 	sljit_s32 src2, sljit_sw src2w)
1699 {
1700 	sljit_u8* inst;
1701 	sljit_u8 op_eax_imm = U8(op_types >> 24);
1702 	sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
1703 	sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
1704 	sljit_u8 op_imm = U8(op_types & 0xff);
1705 
1706 	if (dst == src1 && dstw == src1w) {
1707 		if (src2 & SLJIT_IMM) {
1708 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1709 			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
1710 #else
1711 			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
1712 #endif
1713 				BINARY_EAX_IMM(op_eax_imm, src2w);
1714 			}
1715 			else {
1716 				BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
1717 			}
1718 		}
1719 		else if (FAST_IS_REG(dst)) {
1720 			inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
1721 			FAIL_IF(!inst);
1722 			*inst = op_rm;
1723 		}
1724 		else if (FAST_IS_REG(src2)) {
1725 			inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
1726 			FAIL_IF(!inst);
1727 			*inst = op_mr;
1728 		}
1729 		else {
1730 			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
1731 			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
1732 			FAIL_IF(!inst);
1733 			*inst = op_mr;
1734 		}
1735 		return SLJIT_SUCCESS;
1736 	}
1737 
1738 	/* General version. */
1739 	if (FAST_IS_REG(dst) && dst != src2) {
1740 		EMIT_MOV(compiler, dst, 0, src1, src1w);
1741 		if (src2 & SLJIT_IMM) {
1742 			BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
1743 		}
1744 		else {
1745 			inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
1746 			FAIL_IF(!inst);
1747 			*inst = op_rm;
1748 		}
1749 	}
1750 	else {
1751 		/* This version requires fewer memory writes. */
1752 		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
1753 		if (src2 & SLJIT_IMM) {
1754 			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
1755 		}
1756 		else {
1757 			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
1758 			FAIL_IF(!inst);
1759 			*inst = op_rm;
1760 		}
1761 		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1762 	}
1763 
1764 	return SLJIT_SUCCESS;
1765 }
1766 
1767 static sljit_s32 emit_mul(struct sljit_compiler *compiler,
1768 	sljit_s32 dst, sljit_sw dstw,
1769 	sljit_s32 src1, sljit_sw src1w,
1770 	sljit_s32 src2, sljit_sw src2w)
1771 {
1772 	sljit_u8* inst;
1773 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
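	/* Three IMUL encodings are used below: the two operand IMUL r, r/m form
	   and the three operand IMUL r, r/m, imm8 / imm32 forms. */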
1774 
1775 	/* Register destination. */
1776 	if (dst_r == src1 && !(src2 & SLJIT_IMM)) {
1777 		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
1778 		FAIL_IF(!inst);
1779 		*inst++ = GROUP_0F;
1780 		*inst = IMUL_r_rm;
1781 	}
1782 	else if (dst_r == src2 && !(src1 & SLJIT_IMM)) {
1783 		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src1, src1w);
1784 		FAIL_IF(!inst);
1785 		*inst++ = GROUP_0F;
1786 		*inst = IMUL_r_rm;
1787 	}
1788 	else if (src1 & SLJIT_IMM) {
1789 		if (src2 & SLJIT_IMM) {
1790 			EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, src2w);
1791 			src2 = dst_r;
1792 			src2w = 0;
1793 		}
1794 
1795 		if (src1w <= 127 && src1w >= -128) {
1796 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
1797 			FAIL_IF(!inst);
1798 			*inst = IMUL_r_rm_i8;
1799 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1800 			FAIL_IF(!inst);
1801 			INC_SIZE(1);
1802 			*inst = U8(src1w);
1803 		}
1804 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1805 		else {
1806 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
1807 			FAIL_IF(!inst);
1808 			*inst = IMUL_r_rm_i32;
1809 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
1810 			FAIL_IF(!inst);
1811 			INC_SIZE(4);
1812 			sljit_unaligned_store_sw(inst, src1w);
1813 		}
1814 #else
1815 		else if (IS_HALFWORD(src1w)) {
1816 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
1817 			FAIL_IF(!inst);
1818 			*inst = IMUL_r_rm_i32;
1819 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
1820 			FAIL_IF(!inst);
1821 			INC_SIZE(4);
1822 			sljit_unaligned_store_s32(inst, (sljit_s32)src1w);
1823 		}
1824 		else {
1825 			if (dst_r != src2)
1826 				EMIT_MOV(compiler, dst_r, 0, src2, src2w);
1827 			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w));
1828 			inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
1829 			FAIL_IF(!inst);
1830 			*inst++ = GROUP_0F;
1831 			*inst = IMUL_r_rm;
1832 		}
1833 #endif
1834 	}
1835 	else if (src2 & SLJIT_IMM) {
1836 		/* Note: src1 is NOT immediate. */
1837 
1838 		if (src2w <= 127 && src2w >= -128) {
1839 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
1840 			FAIL_IF(!inst);
1841 			*inst = IMUL_r_rm_i8;
1842 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
1843 			FAIL_IF(!inst);
1844 			INC_SIZE(1);
1845 			*inst = U8(src2w);
1846 		}
1847 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
1848 		else {
1849 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
1850 			FAIL_IF(!inst);
1851 			*inst = IMUL_r_rm_i32;
1852 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
1853 			FAIL_IF(!inst);
1854 			INC_SIZE(4);
1855 			sljit_unaligned_store_sw(inst, src2w);
1856 		}
1857 #else
1858 		else if (IS_HALFWORD(src2w)) {
1859 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
1860 			FAIL_IF(!inst);
1861 			*inst = IMUL_r_rm_i32;
1862 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
1863 			FAIL_IF(!inst);
1864 			INC_SIZE(4);
1865 			sljit_unaligned_store_s32(inst, (sljit_s32)src2w);
1866 		}
1867 		else {
1868 			if (dst_r != src1)
1869 				EMIT_MOV(compiler, dst_r, 0, src1, src1w);
1870 			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
1871 			inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
1872 			FAIL_IF(!inst);
1873 			*inst++ = GROUP_0F;
1874 			*inst = IMUL_r_rm;
1875 		}
1876 #endif
1877 	}
1878 	else {
1879 		/* Neither argument is immediate. */
1880 		if (ADDRESSING_DEPENDS_ON(src2, dst_r))
1881 			dst_r = TMP_REG1;
1882 		EMIT_MOV(compiler, dst_r, 0, src1, src1w);
1883 		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
1884 		FAIL_IF(!inst);
1885 		*inst++ = GROUP_0F;
1886 		*inst = IMUL_r_rm;
1887 	}
1888 
1889 	if (dst & SLJIT_MEM)
1890 		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
1891 
1892 	return SLJIT_SUCCESS;
1893 }
1894 
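/* Emits an addition as a single LEA when the operand combination allows it.
   LEA does not modify the flags, so callers only try this when the operation
   does not need them; SLJIT_ERR_UNSUPPORTED means the normal code path must
   be used instead. */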
1895 static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler,
1896 	sljit_s32 dst, sljit_sw dstw,
1897 	sljit_s32 src1, sljit_sw src1w,
1898 	sljit_s32 src2, sljit_sw src2w)
1899 {
1900 	sljit_u8* inst;
1901 	sljit_s32 dst_r, done = 0;
1902 
1903 	/* These cases are better left to the normal code path. */
1904 	if (dst == src1 && dstw == src1w)
1905 		return SLJIT_ERR_UNSUPPORTED;
1906 	if (dst == src2 && dstw == src2w)
1907 		return SLJIT_ERR_UNSUPPORTED;
1908 
1909 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1910 
1911 	if (FAST_IS_REG(src1)) {
1912 		if (FAST_IS_REG(src2)) {
1913 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), 0);
1914 			FAIL_IF(!inst);
1915 			*inst = LEA_r_m;
1916 			done = 1;
1917 		}
1918 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1919 		if ((src2 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src2w))) {
1920 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_s32)src2w);
1921 #else
1922 		if (src2 & SLJIT_IMM) {
1923 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w);
1924 #endif
1925 			FAIL_IF(!inst);
1926 			*inst = LEA_r_m;
1927 			done = 1;
1928 		}
1929 	}
1930 	else if (FAST_IS_REG(src2)) {
1931 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1932 		if ((src1 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src1w))) {
1933 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_s32)src1w);
1934 #else
1935 		if (src1 & SLJIT_IMM) {
1936 			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w);
1937 #endif
1938 			FAIL_IF(!inst);
1939 			*inst = LEA_r_m;
1940 			done = 1;
1941 		}
1942 	}
1943 
1944 	if (done) {
1945 		if (dst_r == TMP_REG1)
1946 			return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
1947 		return SLJIT_SUCCESS;
1948 	}
1949 	return SLJIT_ERR_UNSUPPORTED;
1950 }
1951 
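/* CMP and TEST only update the flags; sljit_emit_op2u() maps a flag-only
   SLJIT_SUB to emit_cmp_binary() and a flag-only SLJIT_AND to
   emit_test_binary(). */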
1952 static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
1953 	sljit_s32 src1, sljit_sw src1w,
1954 	sljit_s32 src2, sljit_sw src2w)
1955 {
1956 	sljit_u8* inst;
1957 
1958 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1959 	if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
1960 #else
1961 	if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
1962 #endif
1963 		BINARY_EAX_IMM(CMP_EAX_i32, src2w);
1964 		return SLJIT_SUCCESS;
1965 	}
1966 
1967 	if (FAST_IS_REG(src1)) {
1968 		if (src2 & SLJIT_IMM) {
1969 			BINARY_IMM(CMP, CMP_rm_r, src2w, src1, 0);
1970 		}
1971 		else {
1972 			inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
1973 			FAIL_IF(!inst);
1974 			*inst = CMP_r_rm;
1975 		}
1976 		return SLJIT_SUCCESS;
1977 	}
1978 
1979 	if (FAST_IS_REG(src2) && !(src1 & SLJIT_IMM)) {
1980 		inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
1981 		FAIL_IF(!inst);
1982 		*inst = CMP_rm_r;
1983 		return SLJIT_SUCCESS;
1984 	}
1985 
1986 	if (src2 & SLJIT_IMM) {
1987 		if (src1 & SLJIT_IMM) {
1988 			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
1989 			src1 = TMP_REG1;
1990 			src1w = 0;
1991 		}
1992 		BINARY_IMM(CMP, CMP_rm_r, src2w, src1, src1w);
1993 	}
1994 	else {
1995 		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
1996 		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
1997 		FAIL_IF(!inst);
1998 		*inst = CMP_r_rm;
1999 	}
2000 	return SLJIT_SUCCESS;
2001 }
2002 
2003 static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
2004 	sljit_s32 src1, sljit_sw src1w,
2005 	sljit_s32 src2, sljit_sw src2w)
2006 {
2007 	sljit_u8* inst;
2008 
2009 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2010 	if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
2011 #else
2012 	if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
2013 #endif
2014 		BINARY_EAX_IMM(TEST_EAX_i32, src2w);
2015 		return SLJIT_SUCCESS;
2016 	}
2017 
2018 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2019 	if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
2020 #else
2021 	if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128)) {
2022 #endif
2023 		BINARY_EAX_IMM(TEST_EAX_i32, src1w);
2024 		return SLJIT_SUCCESS;
2025 	}
2026 
2027 	if (!(src1 & SLJIT_IMM)) {
2028 		if (src2 & SLJIT_IMM) {
2029 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2030 			if (IS_HALFWORD(src2w) || compiler->mode32) {
2031 				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
2032 				FAIL_IF(!inst);
2033 				*inst = GROUP_F7;
2034 			}
2035 			else {
2036 				FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src2w));
2037 				inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src1, src1w);
2038 				FAIL_IF(!inst);
2039 				*inst = TEST_rm_r;
2040 			}
2041 #else
2042 			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
2043 			FAIL_IF(!inst);
2044 			*inst = GROUP_F7;
2045 #endif
2046 			return SLJIT_SUCCESS;
2047 		}
2048 		else if (FAST_IS_REG(src1)) {
2049 			inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
2050 			FAIL_IF(!inst);
2051 			*inst = TEST_rm_r;
2052 			return SLJIT_SUCCESS;
2053 		}
2054 	}
2055 
2056 	if (!(src2 & SLJIT_IMM)) {
2057 		if (src1 & SLJIT_IMM) {
2058 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2059 			if (IS_HALFWORD(src1w) || compiler->mode32) {
2060 				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, src2w);
2061 				FAIL_IF(!inst);
2062 				*inst = GROUP_F7;
2063 			}
2064 			else {
2065 				FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src1w));
2066 				inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
2067 				FAIL_IF(!inst);
2068 				*inst = TEST_rm_r;
2069 			}
2070 #else
2071 			inst = emit_x86_instruction(compiler, 1, src1, src1w, src2, src2w);
2072 			FAIL_IF(!inst);
2073 			*inst = GROUP_F7;
2074 #endif
2075 			return SLJIT_SUCCESS;
2076 		}
2077 		else if (FAST_IS_REG(src2)) {
2078 			inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
2079 			FAIL_IF(!inst);
2080 			*inst = TEST_rm_r;
2081 			return SLJIT_SUCCESS;
2082 		}
2083 	}
2084 
2085 	EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2086 	if (src2 & SLJIT_IMM) {
2087 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2088 		if (IS_HALFWORD(src2w) || compiler->mode32) {
2089 			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
2090 			FAIL_IF(!inst);
2091 			*inst = GROUP_F7;
2092 		}
2093 		else {
2094 			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
2095 			inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, TMP_REG1, 0);
2096 			FAIL_IF(!inst);
2097 			*inst = TEST_rm_r;
2098 		}
2099 #else
2100 		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
2101 		FAIL_IF(!inst);
2102 		*inst = GROUP_F7;
2103 #endif
2104 	}
2105 	else {
2106 		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
2107 		FAIL_IF(!inst);
2108 		*inst = TEST_rm_r;
2109 	}
2110 	return SLJIT_SUCCESS;
2111 }
2112 
2113 static sljit_s32 emit_shift(struct sljit_compiler *compiler,
2114 	sljit_u8 mode,
2115 	sljit_s32 dst, sljit_sw dstw,
2116 	sljit_s32 src1, sljit_sw src1w,
2117 	sljit_s32 src2, sljit_sw src2w)
2118 {
2119 	sljit_u8* inst;
2120 
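	/* Variable shift counts must be in cl (SLJIT_PREF_SHIFT_REG); the cases
	   below either use an immediate count or move the count into ecx while
	   preserving its previous contents. */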
2121 	if ((src2 & SLJIT_IMM) || (src2 == SLJIT_PREF_SHIFT_REG)) {
2122 		if (dst == src1 && dstw == src1w) {
2123 			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw);
2124 			FAIL_IF(!inst);
2125 			*inst |= mode;
2126 			return SLJIT_SUCCESS;
2127 		}
2128 		if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
2129 			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2130 			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2131 			FAIL_IF(!inst);
2132 			*inst |= mode;
2133 			EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2134 			return SLJIT_SUCCESS;
2135 		}
2136 		if (FAST_IS_REG(dst)) {
2137 			EMIT_MOV(compiler, dst, 0, src1, src1w);
2138 			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0);
2139 			FAIL_IF(!inst);
2140 			*inst |= mode;
2141 			return SLJIT_SUCCESS;
2142 		}
2143 
2144 		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2145 		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
2146 		FAIL_IF(!inst);
2147 		*inst |= mode;
2148 		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
2149 		return SLJIT_SUCCESS;
2150 	}
2151 
2152 	if (dst == SLJIT_PREF_SHIFT_REG) {
2153 		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2154 		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
2155 		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2156 		FAIL_IF(!inst);
2157 		*inst |= mode;
2158 		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2159 	}
2160 	else if (FAST_IS_REG(dst) && dst != src2 && dst != TMP_REG1 && !ADDRESSING_DEPENDS_ON(src2, dst)) {
2161 		if (src1 != dst)
2162 			EMIT_MOV(compiler, dst, 0, src1, src1w);
2163 		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
2164 		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
2165 		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0);
2166 		FAIL_IF(!inst);
2167 		*inst |= mode;
2168 		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2169 	}
2170 	else {
2171 		/* This case is complex since ecx itself may be used for
2172 		   addressing, and that must be supported as well. */
2173 		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
2174 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2175 		EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
2176 		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
2177 		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2178 		FAIL_IF(!inst);
2179 		*inst |= mode;
2180 		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_SP), 0);
2181 #else
2182 		EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_PREF_SHIFT_REG, 0);
2183 		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
2184 		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
2185 		FAIL_IF(!inst);
2186 		*inst |= mode;
2187 		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG2, 0);
2188 #endif
2189 		if (dst != TMP_REG1)
2190 			return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2191 	}
2192 
2193 	return SLJIT_SUCCESS;
2194 }
2195 
2196 static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler,
2197 	sljit_u8 mode, sljit_s32 set_flags,
2198 	sljit_s32 dst, sljit_sw dstw,
2199 	sljit_s32 src1, sljit_sw src1w,
2200 	sljit_s32 src2, sljit_sw src2w)
2201 {
2202 	/* The CPU does not set flags if the shift count is 0. */
2203 	if (src2 & SLJIT_IMM) {
2204 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2205 		if ((src2w & 0x3f) != 0 || (compiler->mode32 && (src2w & 0x1f) != 0))
2206 			return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2207 #else
2208 		if ((src2w & 0x1f) != 0)
2209 			return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2210 #endif
2211 		if (!set_flags)
2212 			return emit_mov(compiler, dst, dstw, src1, src1w);
2213 		/* OR dst, src, 0 */
2214 		return emit_cum_binary(compiler, BINARY_OPCODE(OR),
2215 			dst, dstw, src1, src1w, SLJIT_IMM, 0);
2216 	}
2217 
2218 	if (!set_flags)
2219 		return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
2220 
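	/* The count is a register here and may be zero at run time, so the flags
	   are produced by an explicit compare against zero around the shift. */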
2221 	if (!FAST_IS_REG(dst))
2222 		FAIL_IF(emit_cmp_binary(compiler, src1, src1w, SLJIT_IMM, 0));
2223 
2224 	FAIL_IF(emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w));
2225 
2226 	if (FAST_IS_REG(dst))
2227 		return emit_cmp_binary(compiler, dst, dstw, SLJIT_IMM, 0);
2228 	return SLJIT_SUCCESS;
2229 }
2230 
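/* Usage sketch: sljit_emit_op2(compiler, SLJIT_ADD, SLJIT_R0, 0, SLJIT_R0, 0,
   SLJIT_IMM, 1) takes the cumulative ADD path; since dst equals src1, it is
   expected to encode a single add-immediate instruction. */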
2231 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
2232 	sljit_s32 dst, sljit_sw dstw,
2233 	sljit_s32 src1, sljit_sw src1w,
2234 	sljit_s32 src2, sljit_sw src2w)
2235 {
2236 	CHECK_ERROR();
2237 	CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
2238 	ADJUST_LOCAL_OFFSET(dst, dstw);
2239 	ADJUST_LOCAL_OFFSET(src1, src1w);
2240 	ADJUST_LOCAL_OFFSET(src2, src2w);
2241 
2242 	CHECK_EXTRA_REGS(dst, dstw, (void)0);
2243 	CHECK_EXTRA_REGS(src1, src1w, (void)0);
2244 	CHECK_EXTRA_REGS(src2, src2w, (void)0);
2245 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2246 	compiler->mode32 = op & SLJIT_32;
2247 #endif
2248 
2249 	SLJIT_ASSERT(dst != TMP_REG1 || HAS_FLAGS(op));
2250 
2251 	switch (GET_OPCODE(op)) {
2252 	case SLJIT_ADD:
2253 		if (!HAS_FLAGS(op)) {
2254 			if (emit_lea_binary(compiler, dst, dstw, src1, src1w, src2, src2w) != SLJIT_ERR_UNSUPPORTED)
2255 				return compiler->error;
2256 		}
2257 		return emit_cum_binary(compiler, BINARY_OPCODE(ADD),
2258 			dst, dstw, src1, src1w, src2, src2w);
2259 	case SLJIT_ADDC:
2260 		return emit_cum_binary(compiler, BINARY_OPCODE(ADC),
2261 			dst, dstw, src1, src1w, src2, src2w);
2262 	case SLJIT_SUB:
2263 		if (src1 == SLJIT_IMM && src1w == 0)
2264 			return emit_unary(compiler, NEG_rm, dst, dstw, src2, src2w);
2265 
2266 		if (!HAS_FLAGS(op)) {
2267 			if ((src2 & SLJIT_IMM) && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED)
2268 				return compiler->error;
2269 			if (FAST_IS_REG(dst) && src2 == dst) {
2270 				FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB), dst, 0, dst, 0, src1, src1w));
2271 				return emit_unary(compiler, NEG_rm, dst, 0, dst, 0);
2272 			}
2273 		}
2274 
2275 		return emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
2276 			dst, dstw, src1, src1w, src2, src2w);
2277 	case SLJIT_SUBC:
2278 		return emit_non_cum_binary(compiler, BINARY_OPCODE(SBB),
2279 			dst, dstw, src1, src1w, src2, src2w);
2280 	case SLJIT_MUL:
2281 		return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w);
2282 	case SLJIT_AND:
2283 		return emit_cum_binary(compiler, BINARY_OPCODE(AND),
2284 			dst, dstw, src1, src1w, src2, src2w);
2285 	case SLJIT_OR:
2286 		return emit_cum_binary(compiler, BINARY_OPCODE(OR),
2287 			dst, dstw, src1, src1w, src2, src2w);
2288 	case SLJIT_XOR:
2289 		return emit_cum_binary(compiler, BINARY_OPCODE(XOR),
2290 			dst, dstw, src1, src1w, src2, src2w);
2291 	case SLJIT_SHL:
2292 		return emit_shift_with_flags(compiler, SHL, HAS_FLAGS(op),
2293 			dst, dstw, src1, src1w, src2, src2w);
2294 	case SLJIT_LSHR:
2295 		return emit_shift_with_flags(compiler, SHR, HAS_FLAGS(op),
2296 			dst, dstw, src1, src1w, src2, src2w);
2297 	case SLJIT_ASHR:
2298 		return emit_shift_with_flags(compiler, SAR, HAS_FLAGS(op),
2299 			dst, dstw, src1, src1w, src2, src2w);
2300 	}
2301 
2302 	return SLJIT_SUCCESS;
2303 }
2304 
2305 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
2306 	sljit_s32 src1, sljit_sw src1w,
2307 	sljit_s32 src2, sljit_sw src2w)
2308 {
2309 	sljit_s32 opcode = GET_OPCODE(op);
2310 
2311 	CHECK_ERROR();
2312 	CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
2313 
2314 	if (opcode != SLJIT_SUB && opcode != SLJIT_AND) {
2315 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
2316 			|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
2317 		compiler->skip_checks = 1;
2318 #endif
2319 		return sljit_emit_op2(compiler, op, TMP_REG1, 0, src1, src1w, src2, src2w);
2320 	}
2321 
2322 	ADJUST_LOCAL_OFFSET(src1, src1w);
2323 	ADJUST_LOCAL_OFFSET(src2, src2w);
2324 
2325 	CHECK_EXTRA_REGS(src1, src1w, (void)0);
2326 	CHECK_EXTRA_REGS(src2, src2w, (void)0);
2327 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2328 	compiler->mode32 = op & SLJIT_32;
2329 #endif
2330 
2331 	if (opcode == SLJIT_SUB) {
2332 		return emit_cmp_binary(compiler, src1, src1w, src2, src2w);
2333 	}
2334 	return emit_test_binary(compiler, src1, src1w, src2, src2w);
2335 }
2336 
2337 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
2338 	sljit_s32 src, sljit_sw srcw)
2339 {
2340 	CHECK_ERROR();
2341 	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
2342 	ADJUST_LOCAL_OFFSET(src, srcw);
2343 
2344 	CHECK_EXTRA_REGS(src, srcw, (void)0);
2345 
2346 	switch (op) {
2347 	case SLJIT_FAST_RETURN:
2348 		return emit_fast_return(compiler, src, srcw);
2349 	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
2350 		/* Don't adjust the shadow stack if it isn't enabled. */
2351 		if (!cpu_has_shadow_stack ())
2352 			return SLJIT_SUCCESS;
2353 		return adjust_shadow_stack(compiler, src, srcw);
2354 	case SLJIT_PREFETCH_L1:
2355 	case SLJIT_PREFETCH_L2:
2356 	case SLJIT_PREFETCH_L3:
2357 	case SLJIT_PREFETCH_ONCE:
2358 		return emit_prefetch(compiler, op, src, srcw);
2359 	}
2360 
2361 	return SLJIT_SUCCESS;
2362 }
2363 
2364 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
2365 {
2366 	CHECK_REG_INDEX(check_sljit_get_register_index(reg));
2367 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2368 	if (reg >= SLJIT_R3 && reg <= SLJIT_R8)
2369 		return -1;
2370 #endif
2371 	return reg_map[reg];
2372 }
2373 
2374 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
2375 {
2376 	CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
2377 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2378 	return reg;
2379 #else
2380 	return freg_map[reg];
2381 #endif
2382 }
2383 
2384 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
2385 	void *instruction, sljit_u32 size)
2386 {
2387 	sljit_u8 *inst;
2388 
2389 	CHECK_ERROR();
2390 	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
2391 
2392 	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
2393 	FAIL_IF(!inst);
2394 	INC_SIZE(size);
2395 	SLJIT_MEMCPY(inst, instruction, size);
2396 	return SLJIT_SUCCESS;
2397 }
2398 
2399 /* --------------------------------------------------------------------- */
2400 /*  Floating point operators                                             */
2401 /* --------------------------------------------------------------------- */
2402 
2403 /* Alignment (3 words) + 4 constants of 16 bytes each. */
2404 static sljit_u32 sse2_data[3 + (4 * 4)];
2405 static sljit_u32 *sse2_buffer;
2406 
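/* The four 16 byte slots below hold the sign bit mask and the absolute value
   mask for single and double precision; SLJIT_NEG_F64 applies the sign mask
   with XORPD and SLJIT_ABS_F64 applies the absolute value mask with ANDPD. */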
2407 static void init_compiler(void)
2408 {
2409 	/* Align to 16 bytes. */
2410 	sse2_buffer = (sljit_u32*)(((sljit_uw)sse2_data + 15) & ~(sljit_uw)0xf);
2411 
2412 	/* Single precision constants (each constant is 16 bytes long). */
2413 	sse2_buffer[0] = 0x80000000;
2414 	sse2_buffer[4] = 0x7fffffff;
2415 	/* Double precision constants (each constant is 16 bytes long). */
2416 	sse2_buffer[8] = 0;
2417 	sse2_buffer[9] = 0x80000000;
2418 	sse2_buffer[12] = 0xffffffff;
2419 	sse2_buffer[13] = 0x7fffffff;
2420 }
2421 
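/* Scalar SSE2 arithmetic helper: the F3 prefix selects the single precision
   (ss) form of the opcode and F2 the double precision (sd) form. */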
2422 static sljit_s32 emit_sse2(struct sljit_compiler *compiler, sljit_u8 opcode,
2423 	sljit_s32 single, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
2424 {
2425 	sljit_u8 *inst;
2426 
2427 	inst = emit_x86_instruction(compiler, 2 | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
2428 	FAIL_IF(!inst);
2429 	*inst++ = GROUP_0F;
2430 	*inst = opcode;
2431 	return SLJIT_SUCCESS;
2432 }
2433 
2434 static sljit_s32 emit_sse2_logic(struct sljit_compiler *compiler, sljit_u8 opcode,
2435 	sljit_s32 pref66, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
2436 {
2437 	sljit_u8 *inst;
2438 
2439 	inst = emit_x86_instruction(compiler, 2 | (pref66 ? EX86_PREF_66 : 0) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
2440 	FAIL_IF(!inst);
2441 	*inst++ = GROUP_0F;
2442 	*inst = opcode;
2443 	return SLJIT_SUCCESS;
2444 }
2445 
2446 static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
2447 	sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
2448 {
2449 	return emit_sse2(compiler, MOVSD_x_xm, single, dst, src, srcw);
2450 }
2451 
2452 static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
2453 	sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)
2454 {
2455 	return emit_sse2(compiler, MOVSD_xm_x, single, src, dst, dstw);
2456 }
2457 
2458 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
2459 	sljit_s32 dst, sljit_sw dstw,
2460 	sljit_s32 src, sljit_sw srcw)
2461 {
2462 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
2463 	sljit_u8 *inst;
2464 
2465 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2466 	if (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)
2467 		compiler->mode32 = 0;
2468 #endif
2469 
2470 	inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP2, dst_r, 0, src, srcw);
2471 	FAIL_IF(!inst);
2472 	*inst++ = GROUP_0F;
2473 	*inst = CVTTSD2SI_r_xm;
2474 
2475 	if (dst & SLJIT_MEM)
2476 		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2477 	return SLJIT_SUCCESS;
2478 }
2479 
2480 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
2481 	sljit_s32 dst, sljit_sw dstw,
2482 	sljit_s32 src, sljit_sw srcw)
2483 {
2484 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
2485 	sljit_u8 *inst;
2486 
2487 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2488 	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
2489 		compiler->mode32 = 0;
2490 #endif
2491 
2492 	if (src & SLJIT_IMM) {
2493 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2494 		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
2495 			srcw = (sljit_s32)srcw;
2496 #endif
2497 		EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
2498 		src = TMP_REG1;
2499 		srcw = 0;
2500 	}
2501 
2502 	inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP1, dst_r, 0, src, srcw);
2503 	FAIL_IF(!inst);
2504 	*inst++ = GROUP_0F;
2505 	*inst = CVTSI2SD_x_rm;
2506 
2507 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2508 	compiler->mode32 = 1;
2509 #endif
2510 	if (dst_r == TMP_FREG)
2511 		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2512 	return SLJIT_SUCCESS;
2513 }
2514 
2515 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
2516 	sljit_s32 src1, sljit_sw src1w,
2517 	sljit_s32 src2, sljit_sw src2w)
2518 {
2519 	if (!FAST_IS_REG(src1)) {
2520 		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
2521 		src1 = TMP_FREG;
2522 	}
2523 
2524 	return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_32), src1, src2, src2w);
2525 }
2526 
2527 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
2528 	sljit_s32 dst, sljit_sw dstw,
2529 	sljit_s32 src, sljit_sw srcw)
2530 {
2531 	sljit_s32 dst_r;
2532 
2533 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2534 	compiler->mode32 = 1;
2535 #endif
2536 
2537 	CHECK_ERROR();
2538 	SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
2539 
2540 	if (GET_OPCODE(op) == SLJIT_MOV_F64) {
2541 		if (FAST_IS_REG(dst))
2542 			return emit_sse2_load(compiler, op & SLJIT_32, dst, src, srcw);
2543 		if (FAST_IS_REG(src))
2544 			return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, src);
2545 		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src, srcw));
2546 		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2547 	}
2548 
2549 	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) {
2550 		dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
2551 		if (FAST_IS_REG(src)) {
2552 			/* We overwrite the high bits of the source register. From the SLJIT
2553 			   point of view, this is not an issue.
2554 			   Note: in SSE3, MOVDDUP and MOVSLDUP could also be used. */
2555 			FAIL_IF(emit_sse2_logic(compiler, UNPCKLPD_x_xm, op & SLJIT_32, src, src, 0));
2556 		}
2557 		else {
2558 			FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_32), TMP_FREG, src, srcw));
2559 			src = TMP_FREG;
2560 		}
2561 
2562 		FAIL_IF(emit_sse2_logic(compiler, CVTPD2PS_x_xm, op & SLJIT_32, dst_r, src, 0));
2563 		if (dst_r == TMP_FREG)
2564 			return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2565 		return SLJIT_SUCCESS;
2566 	}
2567 
2568 	if (FAST_IS_REG(dst)) {
2569 		dst_r = dst;
2570 		if (dst != src)
2571 			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src, srcw));
2572 	}
2573 	else {
2574 		dst_r = TMP_FREG;
2575 		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src, srcw));
2576 	}
2577 
2578 	switch (GET_OPCODE(op)) {
2579 	case SLJIT_NEG_F64:
2580 		FAIL_IF(emit_sse2_logic(compiler, XORPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_32 ? sse2_buffer : sse2_buffer + 8)));
2581 		break;
2582 
2583 	case SLJIT_ABS_F64:
2584 		FAIL_IF(emit_sse2_logic(compiler, ANDPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_32 ? sse2_buffer + 4 : sse2_buffer + 12)));
2585 		break;
2586 	}
2587 
2588 	if (dst_r == TMP_FREG)
2589 		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2590 	return SLJIT_SUCCESS;
2591 }
2592 
2593 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
2594 	sljit_s32 dst, sljit_sw dstw,
2595 	sljit_s32 src1, sljit_sw src1w,
2596 	sljit_s32 src2, sljit_sw src2w)
2597 {
2598 	sljit_s32 dst_r;
2599 
2600 	CHECK_ERROR();
2601 	CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
2602 	ADJUST_LOCAL_OFFSET(dst, dstw);
2603 	ADJUST_LOCAL_OFFSET(src1, src1w);
2604 	ADJUST_LOCAL_OFFSET(src2, src2w);
2605 
2606 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2607 	compiler->mode32 = 1;
2608 #endif
2609 
2610 	if (FAST_IS_REG(dst)) {
2611 		dst_r = dst;
2612 		if (dst == src1)
2613 			; /* Do nothing here. */
2614 		else if (dst == src2 && (op == SLJIT_ADD_F64 || op == SLJIT_MUL_F64)) {
2615 			/* Swap arguments. */
2616 			src2 = src1;
2617 			src2w = src1w;
2618 		}
2619 		else if (dst != src2)
2620 			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, dst_r, src1, src1w));
2621 		else {
2622 			dst_r = TMP_FREG;
2623 			FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
2624 		}
2625 	}
2626 	else {
2627 		dst_r = TMP_FREG;
2628 		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_32, TMP_FREG, src1, src1w));
2629 	}
2630 
2631 	switch (GET_OPCODE(op)) {
2632 	case SLJIT_ADD_F64:
2633 		FAIL_IF(emit_sse2(compiler, ADDSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
2634 		break;
2635 
2636 	case SLJIT_SUB_F64:
2637 		FAIL_IF(emit_sse2(compiler, SUBSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
2638 		break;
2639 
2640 	case SLJIT_MUL_F64:
2641 		FAIL_IF(emit_sse2(compiler, MULSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
2642 		break;
2643 
2644 	case SLJIT_DIV_F64:
2645 		FAIL_IF(emit_sse2(compiler, DIVSD_x_xm, op & SLJIT_32, dst_r, src2, src2w));
2646 		break;
2647 	}
2648 
2649 	if (dst_r == TMP_FREG)
2650 		return emit_sse2_store(compiler, op & SLJIT_32, dst, dstw, TMP_FREG);
2651 	return SLJIT_SUCCESS;
2652 }
2653 
2654 /* --------------------------------------------------------------------- */
2655 /*  Conditional instructions                                             */
2656 /* --------------------------------------------------------------------- */
2657 
2658 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
2659 {
2660 	sljit_u8 *inst;
2661 	struct sljit_label *label;
2662 
2663 	CHECK_ERROR_PTR();
2664 	CHECK_PTR(check_sljit_emit_label(compiler));
2665 
2666 	if (compiler->last_label && compiler->last_label->size == compiler->size)
2667 		return compiler->last_label;
2668 
2669 	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
2670 	PTR_FAIL_IF(!label);
2671 	set_label(label, compiler);
2672 
2673 	inst = (sljit_u8*)ensure_buf(compiler, 2);
2674 	PTR_FAIL_IF(!inst);
2675 
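	/* A zero size byte followed by a record type byte marks the label in the
	   instruction stream; it is resolved during code generation. */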
2676 	*inst++ = 0;
2677 	*inst++ = 0;
2678 
2679 	return label;
2680 }
2681 
2682 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
2683 {
2684 	sljit_u8 *inst;
2685 	struct sljit_jump *jump;
2686 
2687 	CHECK_ERROR_PTR();
2688 	CHECK_PTR(check_sljit_emit_jump(compiler, type));
2689 
2690 	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2691 	PTR_FAIL_IF_NULL(jump);
2692 	set_jump(jump, compiler, (sljit_u32)((type & SLJIT_REWRITABLE_JUMP) | ((type & 0xff) << TYPE_SHIFT)));
2693 	type &= 0xff;
2694 
2695 	/* Worst case size. */
2696 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2697 	compiler->size += (type >= SLJIT_JUMP) ? 5 : 6;
2698 #else
2699 	compiler->size += (type >= SLJIT_JUMP) ? (10 + 3) : (2 + 10 + 3);
2700 #endif
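	/* 32 bit: jmp rel32 (5 bytes) or jcc rel32 (6 bytes). The 64 bit worst
	   case corresponds to mov reg, imm64 (10 bytes) plus an indirect
	   jmp/call (3 bytes), preceded by a 2 byte short jcc when the jump is
	   conditional. */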
2701 
2702 	inst = (sljit_u8*)ensure_buf(compiler, 2);
2703 	PTR_FAIL_IF_NULL(inst);
2704 
2705 	*inst++ = 0;
2706 	*inst++ = 1;
2707 	return jump;
2708 }
2709 
2710 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
2711 {
2712 	sljit_u8 *inst;
2713 	struct sljit_jump *jump;
2714 
2715 	CHECK_ERROR();
2716 	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
2717 	ADJUST_LOCAL_OFFSET(src, srcw);
2718 
2719 	CHECK_EXTRA_REGS(src, srcw, (void)0);
2720 
2721 	if (src == SLJIT_IMM) {
2722 		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2723 		FAIL_IF_NULL(jump);
2724 		set_jump(jump, compiler, (sljit_u32)(JUMP_ADDR | (type << TYPE_SHIFT)));
2725 		jump->u.target = (sljit_uw)srcw;
2726 
2727 		/* Worst case size. */
2728 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2729 		compiler->size += 5;
2730 #else
2731 		compiler->size += 10 + 3;
2732 #endif
2733 
2734 		inst = (sljit_u8*)ensure_buf(compiler, 2);
2735 		FAIL_IF_NULL(inst);
2736 
2737 		*inst++ = 0;
2738 		*inst++ = 1;
2739 	}
2740 	else {
2741 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2742 		/* REX_W is not necessary (src is not immediate). */
2743 		compiler->mode32 = 1;
2744 #endif
2745 		inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
2746 		FAIL_IF(!inst);
2747 		*inst++ = GROUP_FF;
2748 		*inst = U8(*inst | ((type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm));
2749 	}
2750 	return SLJIT_SUCCESS;
2751 }
2752 
2753 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
2754 	sljit_s32 dst, sljit_sw dstw,
2755 	sljit_s32 type)
2756 {
2757 	sljit_u8 *inst;
2758 	sljit_u8 cond_set = 0;
2759 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2760 	sljit_s32 reg;
2761 #endif
2762 	/* ADJUST_LOCAL_OFFSET and CHECK_EXTRA_REGS might overwrite these values. */
2763 	sljit_s32 dst_save = dst;
2764 	sljit_sw dstw_save = dstw;
2765 
2766 	CHECK_ERROR();
2767 	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
2768 
2769 	ADJUST_LOCAL_OFFSET(dst, dstw);
2770 	CHECK_EXTRA_REGS(dst, dstw, (void)0);
2771 
2772 	type &= 0xff;
2773 	/* setcc = jcc + 0x10. */
2774 	cond_set = U8(get_jump_code((sljit_uw)type) + 0x10);
2775 
2776 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2777 	if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst)) {
2778 		inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 3);
2779 		FAIL_IF(!inst);
2780 		INC_SIZE(4 + 3);
2781 		/* Set low register to conditional flag. */
2782 		*inst++ = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;
2783 		*inst++ = GROUP_0F;
2784 		*inst++ = cond_set;
2785 		*inst++ = MOD_REG | reg_lmap[TMP_REG1];
2786 		*inst++ = U8(REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B));
2787 		*inst++ = OR_rm8_r8;
2788 		*inst++ = U8(MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst]);
2789 		return SLJIT_SUCCESS;
2790 	}
2791 
2792 	reg = (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG1;
2793 
2794 	inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 4);
2795 	FAIL_IF(!inst);
2796 	INC_SIZE(4 + 4);
2797 	/* Set low register to conditional flag. */
2798 	*inst++ = (reg_map[reg] <= 7) ? REX : REX_B;
2799 	*inst++ = GROUP_0F;
2800 	*inst++ = cond_set;
2801 	*inst++ = MOD_REG | reg_lmap[reg];
2802 	*inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
2803 	/* The movzx instruction does not affect flags. */
2804 	*inst++ = GROUP_0F;
2805 	*inst++ = MOVZX_r_rm8;
2806 	*inst = U8(MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg]);
2807 
2808 	if (reg != TMP_REG1)
2809 		return SLJIT_SUCCESS;
2810 
2811 	if (GET_OPCODE(op) < SLJIT_ADD) {
2812 		compiler->mode32 = GET_OPCODE(op) != SLJIT_MOV;
2813 		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2814 	}
2815 
2816 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
2817 		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
2818 	compiler->skip_checks = 1;
2819 #endif
2820 	return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
2821 
2822 #else
2823 	/* The SLJIT_CONFIG_X86_32 code path starts here. */
2824 	if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) {
2825 		if (reg_map[dst] <= 4) {
2826 			/* Low byte is accessible. */
2827 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
2828 			FAIL_IF(!inst);
2829 			INC_SIZE(3 + 3);
2830 			/* Set low byte to conditional flag. */
2831 			*inst++ = GROUP_0F;
2832 			*inst++ = cond_set;
2833 			*inst++ = U8(MOD_REG | reg_map[dst]);
2834 
2835 			*inst++ = GROUP_0F;
2836 			*inst++ = MOVZX_r_rm8;
2837 			*inst = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[dst]);
2838 			return SLJIT_SUCCESS;
2839 		}
2840 
2841 		/* Low byte is not accessible. */
2842 		if (cpu_has_cmov == -1)
2843 			get_cpu_features();
2844 
2845 		if (cpu_has_cmov) {
2846 			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);
2847 			/* An xor reg, reg operation would overwrite the flags. */
2848 			EMIT_MOV(compiler, dst, 0, SLJIT_IMM, 0);
2849 
2850 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
2851 			FAIL_IF(!inst);
2852 			INC_SIZE(3);
2853 
2854 			*inst++ = GROUP_0F;
2855 			/* cmovcc = setcc - 0x50. */
2856 			*inst++ = U8(cond_set - 0x50);
2857 			*inst++ = U8(MOD_REG | (reg_map[dst] << 3) | reg_map[TMP_REG1]);
2858 			return SLJIT_SUCCESS;
2859 		}
2860 
2861 		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
2862 		FAIL_IF(!inst);
2863 		INC_SIZE(1 + 3 + 3 + 1);
2864 		*inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2865 		/* Set al to conditional flag. */
2866 		*inst++ = GROUP_0F;
2867 		*inst++ = cond_set;
2868 		*inst++ = MOD_REG | 0 /* eax */;
2869 
2870 		*inst++ = GROUP_0F;
2871 		*inst++ = MOVZX_r_rm8;
2872 		*inst++ = U8(MOD_REG | (reg_map[dst] << 3) | 0 /* eax */);
2873 		*inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2874 		return SLJIT_SUCCESS;
2875 	}
2876 
2877 	if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
2878 		SLJIT_ASSERT(reg_map[SLJIT_R0] == 0);
2879 
2880 		if (dst != SLJIT_R0) {
2881 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 2 + 1);
2882 			FAIL_IF(!inst);
2883 			INC_SIZE(1 + 3 + 2 + 1);
2884 			/* Set low register to conditional flag. */
2885 			*inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2886 			*inst++ = GROUP_0F;
2887 			*inst++ = cond_set;
2888 			*inst++ = MOD_REG | 0 /* eax */;
2889 			*inst++ = OR_rm8_r8;
2890 			*inst++ = MOD_REG | (0 /* eax */ << 3) | reg_map[dst];
2891 			*inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2892 		}
2893 		else {
2894 			inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 3 + 2 + 2);
2895 			FAIL_IF(!inst);
2896 			INC_SIZE(2 + 3 + 2 + 2);
2897 			/* Set low register to conditional flag. */
2898 			*inst++ = XCHG_r_rm;
2899 			*inst++ = U8(MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1]);
2900 			*inst++ = GROUP_0F;
2901 			*inst++ = cond_set;
2902 			*inst++ = MOD_REG | 1 /* ecx */;
2903 			*inst++ = OR_rm8_r8;
2904 			*inst++ = MOD_REG | (1 /* ecx */ << 3) | 0 /* eax */;
2905 			*inst++ = XCHG_r_rm;
2906 			*inst++ = U8(MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1]);
2907 		}
2908 		return SLJIT_SUCCESS;
2909 	}
2910 
2911 	/* Set TMP_REG1 to the 0/1 value of the condition. */
2912 	inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
2913 	FAIL_IF(!inst);
2914 	INC_SIZE(1 + 3 + 3 + 1);
2915 	*inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2916 	/* Set al to conditional flag. */
2917 	*inst++ = GROUP_0F;
2918 	*inst++ = cond_set;
2919 	*inst++ = MOD_REG | 0 /* eax */;
2920 
2921 	*inst++ = GROUP_0F;
2922 	*inst++ = MOVZX_r_rm8;
2923 	*inst++ = MOD_REG | (0 << 3) /* eax */ | 0 /* eax */;
2924 
2925 	*inst++ = U8(XCHG_EAX_r | reg_map[TMP_REG1]);
2926 
2927 	if (GET_OPCODE(op) < SLJIT_ADD)
2928 		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
2929 
2930 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
2931 		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
2932 	compiler->skip_checks = 1;
2933 #endif
2934 	return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
2935 #endif /* SLJIT_CONFIG_X86_64 */
2936 }
2937 
2938 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
2939 	sljit_s32 dst_reg,
2940 	sljit_s32 src, sljit_sw srcw)
2941 {
2942 	sljit_u8* inst;
2943 
2944 	CHECK_ERROR();
2945 	CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
2946 
2947 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
2948 	dst_reg &= ~SLJIT_32;
2949 
2950 	if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV) || (dst_reg >= SLJIT_R3 && dst_reg <= SLJIT_S3))
2951 		return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
2952 #else
2953 	if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV))
2954 		return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
2955 #endif
2956 
2957 	/* ADJUST_LOCAL_OFFSET is not needed. */
2958 	CHECK_EXTRA_REGS(src, srcw, (void)0);
2959 
2960 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2961 	compiler->mode32 = dst_reg & SLJIT_32;
2962 	dst_reg &= ~SLJIT_32;
2963 #endif
2964 
2965 	if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
2966 		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);
2967 		src = TMP_REG1;
2968 		srcw = 0;
2969 	}
2970 
2971 	inst = emit_x86_instruction(compiler, 2, dst_reg, 0, src, srcw);
2972 	FAIL_IF(!inst);
2973 	*inst++ = GROUP_0F;
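	/* cmovcc = jcc - 0x40. */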
2974 	*inst = U8(get_jump_code(type & 0xff) - 0x40);
2975 	return SLJIT_SUCCESS;
2976 }
2977 
2978 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
2979 {
2980 	CHECK_ERROR();
2981 	CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
2982 	ADJUST_LOCAL_OFFSET(dst, dstw);
2983 
2984 	CHECK_EXTRA_REGS(dst, dstw, (void)0);
2985 
2986 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2987 	compiler->mode32 = 0;
2988 #endif
2989 
2990 	ADJUST_LOCAL_OFFSET(SLJIT_MEM1(SLJIT_SP), offset);
2991 
2992 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
2993 	if (NOT_HALFWORD(offset)) {
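		/* The offset does not fit in a sign extended 32 bit immediate, so it
		   is loaded into TMP_REG1 and added with LEA. */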
2994 		FAIL_IF(emit_load_imm64(compiler, TMP_REG1, offset));
2995 #if (defined SLJIT_DEBUG && SLJIT_DEBUG)
2996 		SLJIT_ASSERT(emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0) != SLJIT_ERR_UNSUPPORTED);
2997 		return compiler->error;
2998 #else
2999 		return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0);
3000 #endif
3001 	}
3002 #endif
3003 
3004 	if (offset != 0)
3005 		return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
3006 	return emit_mov(compiler, dst, dstw, SLJIT_SP, 0);
3007 }
3008 
3009 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
3010 {
3011 	sljit_u8 *inst;
3012 	struct sljit_const *const_;
3013 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3014 	sljit_s32 reg;
3015 #endif
3016 
3017 	CHECK_ERROR_PTR();
3018 	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
3019 	ADJUST_LOCAL_OFFSET(dst, dstw);
3020 
3021 	CHECK_EXTRA_REGS(dst, dstw, (void)0);
3022 
3023 	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
3024 	PTR_FAIL_IF(!const_);
3025 	set_const(const_, compiler);
3026 
3027 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3028 	compiler->mode32 = 0;
3029 	reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
3030 
3031 	if (emit_load_imm64(compiler, reg, init_value))
3032 		return NULL;
3033 #else
3034 	if (emit_mov(compiler, dst, dstw, SLJIT_IMM, init_value))
3035 		return NULL;
3036 #endif
3037 
3038 	inst = (sljit_u8*)ensure_buf(compiler, 2);
3039 	PTR_FAIL_IF(!inst);
3040 
3041 	*inst++ = 0;
3042 	*inst++ = 2;
3043 
3044 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3045 	if (dst & SLJIT_MEM)
3046 		if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
3047 			return NULL;
3048 #endif
3049 
3050 	return const_;
3051 }
3052 
3053 SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
3054 {
3055 	struct sljit_put_label *put_label;
3056 	sljit_u8 *inst;
3057 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3058 	sljit_s32 reg;
3059 	sljit_uw start_size;
3060 #endif
3061 
3062 	CHECK_ERROR_PTR();
3063 	CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
3064 	ADJUST_LOCAL_OFFSET(dst, dstw);
3065 
3066 	CHECK_EXTRA_REGS(dst, dstw, (void)0);
3067 
3068 	put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
3069 	PTR_FAIL_IF(!put_label);
3070 	set_put_label(put_label, compiler, 0);
3071 
3072 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3073 	compiler->mode32 = 0;
3074 	reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
3075 
3076 	if (emit_load_imm64(compiler, reg, 0))
3077 		return NULL;
3078 #else
3079 	if (emit_mov(compiler, dst, dstw, SLJIT_IMM, 0))
3080 		return NULL;
3081 #endif
3082 
3083 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
3084 	if (dst & SLJIT_MEM) {
3085 		start_size = compiler->size;
3086 		if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
3087 			return NULL;
3088 		put_label->flags = compiler->size - start_size;
3089 	}
3090 #endif
3091 
3092 	inst = (sljit_u8*)ensure_buf(compiler, 2);
3093 	PTR_FAIL_IF(!inst);
3094 
3095 	*inst++ = 0;
3096 	*inst++ = 3;
3097 
3098 	return put_label;
3099 }
3100 
3101 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
3102 {
3103 	SLJIT_UNUSED_ARG(executable_offset);
3104 
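	/* On x86-32 the stored value is a rel32 displacement measured from the
	   end of the 4 byte field; on x86-64 it is the absolute target address. */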
3105 	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 0);
3106 #if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
3107 	sljit_unaligned_store_sw((void*)addr, (sljit_sw)(new_target - (addr + 4) - (sljit_uw)executable_offset));
3108 #else
3109 	sljit_unaligned_store_sw((void*)addr, (sljit_sw)new_target);
3110 #endif
3111 	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_uw)), 1);
3112 }
3113 
3114 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
3115 {
3116 	SLJIT_UNUSED_ARG(executable_offset);
3117 
3118 	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_sw)), 0);
3119 	sljit_unaligned_store_sw((void*)addr, new_constant);
3120 	SLJIT_UPDATE_WX_FLAGS((void*)addr, (void*)(addr + sizeof(sljit_sw)), 1);
3121 }
3122