1 /*
2  *    Stack-less Just-In-Time compiler
3  *
4  *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without modification, are
7  * permitted provided that the following conditions are met:
8  *
9  *   1. Redistributions of source code must retain the above copyright notice, this list of
10  *      conditions and the following disclaimer.
11  *
12  *   2. Redistributions in binary form must reproduce the above copyright notice, this list
13  *      of conditions and the following disclaimer in the documentation and/or other materials
14  *      provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19  * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21  * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22  * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  */
26 
sljit_get_platform_name(void)27 SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
28 {
29 	return "SPARC" SLJIT_CPUINFO;
30 }
31 
32 /* Length of an instruction word
33    Both for sparc-32 and sparc-64 */
34 typedef sljit_u32 sljit_ins;
35 
36 #if (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL)
37 
sparc_cache_flush(sljit_ins * from,sljit_ins * to)38 static void sparc_cache_flush(sljit_ins *from, sljit_ins *to)
39 {
40 #if defined(__SUNPRO_C) && __SUNPRO_C < 0x590
41 	__asm (
42 		/* if (from == to) return */
43 		"cmp %i0, %i1\n"
44 		"be .leave\n"
45 		"nop\n"
46 
47 		/* loop until from >= to */
48 		".mainloop:\n"
49 		"flush %i0\n"
50 		"add %i0, 8, %i0\n"
51 		"cmp %i0, %i1\n"
52 		"bcs .mainloop\n"
53 		"nop\n"
54 
55 		/* The comparison was done above. */
56 		"bne .leave\n"
57 		/* nop is not necessary here, since the
58 		   sub operation has no side effect. */
59 		"sub %i0, 4, %i0\n"
60 		"flush %i0\n"
61 		".leave:"
62 	);
63 #else
64 	if (SLJIT_UNLIKELY(from == to))
65 		return;
66 
67 	do {
68 		__asm__ volatile (
69 			"flush %0\n"
70 			: : "r"(from)
71 		);
72 		/* Operates at least on doubleword. */
73 		from += 2;
74 	} while (from < to);
75 
76 	if (from == to) {
77 		/* Flush the last word. */
78 		from --;
79 		__asm__ volatile (
80 			"flush %0\n"
81 			: : "r"(from)
82 		);
83 	}
84 #endif
85 }
86 
87 #endif /* (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL) */
88 
89 /* TMP_REG2 is not used by getput_arg */
90 #define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
91 #define TMP_REG2	(SLJIT_NUMBER_OF_REGISTERS + 3)
92 #define TMP_REG3	(SLJIT_NUMBER_OF_REGISTERS + 4)
93 /* This register is modified by calls, which affects the instruction
94    in the delay slot if it is used as a source register. */
95 #define TMP_LINK	(SLJIT_NUMBER_OF_REGISTERS + 5)
96 
97 #define TMP_FREG1	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
98 #define TMP_FREG2	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
99 
100 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 6] = {
101 	0, 8, 9, 10, 11, 23, 22, 21, 20, 19, 18, 17, 16, 29, 28, 27, 26, 25, 24, 14, 1, 12, 13, 15
102 };
103 
104 static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
105 	0, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30
106 };
107 
108 /* --------------------------------------------------------------------- */
109 /*  Instrucion forms                                                     */
110 /* --------------------------------------------------------------------- */
111 
112 #define D(d)		((sljit_ins)reg_map[d] << 25)
113 #define FD(d)		((sljit_ins)freg_map[d] << 25)
114 #define FDN(d)		(((sljit_ins)freg_map[d] | 0x1) << 25)
115 #define DA(d)		((sljit_ins)(d) << 25)
116 #define S1(s1)		((sljit_ins)reg_map[s1] << 14)
117 #define FS1(s1)		((sljit_ins)freg_map[s1] << 14)
118 #define S1A(s1)		((sljit_ins)(s1) << 14)
119 #define S2(s2)		((sljit_ins)reg_map[s2])
120 #define FS2(s2)		((sljit_ins)freg_map[s2])
121 #define FS2N(s2)	((sljit_ins)freg_map[s2] | 0x1)
122 #define S2A(s2)		((sljit_ins)(s2))
123 #define IMM_ARG		0x2000
124 #define DOP(op)		((sljit_ins)(op) << 5)
125 #define IMM(imm)	(((sljit_ins)(imm) & 0x1fff) | IMM_ARG)
126 
127 #define DR(dr)		(reg_map[dr])
128 #define DRF(dr, flags)	((sljit_s32)(reg_map[dr] | ((flags) & SET_FLAGS)))
129 #define OPC1(opcode)	((sljit_ins)(opcode) << 30)
130 #define OPC2(opcode)	((sljit_ins)(opcode) << 22)
131 #define OPC3(opcode)	((sljit_ins)(opcode) << 19)
132 #define SET_FLAGS	OPC3(0x10)
133 
134 #define ADD		(OPC1(0x2) | OPC3(0x00))
135 #define ADDC		(OPC1(0x2) | OPC3(0x08))
136 #define AND		(OPC1(0x2) | OPC3(0x01))
137 #define ANDN		(OPC1(0x2) | OPC3(0x05))
138 #define CALL		(OPC1(0x1))
139 #define FABSS		(OPC1(0x2) | OPC3(0x34) | DOP(0x09))
140 #define FADDD		(OPC1(0x2) | OPC3(0x34) | DOP(0x42))
141 #define FADDS		(OPC1(0x2) | OPC3(0x34) | DOP(0x41))
142 #define FCMPD		(OPC1(0x2) | OPC3(0x35) | DOP(0x52))
143 #define FCMPS		(OPC1(0x2) | OPC3(0x35) | DOP(0x51))
144 #define FDIVD		(OPC1(0x2) | OPC3(0x34) | DOP(0x4e))
145 #define FDIVS		(OPC1(0x2) | OPC3(0x34) | DOP(0x4d))
146 #define FDTOI		(OPC1(0x2) | OPC3(0x34) | DOP(0xd2))
147 #define FDTOS		(OPC1(0x2) | OPC3(0x34) | DOP(0xc6))
148 #define FITOD		(OPC1(0x2) | OPC3(0x34) | DOP(0xc8))
149 #define FITOS		(OPC1(0x2) | OPC3(0x34) | DOP(0xc4))
150 #define FMOVS		(OPC1(0x2) | OPC3(0x34) | DOP(0x01))
151 #define FMULD		(OPC1(0x2) | OPC3(0x34) | DOP(0x4a))
152 #define FMULS		(OPC1(0x2) | OPC3(0x34) | DOP(0x49))
153 #define FNEGS		(OPC1(0x2) | OPC3(0x34) | DOP(0x05))
154 #define FSTOD		(OPC1(0x2) | OPC3(0x34) | DOP(0xc9))
155 #define FSTOI		(OPC1(0x2) | OPC3(0x34) | DOP(0xd1))
156 #define FSUBD		(OPC1(0x2) | OPC3(0x34) | DOP(0x46))
157 #define FSUBS		(OPC1(0x2) | OPC3(0x34) | DOP(0x45))
158 #define JMPL		(OPC1(0x2) | OPC3(0x38))
159 #define LDD		(OPC1(0x3) | OPC3(0x03))
160 #define LDDF		(OPC1(0x3) | OPC3(0x23))
161 #define LDF		(OPC1(0x3) | OPC3(0x20))
162 #define LDUW		(OPC1(0x3) | OPC3(0x00))
163 #define NOP		(OPC1(0x0) | OPC2(0x04))
164 #define OR		(OPC1(0x2) | OPC3(0x02))
165 #define ORN		(OPC1(0x2) | OPC3(0x06))
166 #define RDY		(OPC1(0x2) | OPC3(0x28) | S1A(0))
167 #define RESTORE		(OPC1(0x2) | OPC3(0x3d))
168 #define SAVE		(OPC1(0x2) | OPC3(0x3c))
169 #define SETHI		(OPC1(0x0) | OPC2(0x04))
170 #define SLL		(OPC1(0x2) | OPC3(0x25))
171 #define SLLX		(OPC1(0x2) | OPC3(0x25) | (1 << 12))
172 #define SRA		(OPC1(0x2) | OPC3(0x27))
173 #define SRAX		(OPC1(0x2) | OPC3(0x27) | (1 << 12))
174 #define SRL		(OPC1(0x2) | OPC3(0x26))
175 #define SRLX		(OPC1(0x2) | OPC3(0x26) | (1 << 12))
176 #define STD		(OPC1(0x3) | OPC3(0x07))
177 #define STDF		(OPC1(0x3) | OPC3(0x27))
178 #define STF		(OPC1(0x3) | OPC3(0x24))
179 #define STW		(OPC1(0x3) | OPC3(0x04))
180 #define SUB		(OPC1(0x2) | OPC3(0x04))
181 #define SUBC		(OPC1(0x2) | OPC3(0x0c))
182 #define TA		(OPC1(0x2) | OPC3(0x3a) | (8 << 25))
183 #define WRY		(OPC1(0x2) | OPC3(0x30) | DA(0))
184 #define XOR		(OPC1(0x2) | OPC3(0x03))
185 #define XNOR		(OPC1(0x2) | OPC3(0x07))
186 
187 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
188 #define MAX_DISP	(0x1fffff)
189 #define MIN_DISP	(-0x200000)
190 #define DISP_MASK	((sljit_ins)0x3fffff)
191 
192 #define BICC		(OPC1(0x0) | OPC2(0x2))
193 #define FBFCC		(OPC1(0x0) | OPC2(0x6))
194 #define SLL_W		SLL
195 #define SDIV		(OPC1(0x2) | OPC3(0x0f))
196 #define SMUL		(OPC1(0x2) | OPC3(0x0b))
197 #define UDIV		(OPC1(0x2) | OPC3(0x0e))
198 #define UMUL		(OPC1(0x2) | OPC3(0x0a))
199 #else
200 #define SLL_W		SLLX
201 #endif
202 
203 #define SIMM_MAX	(0x0fff)
204 #define SIMM_MIN	(-0x1000)
205 
206 /* dest_reg is the absolute name of the register
207    Useful for reordering instructions in the delay slot. */
push_inst(struct sljit_compiler * compiler,sljit_ins ins,sljit_s32 delay_slot)208 static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_s32 delay_slot)
209 {
210 	sljit_ins *ptr;
211 	SLJIT_ASSERT((delay_slot & DST_INS_MASK) == UNMOVABLE_INS
212 		|| (delay_slot & DST_INS_MASK) == MOVABLE_INS
213 		|| (delay_slot & DST_INS_MASK) == ((ins >> 25) & 0x1f));
214 	ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
215 	FAIL_IF(!ptr);
216 	*ptr = ins;
217 	compiler->size++;
218 	compiler->delay_slot = delay_slot;
219 	return SLJIT_SUCCESS;
220 }
221 
detect_jump_type(struct sljit_jump * jump,sljit_ins * code_ptr,sljit_ins * code,sljit_sw executable_offset)222 static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
223 {
224 	sljit_sw diff;
225 	sljit_uw target_addr;
226 	sljit_ins *inst;
227 	sljit_ins saved_inst;
228 
229 	if (jump->flags & SLJIT_REWRITABLE_JUMP)
230 		return code_ptr;
231 
232 	if (jump->flags & JUMP_ADDR)
233 		target_addr = jump->u.target;
234 	else {
235 		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
236 		target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
237 	}
238 	inst = (sljit_ins*)jump->addr;
239 
240 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
241 	if (jump->flags & IS_CALL) {
242 		/* Call is always patchable on sparc 32. */
243 		jump->flags |= PATCH_CALL;
244 		if (jump->flags & IS_MOVABLE) {
245 			inst[0] = inst[-1];
246 			inst[-1] = CALL;
247 			jump->addr -= sizeof(sljit_ins);
248 			return inst;
249 		}
250 		inst[0] = CALL;
251 		inst[1] = NOP;
252 		return inst + 1;
253 	}
254 #else
255 	/* Both calls and BPr instructions shall not pass this point. */
256 #error "Implementation required"
257 #endif
258 
259 	if (jump->flags & IS_COND)
260 		inst--;
261 
262 	diff = ((sljit_sw)target_addr - (sljit_sw)(inst - 1) - executable_offset) >> 2;
263 
264 	if (jump->flags & IS_MOVABLE) {
265 		if (diff <= MAX_DISP && diff >= MIN_DISP) {
266 			jump->flags |= PATCH_B;
267 			inst--;
268 			if (jump->flags & IS_COND) {
269 				saved_inst = inst[0];
270 				inst[0] = inst[1] ^ (1 << 28);
271 				inst[1] = saved_inst;
272 			} else {
273 				inst[1] = inst[0];
274 				inst[0] = BICC | DA(0x8);
275 			}
276 			jump->addr = (sljit_uw)inst;
277 			return inst + 1;
278 		}
279 	}
280 
281 	diff += SSIZE_OF(ins);
282 
283 	if (diff <= MAX_DISP && diff >= MIN_DISP) {
284 		jump->flags |= PATCH_B;
285 		if (jump->flags & IS_COND)
286 			inst[0] ^= (1 << 28);
287 		else
288 			inst[0] = BICC | DA(0x8);
289 		inst[1] = NOP;
290 		jump->addr = (sljit_uw)inst;
291 		return inst + 1;
292 	}
293 
294 	return code_ptr;
295 }
296 
sljit_generate_code(struct sljit_compiler * compiler)297 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
298 {
299 	struct sljit_memory_fragment *buf;
300 	sljit_ins *code;
301 	sljit_ins *code_ptr;
302 	sljit_ins *buf_ptr;
303 	sljit_ins *buf_end;
304 	sljit_uw word_count;
305 	sljit_uw next_addr;
306 	sljit_sw executable_offset;
307 	sljit_sw addr;
308 
309 	struct sljit_label *label;
310 	struct sljit_jump *jump;
311 	struct sljit_const *const_;
312 	struct sljit_put_label *put_label;
313 
314 	CHECK_ERROR_PTR();
315 	CHECK_PTR(check_sljit_generate_code(compiler));
316 	reverse_buf(compiler);
317 
318 	code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
319 	PTR_FAIL_WITH_EXEC_IF(code);
320 	buf = compiler->buf;
321 
322 	code_ptr = code;
323 	word_count = 0;
324 	next_addr = 0;
325 	executable_offset = SLJIT_EXEC_OFFSET(code);
326 
327 	label = compiler->labels;
328 	jump = compiler->jumps;
329 	const_ = compiler->consts;
330 	put_label = compiler->put_labels;
331 
332 	do {
333 		buf_ptr = (sljit_ins*)buf->memory;
334 		buf_end = buf_ptr + (buf->used_size >> 2);
335 		do {
336 			*code_ptr = *buf_ptr++;
337 			if (next_addr == word_count) {
338 				SLJIT_ASSERT(!label || label->size >= word_count);
339 				SLJIT_ASSERT(!jump || jump->addr >= word_count);
340 				SLJIT_ASSERT(!const_ || const_->addr >= word_count);
341 				SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
342 
343 				/* These structures are ordered by their address. */
344 				if (label && label->size == word_count) {
345 					/* Just recording the address. */
346 					label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
347 					label->size = (sljit_uw)(code_ptr - code);
348 					label = label->next;
349 				}
350 				if (jump && jump->addr == word_count) {
351 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
352 					jump->addr = (sljit_uw)(code_ptr - 3);
353 #else
354 					jump->addr = (sljit_uw)(code_ptr - 6);
355 #endif
356 					code_ptr = detect_jump_type(jump, code_ptr, code, executable_offset);
357 					jump = jump->next;
358 				}
359 				if (const_ && const_->addr == word_count) {
360 					/* Just recording the address. */
361 					const_->addr = (sljit_uw)code_ptr;
362 					const_ = const_->next;
363 				}
364 				if (put_label && put_label->addr == word_count) {
365 					SLJIT_ASSERT(put_label->label);
366 					put_label->addr = (sljit_uw)code_ptr;
367 					put_label = put_label->next;
368 				}
369 				next_addr = compute_next_addr(label, jump, const_, put_label);
370 			}
371 			code_ptr ++;
372 			word_count ++;
373 		} while (buf_ptr < buf_end);
374 
375 		buf = buf->next;
376 	} while (buf);
377 
378 	if (label && label->size == word_count) {
379 		label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
380 		label->size = (sljit_uw)(code_ptr - code);
381 		label = label->next;
382 	}
383 
384 	SLJIT_ASSERT(!label);
385 	SLJIT_ASSERT(!jump);
386 	SLJIT_ASSERT(!const_);
387 	SLJIT_ASSERT(!put_label);
388 	SLJIT_ASSERT(code_ptr - code <= (sljit_s32)compiler->size);
389 
390 	jump = compiler->jumps;
391 	while (jump) {
392 		do {
393 			addr = (sljit_sw)((jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target);
394 			buf_ptr = (sljit_ins *)jump->addr;
395 
396 			if (jump->flags & PATCH_CALL) {
397 				addr = (addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
398 				SLJIT_ASSERT(addr <= 0x1fffffff && addr >= -0x20000000);
399 				buf_ptr[0] = CALL | ((sljit_ins)addr & 0x3fffffff);
400 				break;
401 			}
402 			if (jump->flags & PATCH_B) {
403 				addr = (addr - (sljit_sw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
404 				SLJIT_ASSERT(addr <= MAX_DISP && addr >= MIN_DISP);
405 				buf_ptr[0] = (buf_ptr[0] & ~DISP_MASK) | ((sljit_ins)addr & DISP_MASK);
406 				break;
407 			}
408 
409 			/* Set the fields of immediate loads. */
410 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
411 			SLJIT_ASSERT(((buf_ptr[0] & 0xc1cfffff) == 0x01000000) && ((buf_ptr[1] & 0xc1f83fff) == 0x80102000));
412 			buf_ptr[0] |= (sljit_ins)(addr >> 10) & 0x3fffff;
413 			buf_ptr[1] |= (sljit_ins)addr & 0x3ff;
414 #else
415 #error "Implementation required"
416 #endif
417 		} while (0);
418 		jump = jump->next;
419 	}
420 
421 	put_label = compiler->put_labels;
422 	while (put_label) {
423 		addr = (sljit_sw)put_label->label->addr;
424 		buf_ptr = (sljit_ins *)put_label->addr;
425 
426 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
427 		SLJIT_ASSERT(((buf_ptr[0] & 0xc1cfffff) == 0x01000000) && ((buf_ptr[1] & 0xc1f83fff) == 0x80102000));
428 		buf_ptr[0] |= (addr >> 10) & 0x3fffff;
429 		buf_ptr[1] |= addr & 0x3ff;
430 #else
431 #error "Implementation required"
432 #endif
433 		put_label = put_label->next;
434 	}
435 
436 	compiler->error = SLJIT_ERR_COMPILED;
437 	compiler->executable_offset = executable_offset;
438 	compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
439 
440 	code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
441 	code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
442 
443 	SLJIT_CACHE_FLUSH(code, code_ptr);
444 	SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
445 	return code;
446 }
447 
sljit_has_cpu_feature(sljit_s32 feature_type)448 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
449 {
450 	switch (feature_type) {
451 	case SLJIT_HAS_FPU:
452 #ifdef SLJIT_IS_FPU_AVAILABLE
453 		return SLJIT_IS_FPU_AVAILABLE;
454 #else
455 		/* Available by default. */
456 		return 1;
457 #endif
458 
459 	case SLJIT_HAS_ZERO_REGISTER:
460 		return 1;
461 
462 #if (defined SLJIT_CONFIG_SPARC_64 && SLJIT_CONFIG_SPARC_64)
463 	case SLJIT_HAS_CMOV:
464 		return 1;
465 #endif
466 
467 	default:
468 		return 0;
469 	}
470 }
471 
472 /* --------------------------------------------------------------------- */
473 /*  Entry, exit                                                          */
474 /* --------------------------------------------------------------------- */
475 
476 /* Creates an index in data_transfer_insts array. */
477 #define LOAD_DATA	0x01
478 #define WORD_DATA	0x00
479 #define BYTE_DATA	0x02
480 #define HALF_DATA	0x04
481 #define INT_DATA	0x06
482 #define SIGNED_DATA	0x08
483 /* Separates integer and floating point registers */
484 #define GPR_REG		0x0f
485 #define DOUBLE_DATA	0x10
486 #define SINGLE_DATA	0x12
487 
488 #define MEM_MASK	0x1f
489 
490 #define ARG_TEST	0x00020
491 #define ALT_KEEP_CACHE	0x00040
492 #define CUMULATIVE_OP	0x00080
493 #define IMM_OP		0x00100
494 #define MOVE_OP		0x00200
495 #define SRC2_IMM	0x00400
496 
497 #define REG_DEST	0x00800
498 #define REG2_SOURCE	0x01000
499 #define SLOW_SRC1	0x02000
500 #define SLOW_SRC2	0x04000
501 #define SLOW_DEST	0x08000
502 
503 /* SET_FLAGS (0x10 << 19) also belong here! */
504 
505 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
506 #include "sljitNativeSPARC_32.c"
507 #else
508 #include "sljitNativeSPARC_64.c"
509 #endif
510 
sljit_emit_enter(struct sljit_compiler * compiler,sljit_s32 options,sljit_s32 arg_types,sljit_s32 scratches,sljit_s32 saveds,sljit_s32 fscratches,sljit_s32 fsaveds,sljit_s32 local_size)511 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
512 	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
513 	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
514 {
515 	sljit_s32 reg_index, types, tmp;
516 	sljit_u32 float_offset, args_offset;
517 	sljit_s32 saved_arg_index, scratch_arg_index, float_arg_index;
518 
519 	CHECK_ERROR();
520 	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
521 	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
522 
523 	local_size = (local_size + SLJIT_LOCALS_OFFSET + 7) & ~0x7;
524 	compiler->local_size = local_size;
525 
526 	if (local_size <= -SIMM_MIN) {
527 		FAIL_IF(push_inst(compiler, SAVE | D(SLJIT_SP) | S1(SLJIT_SP) | IMM(-local_size), UNMOVABLE_INS));
528 	}
529 	else {
530 		FAIL_IF(load_immediate(compiler, TMP_REG1, -local_size));
531 		FAIL_IF(push_inst(compiler, SAVE | D(SLJIT_SP) | S1(SLJIT_SP) | S2(TMP_REG1), UNMOVABLE_INS));
532 	}
533 
534 	arg_types >>= SLJIT_ARG_SHIFT;
535 
536 	types = arg_types;
537 	float_offset = 16 * sizeof(sljit_sw);
538 	reg_index = 24;
539 
540 	while (types && reg_index < 24 + 6) {
541 		switch (types & SLJIT_ARG_MASK) {
542 		case SLJIT_ARG_TYPE_F64:
543 			if (reg_index & 0x1) {
544 				FAIL_IF(push_inst(compiler, STW | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
545 				if (reg_index >= 24 + 6 - 1)
546 					break;
547 				FAIL_IF(push_inst(compiler, STW | DA(reg_index + 1) | S1(SLJIT_SP) | IMM(float_offset + sizeof(sljit_sw)), MOVABLE_INS));
548 			} else
549 				FAIL_IF(push_inst(compiler, STD | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
550 
551 			float_offset += sizeof(sljit_f64);
552 			reg_index++;
553 			break;
554 		case SLJIT_ARG_TYPE_F32:
555 			FAIL_IF(push_inst(compiler, STW | DA(reg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
556 			float_offset += sizeof(sljit_f64);
557 			break;
558 		}
559 
560 		reg_index++;
561 		types >>= SLJIT_ARG_SHIFT;
562 	}
563 
564 	args_offset = (16 + 1 + 6) * sizeof(sljit_sw);
565 	float_offset = 16 * sizeof(sljit_sw);
566 	reg_index = 24;
567 	saved_arg_index = 24;
568 	scratch_arg_index = 8 - 1;
569 	float_arg_index = 1;
570 
571 	while (arg_types) {
572 		switch (arg_types & SLJIT_ARG_MASK) {
573 		case SLJIT_ARG_TYPE_F64:
574 			if (reg_index < 24 + 6 - 1) {
575 				FAIL_IF(push_inst(compiler, LDDF | FD(float_arg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
576 			} else if (reg_index < 24 + 6) {
577 				FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
578 				FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | (1 << 25) | S1A(30) | IMM(args_offset), MOVABLE_INS));
579 			} else {
580 				FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | S1A(30) | IMM(args_offset), MOVABLE_INS));
581 				FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | (1 << 25) | S1A(30) | IMM(args_offset + sizeof(sljit_sw)), MOVABLE_INS));
582 			}
583 
584 			float_arg_index++;
585 			float_offset += sizeof(sljit_f64);
586 			reg_index++;
587 			break;
588 		case SLJIT_ARG_TYPE_F32:
589 			if (reg_index < 24 + 6)
590 				FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | S1(SLJIT_SP) | IMM(float_offset), MOVABLE_INS));
591 			else
592 				FAIL_IF(push_inst(compiler, LDF | FD(float_arg_index) | S1A(30) | IMM(args_offset), MOVABLE_INS));
593 			float_arg_index++;
594 			float_offset += sizeof(sljit_f64);
595 			break;
596 		default:
597 			scratch_arg_index++;
598 
599 			if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
600 				tmp = saved_arg_index++;
601 				if (tmp == reg_index)
602 					break;
603 			} else
604 				tmp = scratch_arg_index;
605 
606 			if (reg_index < 24 + 6)
607 				FAIL_IF(push_inst(compiler, OR | DA(tmp) | S1(0) | S2A(reg_index), tmp));
608 			else
609 				FAIL_IF(push_inst(compiler, LDUW | DA(tmp) | S1A(30) | IMM(args_offset), tmp));
610 			break;
611 		}
612 
613 		reg_index++;
614 		arg_types >>= SLJIT_ARG_SHIFT;
615 	}
616 
617 	return SLJIT_SUCCESS;
618 }
619 
sljit_set_context(struct sljit_compiler * compiler,sljit_s32 options,sljit_s32 arg_types,sljit_s32 scratches,sljit_s32 saveds,sljit_s32 fscratches,sljit_s32 fsaveds,sljit_s32 local_size)620 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
621 	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
622 	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
623 {
624 	CHECK_ERROR();
625 	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
626 	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
627 
628 	compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 7) & ~0x7;
629 	return SLJIT_SUCCESS;
630 }
631 
sljit_emit_return_void(struct sljit_compiler * compiler)632 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
633 {
634 	CHECK_ERROR();
635 	CHECK(check_sljit_emit_return_void(compiler));
636 
637 	FAIL_IF(push_inst(compiler, JMPL | D(0) | S1A(31) | IMM(8), UNMOVABLE_INS));
638 	return push_inst(compiler, RESTORE | D(SLJIT_R0) | S1(SLJIT_R0) | S2(0), UNMOVABLE_INS);
639 }
640 
sljit_emit_return(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 src,sljit_sw srcw)641 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
642 {
643 	CHECK_ERROR();
644 	CHECK(check_sljit_emit_return(compiler, op, src, srcw));
645 
646 	if (TYPE_CAST_NEEDED(op) || !FAST_IS_REG(src)) {
647 		FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
648 		src = SLJIT_R0;
649 	}
650 
651 	FAIL_IF(push_inst(compiler, JMPL | D(0) | S1A(31) | IMM(8), UNMOVABLE_INS));
652 	return push_inst(compiler, RESTORE | D(SLJIT_R0) | S1(src) | S2(0), UNMOVABLE_INS);
653 }
654 
655 /* --------------------------------------------------------------------- */
656 /*  Operators                                                            */
657 /* --------------------------------------------------------------------- */
658 
659 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
660 #define ARCH_32_64(a, b)	a
661 #else
662 #define ARCH_32_64(a, b)	b
663 #endif
664 
665 static const sljit_ins data_transfer_insts[16 + 4] = {
666 /* u w s */ ARCH_32_64(OPC1(3) | OPC3(0x04) /* stw */, OPC1(3) | OPC3(0x0e) /* stx */),
667 /* u w l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x0b) /* ldx */),
668 /* u b s */ OPC1(3) | OPC3(0x05) /* stb */,
669 /* u b l */ OPC1(3) | OPC3(0x01) /* ldub */,
670 /* u h s */ OPC1(3) | OPC3(0x06) /* sth */,
671 /* u h l */ OPC1(3) | OPC3(0x02) /* lduh */,
672 /* u i s */ OPC1(3) | OPC3(0x04) /* stw */,
673 /* u i l */ OPC1(3) | OPC3(0x00) /* lduw */,
674 
675 /* s w s */ ARCH_32_64(OPC1(3) | OPC3(0x04) /* stw */, OPC1(3) | OPC3(0x0e) /* stx */),
676 /* s w l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x0b) /* ldx */),
677 /* s b s */ OPC1(3) | OPC3(0x05) /* stb */,
678 /* s b l */ OPC1(3) | OPC3(0x09) /* ldsb */,
679 /* s h s */ OPC1(3) | OPC3(0x06) /* sth */,
680 /* s h l */ OPC1(3) | OPC3(0x0a) /* ldsh */,
681 /* s i s */ OPC1(3) | OPC3(0x04) /* stw */,
682 /* s i l */ ARCH_32_64(OPC1(3) | OPC3(0x00) /* lduw */, OPC1(3) | OPC3(0x08) /* ldsw */),
683 
684 /* d   s */ OPC1(3) | OPC3(0x27),
685 /* d   l */ OPC1(3) | OPC3(0x23),
686 /* s   s */ OPC1(3) | OPC3(0x24),
687 /* s   l */ OPC1(3) | OPC3(0x20),
688 };
689 
690 #undef ARCH_32_64
691 
692 /* Can perform an operation using at most 1 instruction. */
getput_arg_fast(struct sljit_compiler * compiler,sljit_u32 flags,sljit_s32 reg,sljit_s32 arg,sljit_sw argw)693 static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_u32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
694 {
695 	SLJIT_ASSERT(arg & SLJIT_MEM);
696 
697 	if ((!(arg & OFFS_REG_MASK) && argw <= SIMM_MAX && argw >= SIMM_MIN)
698 			|| ((arg & OFFS_REG_MASK) && (argw & 0x3) == 0)) {
699 		/* Works for both absoulte and relative addresses (immediate case). */
700 		if (SLJIT_UNLIKELY(flags & ARG_TEST))
701 			return 1;
702 		FAIL_IF(push_inst(compiler, data_transfer_insts[flags & MEM_MASK]
703 			| ((flags & MEM_MASK) <= GPR_REG ? D(reg) : FD(reg))
704 			| S1(arg & REG_MASK) | ((arg & OFFS_REG_MASK) ? S2(OFFS_REG(arg)) : IMM(argw)),
705 			((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? DR(reg) : MOVABLE_INS));
706 		return -1;
707 	}
708 	return 0;
709 }
710 
711 /* See getput_arg below.
712    Note: can_cache is called only for binary operators. Those
713    operators always uses word arguments without write back. */
can_cache(sljit_s32 arg,sljit_sw argw,sljit_s32 next_arg,sljit_sw next_argw)714 static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
715 {
716 	SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));
717 
718 	/* Simple operation except for updates. */
719 	if (arg & OFFS_REG_MASK) {
720 		argw &= 0x3;
721 		SLJIT_ASSERT(argw);
722 		next_argw &= 0x3;
723 		if ((arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK) && argw == next_argw)
724 			return 1;
725 		return 0;
726 	}
727 
728 	if (((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN))
729 		return 1;
730 	return 0;
731 }
732 
733 /* Emit the necessary instructions. See can_cache above. */
getput_arg(struct sljit_compiler * compiler,sljit_u32 flags,sljit_s32 reg,sljit_s32 arg,sljit_sw argw,sljit_s32 next_arg,sljit_sw next_argw)734 static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_u32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
735 {
736 	sljit_s32 base, arg2, delay_slot;
737 	sljit_ins dest;
738 
739 	SLJIT_ASSERT(arg & SLJIT_MEM);
740 	if (!(next_arg & SLJIT_MEM)) {
741 		next_arg = 0;
742 		next_argw = 0;
743 	}
744 
745 	base = arg & REG_MASK;
746 	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
747 		argw &= 0x3;
748 
749 		/* Using the cache. */
750 		if (((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) && (argw == compiler->cache_argw))
751 			arg2 = TMP_REG3;
752 		else {
753 			if ((arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK) && argw == (next_argw & 0x3)) {
754 				compiler->cache_arg = SLJIT_MEM | (arg & OFFS_REG_MASK);
755 				compiler->cache_argw = argw;
756 				arg2 = TMP_REG3;
757 			}
758 			else if ((flags & LOAD_DATA) && ((flags & MEM_MASK) <= GPR_REG) && reg != base && reg != OFFS_REG(arg))
759 				arg2 = reg;
760 			else /* It must be a mov operation, so tmp1 must be free to use. */
761 				arg2 = TMP_REG1;
762 			FAIL_IF(push_inst(compiler, SLL_W | D(arg2) | S1(OFFS_REG(arg)) | IMM_ARG | (sljit_ins)argw, DR(arg2)));
763 		}
764 	}
765 	else {
766 		/* Using the cache. */
767 		if ((compiler->cache_arg == SLJIT_MEM) && (argw - compiler->cache_argw) <= SIMM_MAX && (argw - compiler->cache_argw) >= SIMM_MIN) {
768 			if (argw != compiler->cache_argw) {
769 				FAIL_IF(push_inst(compiler, ADD | D(TMP_REG3) | S1(TMP_REG3) | IMM(argw - compiler->cache_argw), DR(TMP_REG3)));
770 				compiler->cache_argw = argw;
771 			}
772 			arg2 = TMP_REG3;
773 		} else {
774 			if ((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN) {
775 				compiler->cache_arg = SLJIT_MEM;
776 				compiler->cache_argw = argw;
777 				arg2 = TMP_REG3;
778 			}
779 			else if ((flags & LOAD_DATA) && ((flags & MEM_MASK) <= GPR_REG) && reg != base)
780 				arg2 = reg;
781 			else /* It must be a mov operation, so tmp1 must be free to use. */
782 				arg2 = TMP_REG1;
783 			FAIL_IF(load_immediate(compiler, arg2, argw));
784 		}
785 	}
786 
787 	dest = ((flags & MEM_MASK) <= GPR_REG ? D(reg) : FD(reg));
788 	delay_slot = ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA)) ? DR(reg) : MOVABLE_INS;
789 	if (!base)
790 		return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | dest | S1(arg2) | IMM(0), delay_slot);
791 	return push_inst(compiler, data_transfer_insts[flags & MEM_MASK] | dest | S1(base) | S2(arg2), delay_slot);
792 }
793 
emit_op_mem(struct sljit_compiler * compiler,sljit_u32 flags,sljit_s32 reg,sljit_s32 arg,sljit_sw argw)794 static SLJIT_INLINE sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_u32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
795 {
796 	if (getput_arg_fast(compiler, flags, reg, arg, argw))
797 		return compiler->error;
798 	compiler->cache_arg = 0;
799 	compiler->cache_argw = 0;
800 	return getput_arg(compiler, flags, reg, arg, argw, 0, 0);
801 }
802 
emit_op_mem2(struct sljit_compiler * compiler,sljit_u32 flags,sljit_s32 reg,sljit_s32 arg1,sljit_sw arg1w,sljit_s32 arg2,sljit_sw arg2w)803 static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_u32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
804 {
805 	if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
806 		return compiler->error;
807 	return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
808 }
809 
emit_op(struct sljit_compiler * compiler,sljit_s32 op,sljit_u32 flags,sljit_s32 dst,sljit_sw dstw,sljit_s32 src1,sljit_sw src1w,sljit_s32 src2,sljit_sw src2w)810 static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_u32 flags,
811 	sljit_s32 dst, sljit_sw dstw,
812 	sljit_s32 src1, sljit_sw src1w,
813 	sljit_s32 src2, sljit_sw src2w)
814 {
815 	/* arg1 goes to TMP_REG1 or src reg
816 	   arg2 goes to TMP_REG2, imm or src reg
817 	   TMP_REG3 can be used for caching
818 	   result goes to TMP_REG2, so put result can use TMP_REG1 and TMP_REG3. */
819 	sljit_s32 dst_r = TMP_REG2;
820 	sljit_s32 src1_r;
821 	sljit_sw src2_r = 0;
822 	sljit_s32 sugg_src2_r = TMP_REG2;
823 
824 	if (!(flags & ALT_KEEP_CACHE)) {
825 		compiler->cache_arg = 0;
826 		compiler->cache_argw = 0;
827 	}
828 
829 	if (dst != TMP_REG2) {
830 		if (FAST_IS_REG(dst)) {
831 			dst_r = dst;
832 			flags |= REG_DEST;
833 			if (flags & MOVE_OP)
834 				sugg_src2_r = dst_r;
835 		}
836 		else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw))
837 			flags |= SLOW_DEST;
838 	}
839 
840 	if (flags & IMM_OP) {
841 		if ((src2 & SLJIT_IMM) && src2w) {
842 			if (src2w <= SIMM_MAX && src2w >= SIMM_MIN) {
843 				flags |= SRC2_IMM;
844 				src2_r = src2w;
845 			}
846 		}
847 		if (!(flags & SRC2_IMM) && (flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w) {
848 			if (src1w <= SIMM_MAX && src1w >= SIMM_MIN) {
849 				flags |= SRC2_IMM;
850 				src2_r = src1w;
851 
852 				/* And swap arguments. */
853 				src1 = src2;
854 				src1w = src2w;
855 				src2 = SLJIT_IMM;
856 				/* src2w = src2_r unneeded. */
857 			}
858 		}
859 	}
860 
861 	/* Source 1. */
862 	if (FAST_IS_REG(src1))
863 		src1_r = src1;
864 	else if (src1 & SLJIT_IMM) {
865 		if (src1w) {
866 			FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
867 			src1_r = TMP_REG1;
868 		}
869 		else
870 			src1_r = 0;
871 	}
872 	else {
873 		if (getput_arg_fast(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w))
874 			FAIL_IF(compiler->error);
875 		else
876 			flags |= SLOW_SRC1;
877 		src1_r = TMP_REG1;
878 	}
879 
880 	/* Source 2. */
881 	if (FAST_IS_REG(src2)) {
882 		src2_r = src2;
883 		flags |= REG2_SOURCE;
884 		if ((flags & (REG_DEST | MOVE_OP)) == MOVE_OP)
885 			dst_r = src2_r;
886 	}
887 	else if (src2 & SLJIT_IMM) {
888 		if (!(flags & SRC2_IMM)) {
889 			if (src2w) {
890 				FAIL_IF(load_immediate(compiler, sugg_src2_r, src2w));
891 				src2_r = sugg_src2_r;
892 			}
893 			else {
894 				src2_r = 0;
895 				if (flags & MOVE_OP) {
896 					if (dst & SLJIT_MEM)
897 						dst_r = 0;
898 					else
899 						op = SLJIT_MOV;
900 				}
901 			}
902 		}
903 	}
904 	else {
905 		if (getput_arg_fast(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w))
906 			FAIL_IF(compiler->error);
907 		else
908 			flags |= SLOW_SRC2;
909 		src2_r = sugg_src2_r;
910 	}
911 
912 	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
913 		SLJIT_ASSERT(src2_r == TMP_REG2);
914 		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
915 			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, src1, src1w));
916 			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
917 		}
918 		else {
919 			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, src2, src2w));
920 			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, dst, dstw));
921 		}
922 	}
923 	else if (flags & SLOW_SRC1)
924 		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
925 	else if (flags & SLOW_SRC2)
926 		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w, dst, dstw));
927 
928 	FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));
929 
930 	if (dst & SLJIT_MEM) {
931 		if (!(flags & SLOW_DEST)) {
932 			getput_arg_fast(compiler, flags, dst_r, dst, dstw);
933 			return compiler->error;
934 		}
935 		return getput_arg(compiler, flags, dst_r, dst, dstw, 0, 0);
936 	}
937 
938 	return SLJIT_SUCCESS;
939 }
940 
sljit_emit_op0(struct sljit_compiler * compiler,sljit_s32 op)941 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
942 {
943 	CHECK_ERROR();
944 	CHECK(check_sljit_emit_op0(compiler, op));
945 
946 	op = GET_OPCODE(op);
947 	switch (op) {
948 	case SLJIT_BREAKPOINT:
949 		return push_inst(compiler, TA, UNMOVABLE_INS);
950 	case SLJIT_NOP:
951 		return push_inst(compiler, NOP, UNMOVABLE_INS);
952 	case SLJIT_LMUL_UW:
953 	case SLJIT_LMUL_SW:
954 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
955 		FAIL_IF(push_inst(compiler, (op == SLJIT_LMUL_UW ? UMUL : SMUL) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0)));
956 		return push_inst(compiler, RDY | D(SLJIT_R1), DR(SLJIT_R1));
957 #else
958 #error "Implementation required"
959 #endif
960 	case SLJIT_DIVMOD_UW:
961 	case SLJIT_DIVMOD_SW:
962 	case SLJIT_DIV_UW:
963 	case SLJIT_DIV_SW:
964 		SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);
965 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
966 		if ((op | 0x2) == SLJIT_DIV_UW)
967 			FAIL_IF(push_inst(compiler, WRY | S1(0), MOVABLE_INS));
968 		else {
969 			FAIL_IF(push_inst(compiler, SRA | D(TMP_REG1) | S1(SLJIT_R0) | IMM(31), DR(TMP_REG1)));
970 			FAIL_IF(push_inst(compiler, WRY | S1(TMP_REG1), MOVABLE_INS));
971 		}
972 		if (op <= SLJIT_DIVMOD_SW)
973 			FAIL_IF(push_inst(compiler, OR | D(TMP_REG2) | S1(0) | S2(SLJIT_R0), DR(TMP_REG2)));
974 		FAIL_IF(push_inst(compiler, ((op | 0x2) == SLJIT_DIV_UW ? UDIV : SDIV) | D(SLJIT_R0) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R0)));
975 		if (op >= SLJIT_DIV_UW)
976 			return SLJIT_SUCCESS;
977 		FAIL_IF(push_inst(compiler, SMUL | D(SLJIT_R1) | S1(SLJIT_R0) | S2(SLJIT_R1), DR(SLJIT_R1)));
978 		return push_inst(compiler, SUB | D(SLJIT_R1) | S1(TMP_REG2) | S2(SLJIT_R1), DR(SLJIT_R1));
979 #else
980 #error "Implementation required"
981 #endif
982 	case SLJIT_ENDBR:
983 	case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
984 		return SLJIT_SUCCESS;
985 	}
986 
987 	return SLJIT_SUCCESS;
988 }
989 
sljit_emit_op1(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 dst,sljit_sw dstw,sljit_s32 src,sljit_sw srcw)990 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
991 	sljit_s32 dst, sljit_sw dstw,
992 	sljit_s32 src, sljit_sw srcw)
993 {
994 	sljit_u32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
995 
996 	CHECK_ERROR();
997 	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
998 	ADJUST_LOCAL_OFFSET(dst, dstw);
999 	ADJUST_LOCAL_OFFSET(src, srcw);
1000 
1001 	op = GET_OPCODE(op);
1002 	switch (op) {
1003 	case SLJIT_MOV:
1004 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
1005 	case SLJIT_MOV_U32:
1006 	case SLJIT_MOV_S32:
1007 	case SLJIT_MOV32:
1008 #endif
1009 	case SLJIT_MOV_P:
1010 		return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, srcw);
1011 
1012 	case SLJIT_MOV_U8:
1013 		return emit_op(compiler, SLJIT_MOV_U8, flags | BYTE_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
1014 
1015 	case SLJIT_MOV_S8:
1016 		return emit_op(compiler, SLJIT_MOV_S8, flags | BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
1017 
1018 	case SLJIT_MOV_U16:
1019 		return emit_op(compiler, SLJIT_MOV_U16, flags | HALF_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
1020 
1021 	case SLJIT_MOV_S16:
1022 		return emit_op(compiler, SLJIT_MOV_S16, flags | HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
1023 
1024 	case SLJIT_NOT:
1025 	case SLJIT_CLZ:
1026 		return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
1027 	}
1028 
1029 	return SLJIT_SUCCESS;
1030 }
1031 
sljit_emit_op2(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 dst,sljit_sw dstw,sljit_s32 src1,sljit_sw src1w,sljit_s32 src2,sljit_sw src2w)1032 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
1033 	sljit_s32 dst, sljit_sw dstw,
1034 	sljit_s32 src1, sljit_sw src1w,
1035 	sljit_s32 src2, sljit_sw src2w)
1036 {
1037 	sljit_u32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
1038 
1039 	CHECK_ERROR();
1040 	CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
1041 	ADJUST_LOCAL_OFFSET(dst, dstw);
1042 	ADJUST_LOCAL_OFFSET(src1, src1w);
1043 	ADJUST_LOCAL_OFFSET(src2, src2w);
1044 
1045 	op = GET_OPCODE(op);
1046 	switch (op) {
1047 	case SLJIT_ADD:
1048 	case SLJIT_ADDC:
1049 	case SLJIT_MUL:
1050 	case SLJIT_AND:
1051 	case SLJIT_OR:
1052 	case SLJIT_XOR:
1053 		return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
1054 
1055 	case SLJIT_SUB:
1056 	case SLJIT_SUBC:
1057 		return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
1058 
1059 	case SLJIT_SHL:
1060 	case SLJIT_LSHR:
1061 	case SLJIT_ASHR:
1062 #if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
1063 		if (src2 & SLJIT_IMM)
1064 			src2w &= 0x1f;
1065 #else
1066 		SLJIT_UNREACHABLE();
1067 #endif
1068 		return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
1069 	}
1070 
1071 	return SLJIT_SUCCESS;
1072 }
1073 
sljit_emit_op2u(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 src1,sljit_sw src1w,sljit_s32 src2,sljit_sw src2w)1074 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
1075 	sljit_s32 src1, sljit_sw src1w,
1076 	sljit_s32 src2, sljit_sw src2w)
1077 {
1078 	CHECK_ERROR();
1079 	CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
1080 
1081 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
1082 		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
1083 	compiler->skip_checks = 1;
1084 #endif
1085 	return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
1086 }
1087 
sljit_emit_op_src(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 src,sljit_sw srcw)1088 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
1089 	sljit_s32 src, sljit_sw srcw)
1090 {
1091 	CHECK_ERROR();
1092 	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
1093 	ADJUST_LOCAL_OFFSET(src, srcw);
1094 
1095 	switch (op) {
1096 	case SLJIT_FAST_RETURN:
1097 		if (FAST_IS_REG(src))
1098 			FAIL_IF(push_inst(compiler, OR | D(TMP_LINK) | S1(0) | S2(src), DR(TMP_LINK)));
1099 		else
1100 			FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_LINK, src, srcw));
1101 
1102 		FAIL_IF(push_inst(compiler, JMPL | D(0) | S1(TMP_LINK) | IMM(8), UNMOVABLE_INS));
1103 		return push_inst(compiler, NOP, UNMOVABLE_INS);
1104 	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
1105 	case SLJIT_PREFETCH_L1:
1106 	case SLJIT_PREFETCH_L2:
1107 	case SLJIT_PREFETCH_L3:
1108 	case SLJIT_PREFETCH_ONCE:
1109 		return SLJIT_SUCCESS;
1110 	}
1111 
1112 	return SLJIT_SUCCESS;
1113 }
1114 
sljit_get_register_index(sljit_s32 reg)1115 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
1116 {
1117 	CHECK_REG_INDEX(check_sljit_get_register_index(reg));
1118 	return reg_map[reg];
1119 }
1120 
sljit_get_float_register_index(sljit_s32 reg)1121 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
1122 {
1123 	CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
1124 	return freg_map[reg];
1125 }
1126 
sljit_emit_op_custom(struct sljit_compiler * compiler,void * instruction,sljit_u32 size)1127 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
1128 	void *instruction, sljit_u32 size)
1129 {
1130 	CHECK_ERROR();
1131 	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
1132 
1133 	return push_inst(compiler, *(sljit_ins*)instruction, UNMOVABLE_INS);
1134 }
1135 
1136 /* --------------------------------------------------------------------- */
1137 /*  Floating point operators                                             */
1138 /* --------------------------------------------------------------------- */
1139 
1140 #define FLOAT_DATA(op) ((sljit_ins)DOUBLE_DATA | (((sljit_ins)(op) & SLJIT_32) >> 7))
1141 #define SELECT_FOP(op, single, double) ((op & SLJIT_32) ? single : double)
1142 #define FLOAT_TMP_MEM_OFFSET (22 * sizeof(sljit_sw))
1143 
sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 dst,sljit_sw dstw,sljit_s32 src,sljit_sw srcw)1144 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
1145 	sljit_s32 dst, sljit_sw dstw,
1146 	sljit_s32 src, sljit_sw srcw)
1147 {
1148 	if (src & SLJIT_MEM) {
1149 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw));
1150 		src = TMP_FREG1;
1151 	}
1152 
1153 	FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOI, FDTOI) | FD(TMP_FREG1) | FS2(src), MOVABLE_INS));
1154 
1155 	if (FAST_IS_REG(dst)) {
1156 		FAIL_IF(emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET));
1157 		return emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET);
1158 	}
1159 
1160 	/* Store the integer value from a VFP register. */
1161 	return emit_op_mem2(compiler, SINGLE_DATA, TMP_FREG1, dst, dstw, 0, 0);
1162 }
1163 
sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 dst,sljit_sw dstw,sljit_s32 src,sljit_sw srcw)1164 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
1165 	sljit_s32 dst, sljit_sw dstw,
1166 	sljit_s32 src, sljit_sw srcw)
1167 {
1168 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1169 
1170 	if (src & SLJIT_IMM) {
1171 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1172 		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
1173 			srcw = (sljit_s32)srcw;
1174 #endif
1175 		FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1176 		src = TMP_REG1;
1177 		srcw = 0;
1178 	}
1179 
1180 	if (FAST_IS_REG(src)) {
1181 		FAIL_IF(emit_op_mem2(compiler, WORD_DATA, src, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET));
1182 		src = SLJIT_MEM1(SLJIT_SP);
1183 		srcw = FLOAT_TMP_MEM_OFFSET;
1184 	}
1185 
1186 	FAIL_IF(emit_op_mem2(compiler, SINGLE_DATA | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw));
1187 	FAIL_IF(push_inst(compiler, SELECT_FOP(op, FITOS, FITOD) | FD(dst_r) | FS2(TMP_FREG1), MOVABLE_INS));
1188 
1189 	if (dst & SLJIT_MEM)
1190 		return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
1191 	return SLJIT_SUCCESS;
1192 }
1193 
sljit_emit_fop1_cmp(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 src1,sljit_sw src1w,sljit_s32 src2,sljit_sw src2w)1194 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
1195 	sljit_s32 src1, sljit_sw src1w,
1196 	sljit_s32 src2, sljit_sw src2w)
1197 {
1198 	if (src1 & SLJIT_MEM) {
1199 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
1200 		src1 = TMP_FREG1;
1201 	}
1202 
1203 	if (src2 & SLJIT_MEM) {
1204 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, 0, 0));
1205 		src2 = TMP_FREG2;
1206 	}
1207 
1208 	return push_inst(compiler, SELECT_FOP(op, FCMPS, FCMPD) | FS1(src1) | FS2(src2), FCC_IS_SET | MOVABLE_INS);
1209 }
1210 
sljit_emit_fop1(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 dst,sljit_sw dstw,sljit_s32 src,sljit_sw srcw)1211 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
1212 	sljit_s32 dst, sljit_sw dstw,
1213 	sljit_s32 src, sljit_sw srcw)
1214 {
1215 	sljit_s32 dst_r;
1216 
1217 	CHECK_ERROR();
1218 	compiler->cache_arg = 0;
1219 	compiler->cache_argw = 0;
1220 
1221 	SLJIT_COMPILE_ASSERT((SLJIT_32 == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
1222 	SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
1223 
1224 	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
1225 		op ^= SLJIT_32;
1226 
1227 	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1228 
1229 	if (src & SLJIT_MEM) {
1230 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, dst_r, src, srcw, dst, dstw));
1231 		src = dst_r;
1232 	}
1233 
1234 	switch (GET_OPCODE(op)) {
1235 	case SLJIT_MOV_F64:
1236 		if (src != dst_r) {
1237 			if (dst_r != TMP_FREG1) {
1238 				FAIL_IF(push_inst(compiler, FMOVS | FD(dst_r) | FS2(src), MOVABLE_INS));
1239 				if (!(op & SLJIT_32))
1240 					FAIL_IF(push_inst(compiler, FMOVS | FDN(dst_r) | FS2N(src), MOVABLE_INS));
1241 			}
1242 			else
1243 				dst_r = src;
1244 		}
1245 		break;
1246 	case SLJIT_NEG_F64:
1247 		FAIL_IF(push_inst(compiler, FNEGS | FD(dst_r) | FS2(src), MOVABLE_INS));
1248 		if (dst_r != src && !(op & SLJIT_32))
1249 			FAIL_IF(push_inst(compiler, FMOVS | FDN(dst_r) | FS2N(src), MOVABLE_INS));
1250 		break;
1251 	case SLJIT_ABS_F64:
1252 		FAIL_IF(push_inst(compiler, FABSS | FD(dst_r) | FS2(src), MOVABLE_INS));
1253 		if (dst_r != src && !(op & SLJIT_32))
1254 			FAIL_IF(push_inst(compiler, FMOVS | FDN(dst_r) | FS2N(src), MOVABLE_INS));
1255 		break;
1256 	case SLJIT_CONV_F64_FROM_F32:
1257 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSTOD, FDTOS) | FD(dst_r) | FS2(src), MOVABLE_INS));
1258 		op ^= SLJIT_32;
1259 		break;
1260 	}
1261 
1262 	if (dst & SLJIT_MEM)
1263 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), dst_r, dst, dstw, 0, 0));
1264 	return SLJIT_SUCCESS;
1265 }
1266 
sljit_emit_fop2(struct sljit_compiler * compiler,sljit_s32 op,sljit_s32 dst,sljit_sw dstw,sljit_s32 src1,sljit_sw src1w,sljit_s32 src2,sljit_sw src2w)1267 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
1268 	sljit_s32 dst, sljit_sw dstw,
1269 	sljit_s32 src1, sljit_sw src1w,
1270 	sljit_s32 src2, sljit_sw src2w)
1271 {
1272 	sljit_s32 dst_r, flags = 0;
1273 
1274 	CHECK_ERROR();
1275 	CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
1276 	ADJUST_LOCAL_OFFSET(dst, dstw);
1277 	ADJUST_LOCAL_OFFSET(src1, src1w);
1278 	ADJUST_LOCAL_OFFSET(src2, src2w);
1279 
1280 	compiler->cache_arg = 0;
1281 	compiler->cache_argw = 0;
1282 
1283 	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG2;
1284 
1285 	if (src1 & SLJIT_MEM) {
1286 		if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w)) {
1287 			FAIL_IF(compiler->error);
1288 			src1 = TMP_FREG1;
1289 		} else
1290 			flags |= SLOW_SRC1;
1291 	}
1292 
1293 	if (src2 & SLJIT_MEM) {
1294 		if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w)) {
1295 			FAIL_IF(compiler->error);
1296 			src2 = TMP_FREG2;
1297 		} else
1298 			flags |= SLOW_SRC2;
1299 	}
1300 
1301 	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
1302 		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
1303 			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, src1, src1w));
1304 			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
1305 		}
1306 		else {
1307 			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
1308 			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
1309 		}
1310 	}
1311 	else if (flags & SLOW_SRC1)
1312 		FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
1313 	else if (flags & SLOW_SRC2)
1314 		FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
1315 
1316 	if (flags & SLOW_SRC1)
1317 		src1 = TMP_FREG1;
1318 	if (flags & SLOW_SRC2)
1319 		src2 = TMP_FREG2;
1320 
1321 	switch (GET_OPCODE(op)) {
1322 	case SLJIT_ADD_F64:
1323 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FADDS, FADDD) | FD(dst_r) | FS1(src1) | FS2(src2), MOVABLE_INS));
1324 		break;
1325 
1326 	case SLJIT_SUB_F64:
1327 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSUBS, FSUBD) | FD(dst_r) | FS1(src1) | FS2(src2), MOVABLE_INS));
1328 		break;
1329 
1330 	case SLJIT_MUL_F64:
1331 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FMULS, FMULD) | FD(dst_r) | FS1(src1) | FS2(src2), MOVABLE_INS));
1332 		break;
1333 
1334 	case SLJIT_DIV_F64:
1335 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FDIVS, FDIVD) | FD(dst_r) | FS1(src1) | FS2(src2), MOVABLE_INS));
1336 		break;
1337 	}
1338 
1339 	if (dst_r == TMP_FREG2)
1340 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG2, dst, dstw, 0, 0));
1341 
1342 	return SLJIT_SUCCESS;
1343 }
1344 
1345 #undef FLOAT_DATA
1346 #undef SELECT_FOP
1347 
1348 /* --------------------------------------------------------------------- */
1349 /*  Other instructions                                                   */
1350 /* --------------------------------------------------------------------- */
1351 
sljit_emit_fast_enter(struct sljit_compiler * compiler,sljit_s32 dst,sljit_sw dstw)1352 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
1353 {
1354 	CHECK_ERROR();
1355 	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
1356 	ADJUST_LOCAL_OFFSET(dst, dstw);
1357 
1358 	if (FAST_IS_REG(dst))
1359 		return push_inst(compiler, OR | D(dst) | S1(0) | S2(TMP_LINK), UNMOVABLE_INS);
1360 
1361 	/* Memory. */
1362 	FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_LINK, dst, dstw));
1363 	compiler->delay_slot = UNMOVABLE_INS;
1364 	return SLJIT_SUCCESS;
1365 }
1366 
1367 /* --------------------------------------------------------------------- */
1368 /*  Conditional instructions                                             */
1369 /* --------------------------------------------------------------------- */
1370 
sljit_emit_label(struct sljit_compiler * compiler)1371 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
1372 {
1373 	struct sljit_label *label;
1374 
1375 	CHECK_ERROR_PTR();
1376 	CHECK_PTR(check_sljit_emit_label(compiler));
1377 
1378 	if (compiler->last_label && compiler->last_label->size == compiler->size)
1379 		return compiler->last_label;
1380 
1381 	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
1382 	PTR_FAIL_IF(!label);
1383 	set_label(label, compiler);
1384 	compiler->delay_slot = UNMOVABLE_INS;
1385 	return label;
1386 }
1387 
get_cc(struct sljit_compiler * compiler,sljit_s32 type)1388 static sljit_ins get_cc(struct sljit_compiler *compiler, sljit_s32 type)
1389 {
1390 	switch (type) {
1391 	case SLJIT_EQUAL:
1392 	case SLJIT_NOT_EQUAL_F64: /* Unordered. */
1393 		return DA(0x1);
1394 
1395 	case SLJIT_NOT_EQUAL:
1396 	case SLJIT_EQUAL_F64:
1397 		return DA(0x9);
1398 
1399 	case SLJIT_LESS:
1400 	case SLJIT_GREATER_F64: /* Unordered. */
1401 	case SLJIT_CARRY:
1402 		return DA(0x5);
1403 
1404 	case SLJIT_GREATER_EQUAL:
1405 	case SLJIT_LESS_EQUAL_F64:
1406 	case SLJIT_NOT_CARRY:
1407 		return DA(0xd);
1408 
1409 	case SLJIT_GREATER:
1410 	case SLJIT_GREATER_EQUAL_F64: /* Unordered. */
1411 		return DA(0xc);
1412 
1413 	case SLJIT_LESS_EQUAL:
1414 	case SLJIT_LESS_F64:
1415 		return DA(0x4);
1416 
1417 	case SLJIT_SIG_LESS:
1418 		return DA(0x3);
1419 
1420 	case SLJIT_SIG_GREATER_EQUAL:
1421 		return DA(0xb);
1422 
1423 	case SLJIT_SIG_GREATER:
1424 		return DA(0xa);
1425 
1426 	case SLJIT_SIG_LESS_EQUAL:
1427 		return DA(0x2);
1428 
1429 	case SLJIT_OVERFLOW:
1430 		if (!(compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)))
1431 			return DA(0x9);
1432 		/* fallthrough */
1433 
1434 	case SLJIT_UNORDERED_F64:
1435 		return DA(0x7);
1436 
1437 	case SLJIT_NOT_OVERFLOW:
1438 		if (!(compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)))
1439 			return DA(0x1);
1440 		/* fallthrough */
1441 
1442 	case SLJIT_ORDERED_F64:
1443 		return DA(0xf);
1444 
1445 	default:
1446 		SLJIT_UNREACHABLE();
1447 		return DA(0x8);
1448 	}
1449 }
1450 
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
	struct sljit_jump *jump;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_jump(compiler, type));

	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
	PTR_FAIL_IF(!jump);
	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
	type &= 0xff;

	if (type < SLJIT_EQUAL_F64) {
		jump->flags |= IS_COND;
		if (((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS) && !(compiler->delay_slot & ICC_IS_SET))
			jump->flags |= IS_MOVABLE;
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
		PTR_FAIL_IF(push_inst(compiler, BICC | get_cc(compiler, type ^ 1) | 5, UNMOVABLE_INS));
#else
#error "Implementation required"
#endif
	}
	else if (type < SLJIT_JUMP) {
		jump->flags |= IS_COND;
		if (((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS) && !(compiler->delay_slot & FCC_IS_SET))
			jump->flags |= IS_MOVABLE;
#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
		PTR_FAIL_IF(push_inst(compiler, FBFCC | get_cc(compiler, type ^ 1) | 5, UNMOVABLE_INS));
#else
#error "Implementation required"
#endif
	}
	else {
		if ((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS)
			jump->flags |= IS_MOVABLE;
		if (type >= SLJIT_FAST_CALL)
			jump->flags |= IS_CALL;
	}

	PTR_FAIL_IF(emit_const(compiler, TMP_REG1, 0));
	PTR_FAIL_IF(push_inst(compiler, JMPL | D(type >= SLJIT_FAST_CALL ? TMP_LINK : 0) | S1(TMP_REG1) | IMM(0), UNMOVABLE_INS));
	jump->addr = compiler->size;
	PTR_FAIL_IF(push_inst(compiler, NOP, UNMOVABLE_INS));

	return jump;
}

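/* Calls with a typed argument list: call_with_args is expected to move the
   arguments into their calling convention locations before the jump itself
   is emitted, and skip_checks prevents sljit_emit_jump from re-running the
   argument checks on the already verified call. */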
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types)
{
	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));

	PTR_FAIL_IF(call_with_args(compiler, arg_types, NULL));

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
	compiler->skip_checks = 1;
#endif

	return sljit_emit_jump(compiler, type);
}

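/* Indirect jumps: a register source is used directly, an immediate target
   gets a jump record (JUMP_ADDR) plus a patchable constant load into
   TMP_REG1, and a memory source is loaded into TMP_REG1 first. The jump
   itself is always a JMPL with a NOP in the delay slot. */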
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
	struct sljit_jump *jump = NULL;
	sljit_s32 src_r;

	CHECK_ERROR();
	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	if (FAST_IS_REG(src))
		src_r = src;
	else if (src & SLJIT_IMM) {
		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
		FAIL_IF(!jump);
		set_jump(jump, compiler, JUMP_ADDR);
		jump->u.target = (sljit_uw)srcw;

		if ((compiler->delay_slot & DST_INS_MASK) != UNMOVABLE_INS)
			jump->flags |= IS_MOVABLE;
		if (type >= SLJIT_FAST_CALL)
			jump->flags |= IS_CALL;

		FAIL_IF(emit_const(compiler, TMP_REG1, 0));
		src_r = TMP_REG1;
	}
	else {
		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
		src_r = TMP_REG1;
	}

	FAIL_IF(push_inst(compiler, JMPL | D(type >= SLJIT_FAST_CALL ? TMP_LINK : 0) | S1(src_r) | IMM(0), UNMOVABLE_INS));
	if (jump)
		jump->addr = compiler->size;
	return push_inst(compiler, NOP, UNMOVABLE_INS);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types,
	sljit_s32 src, sljit_sw srcw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));

	if (src & SLJIT_MEM) {
		ADJUST_LOCAL_OFFSET(src, srcw);
		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
		src = TMP_REG1;
	}

	FAIL_IF(call_with_args(compiler, arg_types, &src));

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
	compiler->skip_checks = 1;
#endif

	return sljit_emit_ijump(compiler, type, src, srcw);
}

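/* Materializes a flag as 0/1 with a conditional branch over two
   instructions (displacement 3): the "set to 1" OR sits in the branch
   delay slot and thus executes on both paths, while a taken branch skips
   the following "set to 0" OR. When the flag value is combined with the
   destination (op >= SLJIT_ADD) and dst is in memory, the old value is
   loaded into TMP_REG1 first and the flag value is kept in TMP_REG2. */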
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 type)
{
	sljit_s32 reg;
	sljit_u32 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
	ADJUST_LOCAL_OFFSET(dst, dstw);

#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
	op = GET_OPCODE(op);
	reg = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2;

	compiler->cache_arg = 0;
	compiler->cache_argw = 0;

	if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
		FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, dst, dstw, dst, dstw));

	type &= 0xff;
	if (type < SLJIT_EQUAL_F64)
		FAIL_IF(push_inst(compiler, BICC | get_cc(compiler, type) | 3, UNMOVABLE_INS));
	else
		FAIL_IF(push_inst(compiler, FBFCC | get_cc(compiler, type) | 3, UNMOVABLE_INS));

	FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(1), UNMOVABLE_INS));
	FAIL_IF(push_inst(compiler, OR | D(reg) | S1(0) | IMM(0), UNMOVABLE_INS));

	if (op >= SLJIT_ADD) {
		flags |= CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE;
		if (dst & SLJIT_MEM)
			return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
		return emit_op(compiler, op, flags, dst, 0, dst, 0, TMP_REG2, 0);
	}

	if (!(dst & SLJIT_MEM))
		return SLJIT_SUCCESS;

	return emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw);
#else
#error "Implementation required"
#endif
}

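/* SPARC V8 has no conditional move instruction, so this falls back to
   sljit_emit_cmov_generic, which emits a short conditional branch around
   a plain move. */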
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 dst_reg,
	sljit_s32 src, sljit_sw srcw)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));

#if (defined SLJIT_CONFIG_SPARC_32 && SLJIT_CONFIG_SPARC_32)
	return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
#else
#error "Implementation required"
#endif
}

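/* A constant is loaded through emit_const, which reserves a patchable
   instruction pair (presumably SETHI + OR on sparc-32) so that
   sljit_set_const() can rewrite the value after code generation. */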
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
	struct sljit_const *const_;
	sljit_s32 dst_r;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
	PTR_FAIL_IF(!const_);
	set_const(const_, compiler);

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
	PTR_FAIL_IF(emit_const(compiler, dst_r, init_value));

	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
	return const_;
}

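/* put_label is handled like a const: the same patchable sequence is
   emitted, but the value written at code generation time is the address
   of a label supplied later through sljit_set_put_label(). */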
SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	struct sljit_put_label *put_label;
	sljit_s32 dst_r;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
	PTR_FAIL_IF(!put_label);
	set_put_label(put_label, compiler, 0);

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
	PTR_FAIL_IF(emit_const(compiler, dst_r, 0));

	if (dst & SLJIT_MEM)
		PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
	return put_label;
}