/*
 *    Stack-less Just-In-Time compiler
 *
 *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
{
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	return "x86" SLJIT_CPUINFO " ABI:fastcall";
#else
	return "x86" SLJIT_CPUINFO;
#endif
}
     8 - R8   - From now on a REX prefix is required
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)

/* Last register + 1. */
#define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)

static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 3] = {
	0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 7, 6, 3, 4, 5
};
#define CHECK_EXTRA_REGS(p, w, do) \
	if (p >= SLJIT_R3 && p <= SLJIT_S3) { \
		if (p <= compiler->scratches) \
			w = compiler->saveds_offset - ((p) - SLJIT_R2) * (sljit_sw)sizeof(sljit_sw); \
		else \
			w = compiler->locals_offset + ((p) - SLJIT_S2) * (sljit_sw)sizeof(sljit_sw); \
		p = SLJIT_MEM1(SLJIT_SP); \
		do; \
	}
#else /* SLJIT_CONFIG_X86_32 */

/* Last register + 1. */
#define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_REG2	(SLJIT_NUMBER_OF_REGISTERS + 3)

/* Note: r12 & 0x7 == 0b100, which is decoded as "SIB byte present".
   Note: avoid using r12 and r13 for memory addressing;
   therefore r12 is better used as a higher saved register. */
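/* A few illustrative encodings (not emitted verbatim anywhere in this
   file; shown only to justify the note above): with a REX.W prefix,
   "mov rax, [rbx]" encodes as 48 8b 03, while "mov rax, [r12]" needs
   an extra SIB byte (49 8b 04 24) and "mov rax, [r13]" needs an
   explicit zero displacement (49 8b 45 00), because rm == 0b101 with
   mod == 0 would select RIP-relative addressing instead. */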
#ifndef _WIN64
/* Args: rdi(=7), rsi(=6), rdx(=2), rcx(=1), r8, r9. Scratches: rax(=0), r10, r11 */
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
	0, 0, 6, 7, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 9
};
/* low-map. reg_map & 0x7. */
static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
	0, 0, 6, 7, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 1
};
#else /* _WIN64 */
/* Args: rcx(=1), rdx(=2), r8, r9. Scratches: rax(=0), r10, r11 */
static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 4] = {
	0, 0, 2, 8, 1, 11, 12, 5, 13, 14, 15, 7, 6, 3, 4, 9, 10
};
/* low-map. reg_map & 0x7. */
static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS + 4] = {
	0, 0, 2, 0, 1, 3, 4, 5, 5, 6, 7, 7, 6, 3, 4, 1, 2
};
#endif /* !_WIN64 */
/* Args: xmm0-xmm3 */
static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
	4, 0, 1, 2, 3, 5, 6
};
/* low-map. freg_map & 0x7. */
static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1] = {
	4, 0, 1, 2, 3, 5, 6
};
#ifndef _WIN64
#define HALFWORD_MAX 0x7fffffffl
#define HALFWORD_MIN -0x80000000l
#else
#define HALFWORD_MAX 0x7fffffffll
#define HALFWORD_MIN -0x80000000ll
#endif

#define IS_HALFWORD(x)		((x) <= HALFWORD_MAX && (x) >= HALFWORD_MIN)
#define NOT_HALFWORD(x)		((x) > HALFWORD_MAX || (x) < HALFWORD_MIN)
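/* For example, 0x7fffffff is a halfword, but 0x80000000 is not: sign
   extension of a 32-bit immediate field would turn the latter into
   0xffffffff80000000, so such constants must be materialized with a
   separate 64-bit load (see emit_load_imm64 in sljitNativeX86_64.c). */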
#define CHECK_EXTRA_REGS(p, w, do)

#endif /* SLJIT_CONFIG_X86_32 */
/* Size flags for emit_x86_instruction: */
#define EX86_BIN_INS		0x0010
#define EX86_SHIFT_INS		0x0020
#define EX86_REX		0x0040
#define EX86_NO_REXW		0x0080
#define EX86_BYTE_ARG		0x0100
#define EX86_HALF_ARG		0x0200
#define EX86_PREF_66		0x0400
#define EX86_PREF_F2		0x0800
#define EX86_PREF_F3		0x1000
#define EX86_SSE2_OP1		0x2000
#define EX86_SSE2_OP2		0x4000
#define EX86_SSE2		(EX86_SSE2_OP1 | EX86_SSE2_OP2)
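/* The low bits of the size argument of emit_x86_instruction hold the
   opcode length in bytes; the flags above are OR-ed onto it. For
   example, a call of the form
   emit_x86_instruction(compiler, 2 | EX86_PREF_66 | EX86_SSE2, ...)
   requests a two-byte (0F-prefixed) opcode with a 66 prefix and SSE2
   register operands. */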
/* --------------------------------------------------------------------- */
/*  Instruction forms                                                    */
/* --------------------------------------------------------------------- */
#define ADD		(/* BINARY */ 0 << 3)
#define ADD_EAX_i32	0x05
#define ADD_r_rm	0x03
#define ADD_rm_r	0x01
#define ADDSD_x_xm	0x58
#define ADC		(/* BINARY */ 2 << 3)
#define ADC_EAX_i32	0x15
#define ADC_r_rm	0x13
#define ADC_rm_r	0x11
#define AND		(/* BINARY */ 4 << 3)
#define AND_EAX_i32	0x25
#define AND_r_rm	0x23
#define AND_rm_r	0x21
#define ANDPD_x_xm	0x54
#define BSR_r_rm	(/* GROUP_0F */ 0xbd)
#define CALL_i32	0xe8
#define CALL_rm		(/* GROUP_FF */ 2 << 3)
#define CDQ		0x99
#define CMOVE_r_rm	(/* GROUP_0F */ 0x44)
#define CMP		(/* BINARY */ 7 << 3)
#define CMP_EAX_i32	0x3d
#define CMP_r_rm	0x3b
#define CMP_rm_r	0x39
#define CVTPD2PS_x_xm	0x5a
#define CVTSI2SD_x_rm	0x2a
#define CVTTSD2SI_r_xm	0x2c
#define DIV		(/* GROUP_F7 */ 6 << 3)
#define DIVSD_x_xm	0x5e
#define IDIV		(/* GROUP_F7 */ 7 << 3)
#define IMUL		(/* GROUP_F7 */ 5 << 3)
#define IMUL_r_rm	(/* GROUP_0F */ 0xaf)
#define IMUL_r_rm_i8	0x6b
#define IMUL_r_rm_i32	0x69
#define INT3		0xcc
#define JE_i8		0x74
#define JMP_i8		0xeb
#define JMP_i32		0xe9
#define JMP_rm		(/* GROUP_FF */ 4 << 3)
#define LEA_r_m		0x8d
#define MOV_r_rm	0x8b
#define MOV_r_i32	0xb8
#define MOV_rm_r	0x89
#define MOV_rm_i32	0xc7
#define MOV_rm8_i8	0xc6
#define MOV_rm8_r8	0x88
#define MOVSD_x_xm	0x10
#define MOVSD_xm_x	0x11
#define MOVSXD_r_rm	0x63
#define MOVSX_r_rm8	(/* GROUP_0F */ 0xbe)
#define MOVSX_r_rm16	(/* GROUP_0F */ 0xbf)
#define MOVZX_r_rm8	(/* GROUP_0F */ 0xb6)
#define MOVZX_r_rm16	(/* GROUP_0F */ 0xb7)
#define MUL		(/* GROUP_F7 */ 4 << 3)
#define MULSD_x_xm	0x59
#define NEG_rm		(/* GROUP_F7 */ 3 << 3)
#define NOP		0x90
#define NOT_rm		(/* GROUP_F7 */ 2 << 3)
#define OR		(/* BINARY */ 1 << 3)
#define OR_r_rm		0x0b
#define OR_EAX_i32	0x0d
#define OR_rm_r		0x09
#define OR_rm8_r8	0x08
#define POP_r		0x58
#define PREFETCH	0x18
#define PUSH_i32	0x68
#define PUSH_r		0x50
#define PUSH_rm		(/* GROUP_FF */ 6 << 3)
#define RET_near	0xc3
#define RET_i16		0xc2
#define SBB		(/* BINARY */ 3 << 3)
#define SBB_EAX_i32	0x1d
#define SBB_r_rm	0x1b
#define SBB_rm_r	0x19
#define SAR		(/* SHIFT */ 7 << 3)
#define SHL		(/* SHIFT */ 4 << 3)
#define SHR		(/* SHIFT */ 5 << 3)
#define SUB		(/* BINARY */ 5 << 3)
#define SUB_EAX_i32	0x2d
#define SUB_r_rm	0x2b
#define SUB_rm_r	0x29
#define SUBSD_x_xm	0x5c
#define TEST_EAX_i32	0xa9
#define TEST_rm_r	0x85
#define UCOMISD_x_xm	0x2e
#define UNPCKLPD_x_xm	0x14
#define XCHG_EAX_r	0x90
#define XCHG_r_rm	0x87
#define XOR		(/* BINARY */ 6 << 3)
#define XOR_EAX_i32	0x35
#define XOR_r_rm	0x33
#define XOR_rm_r	0x31
#define XORPD_x_xm	0x57
#define GROUP_0F	0x0f
#define GROUP_F7	0xf7
#define GROUP_FF	0xff
#define GROUP_BINARY_81	0x81
#define GROUP_BINARY_83	0x83
#define GROUP_SHIFT_1	0xd1
#define GROUP_SHIFT_N	0xc1
#define GROUP_SHIFT_CL	0xd3

#define MOD_REG		0xc0
#define MOD_DISP8	0x40
#define INC_SIZE(s)		(*inst++ = (s), compiler->size += (s))

#define PUSH_REG(r)		(*inst++ = (PUSH_r + (r)))
#define POP_REG(r)		(*inst++ = (POP_r + (r)))
#define RET()			(*inst++ = (RET_near))
#define RET_I16(n)		(*inst++ = (RET_i16), *inst++ = n, *inst++ = 0)

#define MOV_RM(mod, reg, rm)	(*inst++ = (MOV_r_rm), *inst++ = (mod) << 6 | (reg) << 3 | (rm))
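/* Worked example: MOV_RM(0x3, 0, 1) emits the bytes 8b c1, i.e.
   "mov eax, ecx", since the ModRM byte is
   (mod << 6) | (reg << 3) | rm = 0xc0 | 0x00 | 0x01. */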
/* Multithreading does not affect these static variables, since they store
   built-in CPU features. Therefore they can safely be overwritten by
   different threads if they detect the CPU features at the same time. */
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
static sljit_s32 cpu_has_sse2 = -1;
#endif
static sljit_s32 cpu_has_cmov = -1;

#ifdef _WIN32_WCE
#include <cmnintrin.h>
#elif defined(_MSC_VER) && _MSC_VER >= 1400
#include <intrin.h>
#endif
/******************************************************/
/*    Unaligned-store functions                       */
/******************************************************/

static SLJIT_INLINE void sljit_unaligned_store_s16(void *addr, sljit_s16 value)
{
	SLJIT_MEMCPY(addr, &value, sizeof(value));
}

static SLJIT_INLINE void sljit_unaligned_store_s32(void *addr, sljit_s32 value)
{
	SLJIT_MEMCPY(addr, &value, sizeof(value));
}

static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value)
{
	SLJIT_MEMCPY(addr, &value, sizeof(value));
}
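/* These helpers exist because immediates and displacements are written
   into the instruction stream at arbitrary byte offsets; a plain
   pointer cast would be undefined behavior on alignment-sensitive
   builds, while SLJIT_MEMCPY compiles down to a single unaligned store
   on x86. They are used below when patching jump displacements, e.g.
   sljit_unaligned_store_s32((void*)jump->addr, rel32). */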
/******************************************************/
/*    Utility functions                               */
/******************************************************/
static void get_cpu_features(void)
{
	sljit_u32 features;

#if defined(_MSC_VER) && _MSC_VER >= 1400

	int CPUInfo[4];
	__cpuid(CPUInfo, 1);
	features = (sljit_u32)CPUInfo[3];

#elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C)

	/* AT&T syntax. */
	__asm__ (
		"movl $0x1, %%eax\n"
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		/* On x86-32, there is no red zone, so this
		   should work (no need for a local variable). */
		"push %%ebx\n"
#endif
		"cpuid\n"
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		"pop %%ebx\n"
#endif
		"movl %%edx, %0\n"
		: "=g" (features)
		:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		: "%eax", "%ecx", "%edx"
#else
		: "%rax", "%rbx", "%rcx", "%rdx"
#endif
	);

#else /* _MSC_VER && _MSC_VER >= 1400 */

	/* Intel syntax. */
	__asm {
		mov eax, 1
		cpuid
		mov features, edx
	}

#endif /* _MSC_VER && _MSC_VER >= 1400 */

#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
	cpu_has_sse2 = (features >> 26) & 0x1;
#endif
	cpu_has_cmov = (features >> 15) & 0x1;
}
static sljit_u8 get_jump_code(sljit_s32 type)
{
	switch (type) {
	case SLJIT_EQUAL:
	case SLJIT_EQUAL_F64:
		return 0x84 /* je */;

	case SLJIT_NOT_EQUAL:
	case SLJIT_NOT_EQUAL_F64:
		return 0x85 /* jne */;

	case SLJIT_LESS:
	case SLJIT_LESS_F64:
		return 0x82 /* jc */;

	case SLJIT_GREATER_EQUAL:
	case SLJIT_GREATER_EQUAL_F64:
		return 0x83 /* jae */;

	case SLJIT_GREATER:
	case SLJIT_GREATER_F64:
		return 0x87 /* jnbe */;

	case SLJIT_LESS_EQUAL:
	case SLJIT_LESS_EQUAL_F64:
		return 0x86 /* jbe */;

	case SLJIT_SIG_LESS:
		return 0x8c /* jl */;

	case SLJIT_SIG_GREATER_EQUAL:
		return 0x8d /* jnl */;

	case SLJIT_SIG_GREATER:
		return 0x8f /* jnle */;

	case SLJIT_SIG_LESS_EQUAL:
		return 0x8e /* jle */;

	case SLJIT_OVERFLOW:
	case SLJIT_MUL_OVERFLOW:
		return 0x80 /* jo */;

	case SLJIT_NOT_OVERFLOW:
	case SLJIT_MUL_NOT_OVERFLOW:
		return 0x81 /* jno */;

	case SLJIT_UNORDERED_F64:
		return 0x8a /* jp */;

	case SLJIT_ORDERED_F64:
		return 0x8b /* jpo */;
	}
	return 0;
}
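/* The values returned above are the second bytes of the two-byte near
   forms "0f 8x rel32" (e.g. 0x84 -> 0f 84, je). The matching short
   forms are "7x rel8" (e.g. 74), which is why generate_near_jump_code
   below subtracts 0x10 from the returned code when a short jump
   fits. */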
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type, sljit_sw executable_offset);
#else
static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type);
#endif
static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_s32 type, sljit_sw executable_offset)
{
	sljit_s32 short_jump;
	sljit_uw label_addr;

	if (jump->flags & JUMP_LABEL)
		label_addr = (sljit_uw)(code + jump->u.label->size);
	else
		label_addr = jump->u.target - executable_offset;

	short_jump = (sljit_sw)(label_addr - (jump->addr + 2)) >= -128 && (sljit_sw)(label_addr - (jump->addr + 2)) <= 127;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if ((sljit_sw)(label_addr - (jump->addr + 1)) > HALFWORD_MAX || (sljit_sw)(label_addr - (jump->addr + 1)) < HALFWORD_MIN)
		return generate_far_jump_code(jump, code_ptr, type);
#endif

	if (type == SLJIT_JUMP) {
		if (short_jump)
			*code_ptr++ = JMP_i8;
		else
			*code_ptr++ = JMP_i32;
		jump->addr++;
	}
	else if (type >= SLJIT_FAST_CALL) {
		short_jump = 0;
		*code_ptr++ = CALL_i32;
		jump->addr++;
	}
	else if (short_jump) {
		*code_ptr++ = get_jump_code(type) - 0x10;
		jump->addr++;
	}
	else {
		*code_ptr++ = GROUP_0F;
		*code_ptr++ = get_jump_code(type);
		jump->addr += 2;
	}

	if (short_jump) {
		jump->flags |= PATCH_MB;
		code_ptr += sizeof(sljit_s8);
	} else {
		jump->flags |= PATCH_MW;
		code_ptr += sizeof(sljit_s32);
	}

	return code_ptr;
}
SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
{
	struct sljit_memory_fragment *buf;
	sljit_u8 *code;
	sljit_u8 *code_ptr;
	sljit_u8 *buf_ptr;
	sljit_u8 *buf_end;
	sljit_u8 len;
	sljit_sw executable_offset;
	sljit_uw jump_addr;

	struct sljit_label *label;
	struct sljit_jump *jump;
	struct sljit_const *const_;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_generate_code(compiler));
	reverse_buf(compiler);

	/* Second code generation pass. */
	code = (sljit_u8*)SLJIT_MALLOC_EXEC(compiler->size);
	PTR_FAIL_WITH_EXEC_IF(code);
	buf = compiler->buf;

	code_ptr = code;
	label = compiler->labels;
	jump = compiler->jumps;
	const_ = compiler->consts;
	executable_offset = SLJIT_EXEC_OFFSET(code);

	do {
		buf_ptr = buf->memory;
		buf_end = buf_ptr + buf->used_size;
		do {
			len = *buf_ptr++;
			if (len > 0) {
				/* The code is already generated. */
				SLJIT_MEMCPY(code_ptr, buf_ptr, len);
				code_ptr += len;
				buf_ptr += len;
			}
			else {
				if (*buf_ptr >= 2) {
					jump->addr = (sljit_uw)code_ptr;
					if (!(jump->flags & SLJIT_REWRITABLE_JUMP))
						code_ptr = generate_near_jump_code(jump, code_ptr, code, *buf_ptr - 2, executable_offset);
					else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
						code_ptr = generate_far_jump_code(jump, code_ptr, *buf_ptr - 2, executable_offset);
#else
						code_ptr = generate_far_jump_code(jump, code_ptr, *buf_ptr - 2);
#endif
					}
					jump = jump->next;
				}
				else if (*buf_ptr == 0) {
					label->addr = ((sljit_uw)code_ptr) + executable_offset;
					label->size = code_ptr - code;
					label = label->next;
				}
				else { /* *buf_ptr is 1 */
					const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_sw);
					const_ = const_->next;
				}
				buf_ptr++;
			}
		} while (buf_ptr < buf_end);
		SLJIT_ASSERT(buf_ptr == buf_end);
		buf = buf->next;
	} while (buf);

	SLJIT_ASSERT(!label);
	SLJIT_ASSERT(!jump);
	SLJIT_ASSERT(!const_);

	jump = compiler->jumps;
	while (jump) {
		jump_addr = jump->addr + executable_offset;

		if (jump->flags & PATCH_MB) {
			SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8))) >= -128 && (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8))) <= 127);
			*(sljit_u8*)jump->addr = (sljit_u8)(jump->u.label->addr - (jump_addr + sizeof(sljit_s8)));
		} else if (jump->flags & PATCH_MW) {
			if (jump->flags & JUMP_LABEL) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
				sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_sw))));
#else
				SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
				sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)(jump->u.label->addr - (jump_addr + sizeof(sljit_s32))));
#endif
			}
			else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
				sljit_unaligned_store_sw((void*)jump->addr, (sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_sw))));
#else
				SLJIT_ASSERT((sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_s32))) >= HALFWORD_MIN && (sljit_sw)(jump->u.target - (jump_addr + sizeof(sljit_s32))) <= HALFWORD_MAX);
				sljit_unaligned_store_s32((void*)jump->addr, (sljit_s32)(jump->u.target - (jump_addr + sizeof(sljit_s32))));
#endif
			}
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		else if (jump->flags & PATCH_MD)
			sljit_unaligned_store_sw((void*)jump->addr, jump->u.label->addr);
#endif

		jump = jump->next;
	}

	/* Some space may be wasted because of short jumps. */
	SLJIT_ASSERT(code_ptr <= code + compiler->size);
	compiler->error = SLJIT_ERR_COMPILED;
	compiler->executable_offset = executable_offset;
	compiler->executable_size = code_ptr - code;
	return (void*)(code + executable_offset);
}
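/* A minimal embedder-side usage sketch (not part of this file; the
   emitter calls that build the code depend on the sljit version in
   use): the returned buffer is directly callable after a cast.

       void *code = sljit_generate_code(compiler);
       sljit_sw (*func)(void) = (sljit_sw (*)(void))code;
       sljit_sw result = func();
       ...
       sljit_free_code(code);
*/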
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
{
	switch (feature_type) {
	case SLJIT_HAS_FPU:
#ifdef SLJIT_IS_FPU_AVAILABLE
		return SLJIT_IS_FPU_AVAILABLE;
#elif (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
		if (cpu_has_sse2 == -1)
			get_cpu_features();
		return cpu_has_sse2;
#else /* SLJIT_DETECT_SSE2 */
		return 1;
#endif /* SLJIT_DETECT_SSE2 */

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	case SLJIT_HAS_VIRTUAL_REGISTERS:
		return 1;
#endif

	case SLJIT_HAS_CLZ:
	case SLJIT_HAS_CMOV:
		if (cpu_has_cmov == -1)
			get_cpu_features();
		return cpu_has_cmov;

	case SLJIT_HAS_SSE2:
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
		if (cpu_has_sse2 == -1)
			get_cpu_features();
		return cpu_has_sse2;
#else
		return 1;
#endif
	}

	return 0;
}
/* --------------------------------------------------------------------- */
/*  Operators                                                            */
/* --------------------------------------------------------------------- */
#define BINARY_OPCODE(opcode) (((opcode ## _EAX_i32) << 24) | ((opcode ## _r_rm) << 16) | ((opcode ## _rm_r) << 8) | (opcode))
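/* For instance, BINARY_OPCODE(ADD) expands to
   (ADD_EAX_i32 << 24) | (ADD_r_rm << 16) | (ADD_rm_r << 8) | ADD
   = 0x05030100; emit_cum_binary below unpacks the four encodings of
   the same operation from this single value. */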
static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
	sljit_u32 op_types,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w);

static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
	sljit_u32 op_types,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w);
static sljit_s32 emit_mov(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw);

#define EMIT_MOV(compiler, dst, dstw, src, srcw) \
	FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));

static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src);

static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw);

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
#include "sljitNativeX86_32.c"
#else
#include "sljitNativeX86_64.c"
#endif
static sljit_s32 emit_mov(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;

	SLJIT_ASSERT(dst != SLJIT_UNUSED);

	if (FAST_IS_REG(src)) {
		inst = emit_x86_instruction(compiler, 1, src, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_r;
		return SLJIT_SUCCESS;
	}
	if (src & SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
#else
			if (!compiler->mode32) {
				if (NOT_HALFWORD(srcw))
					return emit_load_imm64(compiler, dst, srcw);
			}
			else
				return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, MOV_r_i32 + reg_lmap[dst], srcw);
#endif
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (!compiler->mode32 && NOT_HALFWORD(srcw)) {
			/* Immediate-to-memory move. Only the SLJIT_MOV operation copies
			   an immediate directly into memory, so TMP_REG1 can be used. */
			FAIL_IF(emit_load_imm64(compiler, TMP_REG1, srcw));
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = MOV_rm_r;
			return SLJIT_SUCCESS;
		}
#endif
		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_i32;
		return SLJIT_SUCCESS;
	}
	if (FAST_IS_REG(dst)) {
		inst = emit_x86_instruction(compiler, 1, dst, 0, src, srcw);
		FAIL_IF(!inst);
		*inst = MOV_r_rm;
		return SLJIT_SUCCESS;
	}

	/* Memory-to-memory move. Only the SLJIT_MOV operation copies
	   data from memory to memory, so TMP_REG1 can be used. */
	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src, srcw);
	FAIL_IF(!inst);
	*inst = MOV_r_rm;
	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
	FAIL_IF(!inst);
	*inst = MOV_rm_r;
	return SLJIT_SUCCESS;
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
{
	sljit_u8 *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	sljit_s32 size;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_op0(compiler, op));

	switch (GET_OPCODE(op)) {
	case SLJIT_BREAKPOINT:
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);
		INC_SIZE(1);
		*inst = INT3;
		break;
	case SLJIT_NOP:
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);
		INC_SIZE(1);
		*inst = NOP;
		break;
	case SLJIT_LMUL_UW:
	case SLJIT_LMUL_SW:
	case SLJIT_DIVMOD_UW:
	case SLJIT_DIVMOD_SW:
	case SLJIT_DIV_UW:
	case SLJIT_DIV_SW:
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#ifdef _WIN64
		SLJIT_ASSERT(
			reg_map[SLJIT_R0] == 0
			&& reg_map[SLJIT_R1] == 2
			&& reg_map[TMP_REG1] > 7);
#else
		SLJIT_ASSERT(
			reg_map[SLJIT_R0] == 0
			&& reg_map[SLJIT_R1] < 7
			&& reg_map[TMP_REG1] == 2);
#endif
		compiler->mode32 = op & SLJIT_I32_OP;
#endif
		SLJIT_COMPILE_ASSERT((SLJIT_DIVMOD_UW & 0x2) == 0 && SLJIT_DIV_UW - 0x2 == SLJIT_DIVMOD_UW, bad_div_opcode_assignments);

		op = GET_OPCODE(op);
		if ((op | 0x2) == SLJIT_DIV_UW) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
			inst = emit_x86_instruction(compiler, 1, SLJIT_R1, 0, SLJIT_R1, 0);
#else
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
#endif
			FAIL_IF(!inst);
			*inst = XOR_r_rm;
		}

		if ((op | 0x2) == SLJIT_DIV_SW) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_R1, 0);
#endif

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
			FAIL_IF(!inst);
			INC_SIZE(1);
			*inst = CDQ;
#else
			if (compiler->mode32) {
				inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
				FAIL_IF(!inst);
				INC_SIZE(1);
				*inst = CDQ;
			} else {
				inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
				FAIL_IF(!inst);
				INC_SIZE(2);
				*inst++ = REX_W;
				*inst = CDQ;
			}
#endif
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
		FAIL_IF(!inst);
		INC_SIZE(2);
		*inst++ = GROUP_F7;
		*inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_map[TMP_REG1] : reg_map[SLJIT_R1]);
#else
#ifdef _WIN64
		size = (!compiler->mode32 || op >= SLJIT_DIVMOD_UW) ? 3 : 2;
#else
		size = (!compiler->mode32) ? 3 : 2;
#endif
		inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!inst);
		INC_SIZE(size);
#ifdef _WIN64
		if (!compiler->mode32)
			*inst++ = REX_W | ((op >= SLJIT_DIVMOD_UW) ? REX_B : 0);
		else if (op >= SLJIT_DIVMOD_UW)
			*inst++ = REX_B;
		*inst++ = GROUP_F7;
		*inst = MOD_REG | ((op >= SLJIT_DIVMOD_UW) ? reg_lmap[TMP_REG1] : reg_lmap[SLJIT_R1]);
#else
		if (!compiler->mode32)
			*inst++ = REX_W;
		*inst++ = GROUP_F7;
		*inst = MOD_REG | reg_map[SLJIT_R1];
#endif
#endif
		switch (op) {
		case SLJIT_LMUL_UW:
			*inst |= MUL;
			break;
		case SLJIT_LMUL_SW:
			*inst |= IMUL;
			break;
		case SLJIT_DIVMOD_UW:
		case SLJIT_DIV_UW:
			*inst |= DIV;
			break;
		case SLJIT_DIVMOD_SW:
		case SLJIT_DIV_SW:
			*inst |= IDIV;
			break;
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
		if (op <= SLJIT_DIVMOD_SW)
			EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
#else
		if (op >= SLJIT_DIV_UW)
			EMIT_MOV(compiler, SLJIT_R1, 0, TMP_REG1, 0);
#endif
		break;
	}

	return SLJIT_SUCCESS;
}
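/* Encoding note: the sign-extension step above is CDQ (99) in 32-bit
   mode; with a REX.W prefix (48 99) the same opcode acts as CQO,
   extending RAX into RDX:RAX before a 64-bit IDIV. */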
#define ENCODE_PREFIX(prefix) \
	do { \
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1); \
		FAIL_IF(!inst); \
		INC_SIZE(1); \
		*inst = (prefix); \
	} while (0)
static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 dst_r;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 work_r;
#endif

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
#endif

	if (src & SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
#else
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
			FAIL_IF(!inst);
			*inst = MOV_rm_i32;
			return SLJIT_SUCCESS;
#endif
		}
		inst = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm8_i8;
		return SLJIT_SUCCESS;
	}

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (reg_map[src] >= 4) {
			SLJIT_ASSERT(dst_r == TMP_REG1);
			EMIT_MOV(compiler, TMP_REG1, 0, src, 0);
		} else
			dst_r = src;
#else
		dst_r = src;
#endif
	}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	else if (FAST_IS_REG(src) && reg_map[src] >= 4) {
		/* Both src and dst are registers. */
		SLJIT_ASSERT(SLOW_IS_REG(dst));
		if (reg_map[dst] < 4) {
			if (dst != src)
				EMIT_MOV(compiler, dst, 0, src, 0);
			inst = emit_x86_instruction(compiler, 2, dst, 0, dst, 0);
			FAIL_IF(!inst);
			*inst++ = GROUP_0F;
			*inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
		}
		else {
			if (dst != src)
				EMIT_MOV(compiler, dst, 0, src, 0);
			if (sign) {
				/* shl reg, 24 */
				inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
				FAIL_IF(!inst);
				*inst |= SHL;
				/* sar reg, 24 */
				inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
				FAIL_IF(!inst);
				*inst |= SAR;
			}
			else {
				inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0);
				FAIL_IF(!inst);
				*(inst + 1) |= AND;
			}
		}
		return SLJIT_SUCCESS;
	}
#endif
	else {
		/* src can be a memory address, or reg_map[src] < 4 on x86-32. */
		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
		FAIL_IF(!inst);
		*inst++ = GROUP_0F;
		*inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
	}

	if (dst & SLJIT_MEM) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (dst_r == TMP_REG1) {
			/* Find an unused register whose reg_map value is < 4. */
			if ((dst & REG_MASK) == SLJIT_R0) {
				if ((dst & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_R1))
					work_r = SLJIT_R2;
				else
					work_r = SLJIT_R1;
			}
			else {
				if ((dst & OFFS_REG_MASK) != TO_OFFS_REG(SLJIT_R0))
					work_r = SLJIT_R0;
				else if ((dst & REG_MASK) == SLJIT_R1)
					work_r = SLJIT_R2;
				else
					work_r = SLJIT_R1;
			}

			if (work_r == SLJIT_R0) {
				ENCODE_PREFIX(XCHG_EAX_r + reg_map[TMP_REG1]);
			}
			else {
				inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
				FAIL_IF(!inst);
				*inst = XCHG_r_rm;
			}

			inst = emit_x86_instruction(compiler, 1, work_r, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = MOV_rm8_r8;

			if (work_r == SLJIT_R0) {
				ENCODE_PREFIX(XCHG_EAX_r + reg_map[TMP_REG1]);
			}
			else {
				inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
				FAIL_IF(!inst);
				*inst = XCHG_r_rm;
			}
		}
		else {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = MOV_rm8_r8;
		}
#else
		inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm8_r8;
#endif
	}

	return SLJIT_SUCCESS;
}
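/* Background for the reg_map[src] >= 4 checks above: without a REX
   prefix, r/m encodings 4-7 in a byte instruction select AH/CH/DH/BH
   rather than the low byte of ESP/EBP/ESI/EDI, so on x86-32 only
   EAX/ECX/EDX/EBX (map values 0-3) have directly addressable low
   bytes; other registers must be shuffled through one of them. */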
static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif

	inst = emit_x86_instruction(compiler, 2, 0, 0, src, srcw);
	FAIL_IF(!inst);
	*inst++ = GROUP_0F;
	*inst++ = PREFETCH;

	if (op >= SLJIT_MOV_U8 && op <= SLJIT_MOV_S8)
		*inst |= (3 << 3);
	else if (op >= SLJIT_MOV_U16 && op <= SLJIT_MOV_S16)
		*inst |= (2 << 3);
	else
		*inst |= (1 << 3);

	return SLJIT_SUCCESS;
}
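/* The (n << 3) values above set the reg field of the ModRM byte, which
   selects the hint for the 0f 18 opcode: /1 is prefetcht0, /2 is
   prefetcht1 and /3 is prefetcht2 (/0 would be prefetchnta). */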
static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 dst_r;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;
#endif

	if (src & SLJIT_IMM) {
		if (FAST_IS_REG(dst)) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
#else
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
			FAIL_IF(!inst);
			*inst = MOV_rm_i32;
			return SLJIT_SUCCESS;
#endif
		}
		inst = emit_x86_instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_i32;
		return SLJIT_SUCCESS;
	}

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
		dst_r = src;
	else {
		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
		FAIL_IF(!inst);
		*inst++ = GROUP_0F;
		*inst = sign ? MOVSX_r_rm16 : MOVZX_r_rm16;
	}

	if (dst & SLJIT_MEM) {
		inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst = MOV_rm_r;
	}

	return SLJIT_SUCCESS;
}
static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;

	if (dst == src && dstw == srcw) {
		/* Same input and output. */
		inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
		FAIL_IF(!inst);
		*inst++ = GROUP_F7;
		*inst |= opcode;
		return SLJIT_SUCCESS;
	}

	if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED))
		dst = TMP_REG1;

	if (FAST_IS_REG(dst)) {
		EMIT_MOV(compiler, dst, 0, src, srcw);
		inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
		FAIL_IF(!inst);
		*inst++ = GROUP_F7;
		*inst |= opcode;
		return SLJIT_SUCCESS;
	}

	EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
	inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
	FAIL_IF(!inst);
	*inst++ = GROUP_F7;
	*inst |= opcode;
	EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	return SLJIT_SUCCESS;
}
static sljit_s32 emit_not_with_flags(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;

	if (dst == SLJIT_UNUSED)
		dst = TMP_REG1;

	if (FAST_IS_REG(dst)) {
		EMIT_MOV(compiler, dst, 0, src, srcw);
		inst = emit_x86_instruction(compiler, 1, 0, 0, dst, 0);
		FAIL_IF(!inst);
		*inst++ = GROUP_F7;
		*inst |= NOT_rm;
		inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
		FAIL_IF(!inst);
		*inst = OR_r_rm;
		return SLJIT_SUCCESS;
	}

	EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
	inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REG1, 0);
	FAIL_IF(!inst);
	*inst++ = GROUP_F7;
	*inst |= NOT_rm;
	inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, TMP_REG1, 0);
	FAIL_IF(!inst);
	*inst = OR_r_rm;
	EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
static const sljit_sw emit_clz_arg = 32 + 31;
#endif

static sljit_s32 emit_clz(struct sljit_compiler *compiler, sljit_s32 op_flags,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_u8* inst;
	sljit_s32 dst_r;

	SLJIT_UNUSED_ARG(op_flags);

	if (cpu_has_cmov == -1)
		get_cpu_features();

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
	FAIL_IF(!inst);
	*inst++ = GROUP_0F;
	*inst = BSR_r_rm;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (cpu_has_cmov) {
		if (dst_r != TMP_REG1) {
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 32 + 31);
			inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG1, 0);
		}
		else
			inst = emit_x86_instruction(compiler, 2, dst_r, 0, SLJIT_MEM0(), (sljit_sw)&emit_clz_arg);

		FAIL_IF(!inst);
		*inst++ = GROUP_0F;
		*inst = CMOVE_r_rm;
	}
	else
		FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, 32 + 31));

	inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
#else
	if (cpu_has_cmov) {
		EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? (64 + 63) : (32 + 31));

		inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
		FAIL_IF(!inst);
		*inst++ = GROUP_0F;
		*inst = CMOVE_r_rm;
	}
	else
		FAIL_IF(sljit_emit_cmov_generic(compiler, SLJIT_EQUAL, dst_r, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? (64 + 63) : (32 + 31)));

	inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, !(op_flags & SLJIT_I32_OP) ? 63 : 31, dst_r, 0);
#endif

	FAIL_IF(!inst);
	*(inst + 1) |= XOR;

	if (dst & SLJIT_MEM)
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	return SLJIT_SUCCESS;
}
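/* Why this sequence works: BSR returns the index of the highest set
   bit and leaves the destination undefined (with ZF set) for a zero
   input. CMOVE therefore substitutes 32 + 31 (or 64 + 63) when the
   input was zero, and the final XOR with 31 (or 63) maps index i to
   31 - i, yielding 32 (or 64) for a zero input: (32 + 31) ^ 31 == 32. */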
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 op_flags = GET_ALL_FLAGS(op);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	sljit_s32 dst_is_ereg = 0;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1);
	CHECK_EXTRA_REGS(src, srcw, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op_flags & SLJIT_I32_OP;
#endif

	if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) {
		if (op <= SLJIT_MOV_P && (src & SLJIT_MEM))
			return emit_prefetch(compiler, op, src, srcw);
		return SLJIT_SUCCESS;
	}

	op = GET_OPCODE(op);

	if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		compiler->mode32 = 0;
#endif

		if (FAST_IS_REG(src) && src == dst) {
			if (!TYPE_CAST_NEEDED(op))
				return SLJIT_SUCCESS;
		}

		if (op_flags & SLJIT_I32_OP) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (src & SLJIT_MEM) {
				if (op == SLJIT_MOV_S32)
					op = SLJIT_MOV_U32;
			}
			else if (src & SLJIT_IMM) {
				if (op == SLJIT_MOV_U32)
					op = SLJIT_MOV_S32;
			}
#endif
		}

		if (src & SLJIT_IMM) {
			switch (op) {
			case SLJIT_MOV_U8:
				srcw = (sljit_u8)srcw;
				break;
			case SLJIT_MOV_S8:
				srcw = (sljit_s8)srcw;
				break;
			case SLJIT_MOV_U16:
				srcw = (sljit_u16)srcw;
				break;
			case SLJIT_MOV_S16:
				srcw = (sljit_s16)srcw;
				break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			case SLJIT_MOV_U32:
				srcw = (sljit_u32)srcw;
				break;
			case SLJIT_MOV_S32:
				srcw = (sljit_s32)srcw;
				break;
#endif
			}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			if (SLJIT_UNLIKELY(dst_is_ereg))
				return emit_mov(compiler, dst, dstw, src, srcw);
#endif
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_U32 || op == SLJIT_MOV_S32 || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) {
			SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_SP));
			dst = TMP_REG1;
		}
#endif

		switch (op) {
		case SLJIT_MOV:
		case SLJIT_MOV_P:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		case SLJIT_MOV_U32:
		case SLJIT_MOV_S32:
#endif
			FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_U8:
			FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S8:
			FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_U16:
			FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S16:
			FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw));
			break;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		case SLJIT_MOV_U32:
			FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw));
			break;
		case SLJIT_MOV_S32:
			FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw));
			break;
#endif
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (SLJIT_UNLIKELY(dst_is_ereg) && dst == TMP_REG1)
			return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), dstw, TMP_REG1, 0);
#endif
		return SLJIT_SUCCESS;
	}

	switch (op) {
	case SLJIT_NOT:
		if (SLJIT_UNLIKELY(op_flags & SLJIT_SET_Z))
			return emit_not_with_flags(compiler, dst, dstw, src, srcw);
		return emit_unary(compiler, NOT_rm, dst, dstw, src, srcw);

	case SLJIT_NEG:
		return emit_unary(compiler, NEG_rm, dst, dstw, src, srcw);

	case SLJIT_CLZ:
		return emit_clz(compiler, op_flags, dst, dstw, src, srcw);
	}

	return SLJIT_SUCCESS;
}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
	if (IS_HALFWORD(immw) || compiler->mode32) { \
		inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
		FAIL_IF(!inst); \
		*(inst + 1) |= (op_imm); \
	} \
	else { \
		FAIL_IF(emit_load_imm64(compiler, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, immw)); \
		inst = emit_x86_instruction(compiler, 1, (arg == TMP_REG1) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
		FAIL_IF(!inst); \
		*inst = (op_mr); \
	}

#define BINARY_EAX_IMM(op_eax_imm, immw) \
	FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (op_eax_imm), immw))

#else /* SLJIT_CONFIG_X86_64 */

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
	inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
	FAIL_IF(!inst); \
	*(inst + 1) |= (op_imm);

#define BINARY_EAX_IMM(op_eax_imm, immw) \
	FAIL_IF(emit_do_imm(compiler, (op_eax_imm), immw))

#endif /* SLJIT_CONFIG_X86_64 */
static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler,
	sljit_u32 op_types,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_u8 op_eax_imm = (op_types >> 24);
	sljit_u8 op_rm = (op_types >> 16) & 0xff;
	sljit_u8 op_mr = (op_types >> 8) & 0xff;
	sljit_u8 op_imm = op_types & 0xff;

	if (dst == SLJIT_UNUSED) {
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		if (src2 & SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		return SLJIT_SUCCESS;
	}

	if (dst == src1 && dstw == src1w) {
		if (src2 & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src2w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src2)) {
			/* Special exception for sljit_emit_op_flags. */
			inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* Only for cumulative operations. */
	if (dst == src2 && dstw == src2w) {
		if (src1 & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
#else
			if ((dst == SLJIT_R0) && (src1w > 127 || src1w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src1w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src1w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src1)) {
			inst = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* General version. */
	if (FAST_IS_REG(dst)) {
		EMIT_MOV(compiler, dst, 0, src1, src1w);
		if (src2 & SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
	}
	else {
		/* This version requires less memory writing. */
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		if (src2 & SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}
static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler,
	sljit_u32 op_types,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_u8 op_eax_imm = (op_types >> 24);
	sljit_u8 op_rm = (op_types >> 16) & 0xff;
	sljit_u8 op_mr = (op_types >> 8) & 0xff;
	sljit_u8 op_imm = op_types & 0xff;

	if (dst == SLJIT_UNUSED) {
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		if (src2 & SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		return SLJIT_SUCCESS;
	}

	if (dst == src1 && dstw == src1w) {
		if (src2 & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
			if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
#endif
				BINARY_EAX_IMM(op_eax_imm, src2w);
			}
			else {
				BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
			}
		}
		else if (FAST_IS_REG(dst)) {
			inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		else if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		else {
			EMIT_MOV(compiler, TMP_REG1, 0, src2, src2w);
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, dst, dstw);
			FAIL_IF(!inst);
			*inst = op_mr;
		}
		return SLJIT_SUCCESS;
	}

	/* General version. */
	if (FAST_IS_REG(dst) && dst != src2) {
		EMIT_MOV(compiler, dst, 0, src1, src1w);
		if (src2 & SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, dst, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
	}
	else {
		/* This version requires less memory writing. */
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		if (src2 & SLJIT_IMM) {
			BINARY_IMM(op_imm, op_mr, src2w, TMP_REG1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = op_rm;
		}
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}
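/* Unlike emit_cum_binary above, this function has no dst == src2
   shortcut: SUB and SBB are not commutative, so "dst = src1 - dst"
   cannot be computed by operating on dst in place; that case falls
   through to the general version via the temporary register. */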
static sljit_s32 emit_mul(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_s32 dst_r;

	dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;

	/* Register destination. */
	if (dst_r == src1 && !(src2 & SLJIT_IMM)) {
		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
		FAIL_IF(!inst);
		*inst++ = GROUP_0F;
		*inst = IMUL_r_rm;
	}
	else if (dst_r == src2 && !(src1 & SLJIT_IMM)) {
		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src1, src1w);
		FAIL_IF(!inst);
		*inst++ = GROUP_0F;
		*inst = IMUL_r_rm;
	}
	else if (src1 & SLJIT_IMM) {
		if (src2 & SLJIT_IMM) {
			EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, src2w);
			src2 = dst_r;
			src2w = 0;
		}

		if (src1w <= 127 && src1w >= -128) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i8;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
			FAIL_IF(!inst);
			INC_SIZE(1);
			*inst = (sljit_s8)src1w;
		}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		else {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_sw(inst, src1w);
		}
#else
		else if (IS_HALFWORD(src1w)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_s32(inst, (sljit_s32)src1w);
		}
		else {
			EMIT_MOV(compiler, dst_r, 0, src2, src2w);
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w));
			inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
			FAIL_IF(!inst);
			*inst++ = GROUP_0F;
			*inst = IMUL_r_rm;
		}
#endif
	}
	else if (src2 & SLJIT_IMM) {
		/* Note: src1 is NOT an immediate. */

		if (src2w <= 127 && src2w >= -128) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i8;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
			FAIL_IF(!inst);
			INC_SIZE(1);
			*inst = (sljit_s8)src2w;
		}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		else {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_sw(inst, src2w);
		}
#else
		else if (IS_HALFWORD(src2w)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = IMUL_r_rm_i32;
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
			FAIL_IF(!inst);
			INC_SIZE(4);
			sljit_unaligned_store_s32(inst, (sljit_s32)src2w);
		}
		else {
			EMIT_MOV(compiler, dst_r, 0, src1, src1w);
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
			inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0);
			FAIL_IF(!inst);
			*inst++ = GROUP_0F;
			*inst = IMUL_r_rm;
		}
#endif
	}
	else {
		/* Neither argument is an immediate. */
		if (ADDRESSING_DEPENDS_ON(src2, dst_r))
			dst_r = TMP_REG1;
		EMIT_MOV(compiler, dst_r, 0, src1, src1w);
		inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w);
		FAIL_IF(!inst);
		*inst++ = GROUP_0F;
		*inst = IMUL_r_rm;
	}

	if (dst & SLJIT_MEM)
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);

	return SLJIT_SUCCESS;
}
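/* The three IMUL forms used above, for reference: "0f af /r"
   (dst *= src), "6b /r ib" (dst = src * imm8) and "69 /r id"
   (dst = src * imm32). The immediate forms are why a constant operand
   never has to be loaded into a register first, except for 64-bit
   immediates that do not fit in a sign-extended 32-bit field. */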
static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;
	sljit_s32 dst_r, done = 0;

	/* These cases are better handled by the normal path. */
	if (dst == src1 && dstw == src1w)
		return SLJIT_ERR_UNSUPPORTED;
	if (dst == src2 && dstw == src2w)
		return SLJIT_ERR_UNSUPPORTED;

	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;

	if (FAST_IS_REG(src1)) {
		if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), 0);
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if ((src2 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src2w))) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_s32)src2w);
#else
		if (src2 & SLJIT_IMM) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w);
#endif
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
	}
	else if (FAST_IS_REG(src2)) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if ((src1 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src1w))) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_s32)src1w);
#else
		if (src1 & SLJIT_IMM) {
			inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w);
#endif
			FAIL_IF(!inst);
			*inst = LEA_r_m;
			done = 1;
		}
	}

	if (done) {
		if (dst_r == TMP_REG1)
			return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
		return SLJIT_SUCCESS;
	}
	return SLJIT_ERR_UNSUPPORTED;
}
static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
	if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
#endif
		BINARY_EAX_IMM(CMP_EAX_i32, src2w);
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(src1)) {
		if (src2 & SLJIT_IMM) {
			BINARY_IMM(CMP, CMP_rm_r, src2w, src1, 0);
		}
		else {
			inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = CMP_r_rm;
		}
		return SLJIT_SUCCESS;
	}

	if (FAST_IS_REG(src2) && !(src1 & SLJIT_IMM)) {
		inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
		FAIL_IF(!inst);
		*inst = CMP_rm_r;
		return SLJIT_SUCCESS;
	}

	if (src2 & SLJIT_IMM) {
		if (src1 & SLJIT_IMM) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			src1 = TMP_REG1;
			src1w = 0;
		}
		BINARY_IMM(CMP, CMP_rm_r, src2w, src1, src1w);
	}
	else {
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
		FAIL_IF(!inst);
		*inst = CMP_r_rm;
	}
	return SLJIT_SUCCESS;
}
static sljit_s32 emit_test_binary(struct sljit_compiler *compiler,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
	if (src1 == SLJIT_R0 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) {
#endif
		BINARY_EAX_IMM(TEST_EAX_i32, src2w);
		return SLJIT_SUCCESS;
	}

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) {
#else
	if (src2 == SLJIT_R0 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128)) {
#endif
		BINARY_EAX_IMM(TEST_EAX_i32, src1w);
		return SLJIT_SUCCESS;
	}

	if (!(src1 & SLJIT_IMM)) {
		if (src2 & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (IS_HALFWORD(src2w) || compiler->mode32) {
				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
				FAIL_IF(!inst);
				*inst = GROUP_F7;
			}
			else {
				FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src2w));
				inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src1, src1w);
				FAIL_IF(!inst);
				*inst = TEST_rm_r;
			}
#else
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, src1w);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
#endif
			return SLJIT_SUCCESS;
		}
		else if (FAST_IS_REG(src1)) {
			inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
			return SLJIT_SUCCESS;
		}
	}

	if (!(src2 & SLJIT_IMM)) {
		if (src1 & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
			if (IS_HALFWORD(src1w) || compiler->mode32) {
				inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, src2w);
				FAIL_IF(!inst);
				*inst = GROUP_F7;
			}
			else {
				FAIL_IF(emit_load_imm64(compiler, TMP_REG1, src1w));
				inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
				FAIL_IF(!inst);
				*inst = TEST_rm_r;
			}
#else
			inst = emit_x86_instruction(compiler, 1, src1, src1w, src2, src2w);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
#endif
			return SLJIT_SUCCESS;
		}
		else if (FAST_IS_REG(src2)) {
			inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
			return SLJIT_SUCCESS;
		}
	}

	EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
	if (src2 & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (IS_HALFWORD(src2w) || compiler->mode32) {
			inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
			FAIL_IF(!inst);
			*inst = GROUP_F7;
		}
		else {
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w));
			inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, TMP_REG1, 0);
			FAIL_IF(!inst);
			*inst = TEST_rm_r;
		}
#else
		inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REG1, 0);
		FAIL_IF(!inst);
		*inst = GROUP_F7;
#endif
	}
	else {
		inst = emit_x86_instruction(compiler, 1, TMP_REG1, 0, src2, src2w);
		FAIL_IF(!inst);
		*inst = TEST_rm_r;
	}
	return SLJIT_SUCCESS;
}
static sljit_s32 emit_shift(struct sljit_compiler *compiler,
	sljit_u8 mode,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	sljit_u8* inst;

	if ((src2 & SLJIT_IMM) || (src2 == SLJIT_PREF_SHIFT_REG)) {
		if (dst == src1 && dstw == src1w) {
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw);
			FAIL_IF(!inst);
			*inst |= mode;
			return SLJIT_SUCCESS;
		}
		if (dst == SLJIT_UNUSED) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
			FAIL_IF(!inst);
			*inst |= mode;
			return SLJIT_SUCCESS;
		}
		if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
			EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
			FAIL_IF(!inst);
			*inst |= mode;
			EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
			return SLJIT_SUCCESS;
		}
		if (FAST_IS_REG(dst)) {
			EMIT_MOV(compiler, dst, 0, src1, src1w);
			inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0);
			FAIL_IF(!inst);
			*inst |= mode;
			return SLJIT_SUCCESS;
		}

		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REG1, 0);
		FAIL_IF(!inst);
		*inst |= mode;
		EMIT_MOV(compiler, dst, dstw, TMP_REG1, 0);
		return SLJIT_SUCCESS;
	}

	if (dst == SLJIT_PREF_SHIFT_REG) {
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
		FAIL_IF(!inst);
		*inst |= mode;
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
	}
	else if (SLOW_IS_REG(dst) && dst != src2 && !ADDRESSING_DEPENDS_ON(src2, dst)) {
		if (src1 != dst)
			EMIT_MOV(compiler, dst, 0, src1, src1w);
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_PREF_SHIFT_REG, 0);
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0);
		FAIL_IF(!inst);
		*inst |= mode;
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
	}
	else {
		/* This case is complex since ecx itself may be used for
		   addressing, and this case must be supported as well. */
		EMIT_MOV(compiler, TMP_REG1, 0, src1, src1w);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), 0, SLJIT_PREF_SHIFT_REG, 0);
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
		FAIL_IF(!inst);
		*inst |= mode;
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_SP), 0);
#else
		EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_PREF_SHIFT_REG, 0);
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
		inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REG1, 0);
		FAIL_IF(!inst);
		*inst |= mode;
		EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG2, 0);
#endif
		if (dst != SLJIT_UNUSED)
			return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	}

	return SLJIT_SUCCESS;
}
static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler,
	sljit_u8 mode, sljit_s32 set_flags,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	/* The CPU does not set flags if the shift count is 0. */
	if (src2 & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if ((src2w & 0x3f) != 0 || (compiler->mode32 && (src2w & 0x1f) != 0))
			return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
#else
		if ((src2w & 0x1f) != 0)
			return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
#endif
		if (!set_flags)
			return emit_mov(compiler, dst, dstw, src1, src1w);
		/* OR dst, src, 0 */
		return emit_cum_binary(compiler, BINARY_OPCODE(OR),
			dst, dstw, src1, src1w, SLJIT_IMM, 0);
	}

	if (!set_flags)
		return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);

	if (!FAST_IS_REG(dst))
		FAIL_IF(emit_cmp_binary(compiler, src1, src1w, SLJIT_IMM, 0));

	FAIL_IF(emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w));

	if (FAST_IS_REG(dst))
		return emit_cmp_binary(compiler, (dst == SLJIT_UNUSED) ? TMP_REG1 : dst, dstw, SLJIT_IMM, 0);
	return SLJIT_SUCCESS;
}
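/* Example of the quirk handled above: "shl eax, 0" leaves all status
   flags unchanged, so when the caller requested SLJIT_SET_Z the code
   cannot simply emit the shift. For an immediate zero count it emits
   "or dst, 0" instead, which sets ZF according to the value. */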
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	CHECK_ERROR();
	CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
	ADJUST_LOCAL_OFFSET(dst, dstw);
	ADJUST_LOCAL_OFFSET(src1, src1w);
	ADJUST_LOCAL_OFFSET(src2, src2w);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);
	CHECK_EXTRA_REGS(src1, src1w, (void)0);
	CHECK_EXTRA_REGS(src2, src2w, (void)0);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = op & SLJIT_I32_OP;
#endif

	if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
		return SLJIT_SUCCESS;

	switch (GET_OPCODE(op)) {
	case SLJIT_ADD:
		if (!HAS_FLAGS(op)) {
			if (emit_lea_binary(compiler, dst, dstw, src1, src1w, src2, src2w) != SLJIT_ERR_UNSUPPORTED)
				return compiler->error;
		}
		return emit_cum_binary(compiler, BINARY_OPCODE(ADD),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ADDC:
		return emit_cum_binary(compiler, BINARY_OPCODE(ADC),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SUB:
		if (!HAS_FLAGS(op)) {
			if ((src2 & SLJIT_IMM) && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED)
				return compiler->error;
		}

		if (dst == SLJIT_UNUSED)
			return emit_cmp_binary(compiler, src1, src1w, src2, src2w);
		return emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SUBC:
		return emit_non_cum_binary(compiler, BINARY_OPCODE(SBB),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_MUL:
		return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_AND:
		if (dst == SLJIT_UNUSED)
			return emit_test_binary(compiler, src1, src1w, src2, src2w);
		return emit_cum_binary(compiler, BINARY_OPCODE(AND),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_OR:
		return emit_cum_binary(compiler, BINARY_OPCODE(OR),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_XOR:
		return emit_cum_binary(compiler, BINARY_OPCODE(XOR),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_SHL:
		return emit_shift_with_flags(compiler, SHL, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_LSHR:
		return emit_shift_with_flags(compiler, SHR, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	case SLJIT_ASHR:
		return emit_shift_with_flags(compiler, SAR, HAS_FLAGS(op),
			dst, dstw, src1, src1w, src2, src2w);
	}

	return SLJIT_SUCCESS;
}
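/* Example of the SLJIT_ADD fast path above: when no flags are
   requested, an add can often be expressed as
   "lea dst, [src1 + src2]" (or [src1 + imm]), which allows dst to
   differ from both sources and leaves the status flags untouched. */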
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
{
	CHECK_REG_INDEX(check_sljit_get_register_index(reg));
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (reg >= SLJIT_R3 && reg <= SLJIT_R8)
		return -1;
#endif
	return reg_map[reg];
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
{
	CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	return reg;
#else
	return freg_map[reg];
#endif
}
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
	void *instruction, sljit_s32 size)
{
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));

	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);
	INC_SIZE(size);
	SLJIT_MEMCPY(inst, instruction, size);
	return SLJIT_SUCCESS;
}
/* --------------------------------------------------------------------- */
/*  Floating point operators                                             */
/* --------------------------------------------------------------------- */
/* Alignment(3) + 4 * 16 bytes. */
static sljit_s32 sse2_data[3 + (4 * 4)];
static sljit_s32 *sse2_buffer;

static void init_compiler(void)
{
	/* Align to 16 bytes. */
	sse2_buffer = (sljit_s32*)(((sljit_uw)sse2_data + 15) & ~0xf);

	/* Single precision constants (each constant is 16 bytes long). */
	sse2_buffer[0] = 0x80000000;
	sse2_buffer[4] = 0x7fffffff;
	/* Double precision constants (each constant is 16 bytes long). */
	sse2_buffer[8] = 0;
	sse2_buffer[9] = 0x80000000;
	sse2_buffer[12] = 0xffffffff;
	sse2_buffer[13] = 0x7fffffff;
}
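/* These 16-byte masks implement negation and absolute value in
   sljit_emit_fop1 below: XORPD with the 0x80000000... pattern flips
   the sign bit, ANDPD with the 0x7fffffff... pattern clears it.
   sse2_buffer + 0 and + 4 hold the single precision masks,
   + 8 and + 12 the double precision ones. */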
static sljit_s32 emit_sse2(struct sljit_compiler *compiler, sljit_u8 opcode,
	sljit_s32 single, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
{
	sljit_u8 *inst;

	inst = emit_x86_instruction(compiler, 2 | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
	FAIL_IF(!inst);
	*inst++ = GROUP_0F;
	*inst = opcode;
	return SLJIT_SUCCESS;
}

static sljit_s32 emit_sse2_logic(struct sljit_compiler *compiler, sljit_u8 opcode,
	sljit_s32 pref66, sljit_s32 xmm1, sljit_s32 xmm2, sljit_sw xmm2w)
{
	sljit_u8 *inst;

	inst = emit_x86_instruction(compiler, 2 | (pref66 ? EX86_PREF_66 : 0) | EX86_SSE2, xmm1, 0, xmm2, xmm2w);
	FAIL_IF(!inst);
	*inst++ = GROUP_0F;
	*inst = opcode;
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)
{
	return emit_sse2(compiler, MOVSD_x_xm, single, dst, src, srcw);
}

static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler,
	sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)
{
	return emit_sse2(compiler, MOVSD_xm_x, single, src, dst, dstw);
}
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
	sljit_u8 *inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)
		compiler->mode32 = 0;
#endif

	inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_F32_OP) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP2, dst_r, 0, src, srcw);
	FAIL_IF(!inst);
	*inst++ = GROUP_0F;
	*inst = CVTTSD2SI_r_xm;

	if (dst & SLJIT_MEM)
		return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
	return SLJIT_SUCCESS;
}
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 dst, sljit_sw dstw,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
	sljit_u8 *inst;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)
		compiler->mode32 = 0;
#endif

	if (src & SLJIT_IMM) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
			srcw = (sljit_s32)srcw;
#endif
		EMIT_MOV(compiler, TMP_REG1, 0, src, srcw);
		src = TMP_REG1;
		srcw = 0;
	}

	inst = emit_x86_instruction(compiler, 2 | ((op & SLJIT_F32_OP) ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2_OP1, dst_r, 0, src, srcw);
	FAIL_IF(!inst);
	*inst++ = GROUP_0F;
	*inst = CVTSI2SD_x_rm;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 1;
#endif
	if (dst_r == TMP_FREG)
		return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
	return SLJIT_SUCCESS;
}
static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
	sljit_s32 src1, sljit_sw src1w,
	sljit_s32 src2, sljit_sw src2w)
{
	if (!FAST_IS_REG(src1)) {
		FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
		src1 = TMP_FREG;
	}

	return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_F32_OP), src1, src2, src2w);
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_s32 dst_r;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 1;
#endif

    CHECK_ERROR();
    SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);

    if (GET_OPCODE(op) == SLJIT_MOV_F64) {
        if (FAST_IS_REG(dst))
            return emit_sse2_load(compiler, op & SLJIT_F32_OP, dst, src, srcw);
        if (FAST_IS_REG(src))
            return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, src);
        FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src, srcw));
        return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
    }

    if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) {
        dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
        if (FAST_IS_REG(src)) {
            /* We overwrite the high bits of the source register. From the
               SLJIT point of view this is not an issue.
               Note: In SSE3, we could also use MOVDDUP and MOVSLDUP. */
            FAIL_IF(emit_sse2_logic(compiler, UNPCKLPD_x_xm, op & SLJIT_F32_OP, src, src, 0));
        }
        else {
            FAIL_IF(emit_sse2_load(compiler, !(op & SLJIT_F32_OP), TMP_FREG, src, srcw));
            src = TMP_FREG;
        }

        FAIL_IF(emit_sse2_logic(compiler, CVTPD2PS_x_xm, op & SLJIT_F32_OP, dst_r, src, 0));
        if (dst_r == TMP_FREG)
            return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
        return SLJIT_SUCCESS;
    }
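
    /* The conversion above relies on opcode 0F 5A doing double duty: with
       the 66 prefix (pref66 set, the f64 -> f32 direction) it is CVTPD2PS,
       and without it CVTPS2PD (f32 -> f64). Since the packed form converts
       both lanes, duplicating lane 0 first (UNPCKLPD/UNPCKLPS reg, reg)
       keeps the unused upper lane a valid value. Note also that the memory
       path loads with the inverted width flag: the load must use the source
       width, while the F32 bit of op describes the destination. */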

    if (FAST_IS_REG(dst)) {
        dst_r = dst;
        if (dst != src)
            FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src, srcw));
    }
    else {
        dst_r = TMP_FREG;
        FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src, srcw));
    }

    switch (GET_OPCODE(op)) {
    case SLJIT_NEG_F64:
        FAIL_IF(emit_sse2_logic(compiler, XORPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_F32_OP ? sse2_buffer : sse2_buffer + 8)));
        break;

    case SLJIT_ABS_F64:
        FAIL_IF(emit_sse2_logic(compiler, ANDPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_F32_OP ? sse2_buffer + 4 : sse2_buffer + 12)));
        break;
    }

    if (dst_r == TMP_FREG)
        return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
    return SLJIT_SUCCESS;
}
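
/* Worked example of the mask trick above: negating the f64 value 1.0
   (0x3FF0000000000000) with XORPD against the sign mask 0x8000000000000000
   yields 0xBFF0000000000000, i.e. -1.0, while ANDPD against
   0x7FFFFFFFFFFFFFFF clears the sign bit and implements fabs(). Neither can
   raise an FP exception: both are pure bitwise operations. */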

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 src1, sljit_sw src1w,
    sljit_s32 src2, sljit_sw src2w)
{
    sljit_s32 dst_r;

    CHECK_ERROR();
    CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
    ADJUST_LOCAL_OFFSET(dst, dstw);
    ADJUST_LOCAL_OFFSET(src1, src1w);
    ADJUST_LOCAL_OFFSET(src2, src2w);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 1;
#endif

    if (FAST_IS_REG(dst)) {
        dst_r = dst;
        if (dst == src1)
            ; /* Do nothing here. */
        else if (dst == src2 && (op == SLJIT_ADD_F64 || op == SLJIT_MUL_F64)) {
            /* Swap arguments. */
            src2 = src1;
            src2w = src1w;
        }
        else if (dst != src2)
            FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, dst_r, src1, src1w));
        else {
            dst_r = TMP_FREG;
            FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
        }
    }
    else {
        dst_r = TMP_FREG;
        FAIL_IF(emit_sse2_load(compiler, op & SLJIT_F32_OP, TMP_FREG, src1, src1w));
    }
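
    /* The operand shuffling above exploits commutativity: when dst == src2,
       ADD and MUL can simply swap their sources, while the non-commutative
       SUB and DIV (and, as written, the F32-flagged variants, which do not
       match the plain f64 opcode values) must build the result in TMP_FREG
       instead, so that src2 is not clobbered before it is used. */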

    switch (GET_OPCODE(op)) {
    case SLJIT_ADD_F64:
        FAIL_IF(emit_sse2(compiler, ADDSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
        break;

    case SLJIT_SUB_F64:
        FAIL_IF(emit_sse2(compiler, SUBSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
        break;

    case SLJIT_MUL_F64:
        FAIL_IF(emit_sse2(compiler, MULSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
        break;

    case SLJIT_DIV_F64:
        FAIL_IF(emit_sse2(compiler, DIVSD_x_xm, op & SLJIT_F32_OP, dst_r, src2, src2w));
        break;
    }

    if (dst_r == TMP_FREG)
        return emit_sse2_store(compiler, op & SLJIT_F32_OP, dst, dstw, TMP_FREG);
    return SLJIT_SUCCESS;
}
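
/* The scalar arithmetic opcodes passed to emit_sse2() differ only in their
   last byte: ADDSD is F2 0F 58, MULSD F2 0F 59, SUBSD F2 0F 5C and DIVSD
   F2 0F 5E; the F3-prefixed single precision forms (ADDSS etc.) are
   selected by the op & SLJIT_F32_OP argument. */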

/* --------------------------------------------------------------------- */
/*  Conditional instructions                                             */
/* --------------------------------------------------------------------- */

SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
{
    sljit_u8 *inst;
    struct sljit_label *label;

    CHECK_ERROR_PTR();
    CHECK_PTR(check_sljit_emit_label(compiler));

    if (compiler->last_label && compiler->last_label->size == compiler->size)
        return compiler->last_label;

    label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
    PTR_FAIL_IF(!label);
    set_label(label, compiler);

    inst = (sljit_u8*)ensure_buf(compiler, 2);
    PTR_FAIL_IF(!inst);

    *inst++ = 0;
    *inst++ = 0;

    return label;
}
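
/* The two bytes reserved above are not machine code: a zero length byte
   acts as an escape in the code buffer, and the byte after it tags the
   record type for the final code generation pass (this file uses 0 for
   labels, 1 for jumps and 2 for constants), which is where label addresses
   are resolved. */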

SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
{
    sljit_u8 *inst;
    struct sljit_jump *jump;

    CHECK_ERROR_PTR();
    CHECK_PTR(check_sljit_emit_jump(compiler, type));

    jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
    PTR_FAIL_IF_NULL(jump);
    set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
    type &= 0xff;

    /* Worst case size. */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    compiler->size += (type >= SLJIT_JUMP) ? 5 : 6;
#else
    compiler->size += (type >= SLJIT_JUMP) ? (10 + 3) : (2 + 10 + 3);
#endif

    inst = (sljit_u8*)ensure_buf(compiler, 2);
    PTR_FAIL_IF_NULL(inst);

    *inst++ = 0;
    *inst++ = 1;
    return jump;
}
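
/* How the worst case sizes are derived from the x86 encodings: on 32 bit,
   jmp rel32 is E9 + imm32 = 5 bytes and jcc rel32 is 0F 8x + imm32 = 6
   bytes. On 64 bit the target may lie outside the +/-2GB rel32 range, so
   the worst case is mov r64, imm64 (REX.W B8+r, 10 bytes) followed by a
   jmp/call through the register (3 bytes including REX); a conditional
   jump needs 2 more bytes for the inverted jcc rel8 that skips the
   sequence. */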

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
{
    sljit_u8 *inst;
    struct sljit_jump *jump;

    CHECK_ERROR();
    CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
    ADJUST_LOCAL_OFFSET(src, srcw);

    CHECK_EXTRA_REGS(src, srcw, (void)0);

    if (src == SLJIT_IMM) {
        jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
        FAIL_IF_NULL(jump);
        set_jump(jump, compiler, JUMP_ADDR);
        jump->u.target = srcw;

        /* Worst case size. */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
        compiler->size += 5;
#else
        compiler->size += 10 + 3;
#endif

        inst = (sljit_u8*)ensure_buf(compiler, 2);
        FAIL_IF_NULL(inst);

        *inst++ = 0;
        *inst++ = 1;
    }
    else {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
        /* REX_W is not necessary (src is not immediate). */
        compiler->mode32 = 1;
#endif
        inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
        FAIL_IF(!inst);
        *inst++ = GROUP_FF;
        *inst |= (type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm;
    }
    return SLJIT_SUCCESS;
}
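
/* The register/memory form uses opcode group FF: call r/m is FF /2 and jmp
   r/m is FF /4. CALL_rm and JMP_rm are those /digit values pre-shifted into
   the reg field of the ModRM byte, which is why they are OR-ed into the
   byte emitted by emit_x86_instruction(). Indirect jumps and calls default
   to a 64 bit operand in long mode, so REX.W would be redundant (hence
   mode32 = 1 above). */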

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
    sljit_s32 dst, sljit_sw dstw,
    sljit_s32 type)
{
    sljit_u8 *inst;
    sljit_u8 cond_set = 0;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    sljit_s32 reg;
#endif
    /* ADJUST_LOCAL_OFFSET and CHECK_EXTRA_REGS might overwrite these values. */
    sljit_s32 dst_save = dst;
    sljit_sw dstw_save = dstw;

    CHECK_ERROR();
    CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));

    ADJUST_LOCAL_OFFSET(dst, dstw);
    CHECK_EXTRA_REGS(dst, dstw, (void)0);

    type &= 0xff;
    /* setcc = jcc + 0x10. */
    cond_set = get_jump_code(type) + 0x10;
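
    /* Example of the setcc = jcc + 0x10 relation: get_jump_code() returns
       the second opcode byte of the near conditional jump (0F 80+cc), and
       setcc is encoded as 0F 90+cc with the same condition code, so JE
       0F 84 becomes SETE 0F 94. */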

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst)) {
        inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 3);
        FAIL_IF(!inst);
        INC_SIZE(4 + 3);
        /* Set low register to conditional flag. */
        *inst++ = (reg_map[TMP_REG1] <= 7) ? REX : REX_B;
        *inst++ = GROUP_0F;
        *inst++ = cond_set;
        *inst++ = MOD_REG | reg_lmap[TMP_REG1];
        *inst++ = REX | (reg_map[TMP_REG1] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B);
        *inst++ = OR_rm8_r8;
        *inst++ = MOD_REG | (reg_lmap[TMP_REG1] << 3) | reg_lmap[dst];
        return SLJIT_SUCCESS;
    }
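
    /* A REX prefix is emitted before setcc even when none of its bits are
       needed: with any REX prefix present, the byte-register encodings 4-7
       select SPL/BPL/SIL/DIL instead of AH/CH/DH/BH, so the low byte of
       every register becomes addressable and the reg_lmap values can be
       used directly. */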

    reg = (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG1;

    inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 4);
    FAIL_IF(!inst);
    INC_SIZE(4 + 4);
    /* Set low register to conditional flag. */
    *inst++ = (reg_map[reg] <= 7) ? REX : REX_B;
    *inst++ = GROUP_0F;
    *inst++ = cond_set;
    *inst++ = MOD_REG | reg_lmap[reg];
    *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R));
    /* The movzx instruction does not affect flags. */
    *inst++ = GROUP_0F;
    *inst++ = MOVZX_r_rm8;
    *inst = MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg];

    if (reg != TMP_REG1)
        return SLJIT_SUCCESS;

    if (GET_OPCODE(op) < SLJIT_ADD) {
        compiler->mode32 = GET_OPCODE(op) != SLJIT_MOV;
        return emit_mov(compiler, dst, dstw, TMP_REG1, 0);
    }

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
        || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
    compiler->skip_checks = 1;
#endif
    return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);

#else
    /* The SLJIT_CONFIG_X86_32 code path starts here. */
    if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst)) {
        if (reg_map[dst] <= 4) {
            /* Low byte is accessible. */
            inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
            FAIL_IF(!inst);
            INC_SIZE(3 + 3);
            /* Set low byte to conditional flag. */
            *inst++ = GROUP_0F;
            *inst++ = cond_set;
            *inst++ = MOD_REG | reg_map[dst];

            *inst++ = GROUP_0F;
            *inst++ = MOVZX_r_rm8;
            *inst = MOD_REG | (reg_map[dst] << 3) | reg_map[dst];
            return SLJIT_SUCCESS;
        }
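
        /* reg_map[dst] <= 4 means the encoding is one of 0-3 (EAX, ECX,
           EDX, EBX), the only ia32 registers whose low byte (AL..BL) is
           addressable without a REX prefix; encoding 4 is ESP, which never
           appears as a destination here. Byte encodings 4-7 would select
           AH/CH/DH/BH, which do not help for ESI/EDI/EBP destinations. */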

        /* Low byte is not accessible. */
        if (cpu_has_cmov == -1)
            get_cpu_features();

        if (cpu_has_cmov) {
            EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, 1);
            /* A xor reg, reg operation would overwrite the flags. */
            EMIT_MOV(compiler, dst, 0, SLJIT_IMM, 0);

            inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
            FAIL_IF(!inst);
            INC_SIZE(3);

            *inst++ = GROUP_0F;
            /* cmovcc = setcc - 0x50. */
            *inst++ = cond_set - 0x50;
            *inst++ = MOD_REG | (reg_map[dst] << 3) | reg_map[TMP_REG1];
            return SLJIT_SUCCESS;
        }
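
        /* Example of the cmovcc = setcc - 0x50 relation: both share the
           condition code in the low nibble, with setcc at 0F 90+cc and
           cmovcc at 0F 40+cc, so the SETE byte 0x94 becomes CMOVE 0x44.
           The mov imm32 instructions above are safe here because, unlike
           xor reg, reg, they leave the flags untouched. */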

        inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
        FAIL_IF(!inst);
        INC_SIZE(1 + 3 + 3 + 1);
        *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
        /* Set al to conditional flag. */
        *inst++ = GROUP_0F;
        *inst++ = cond_set;
        *inst++ = MOD_REG | 0 /* eax */;

        *inst++ = GROUP_0F;
        *inst++ = MOVZX_r_rm8;
        *inst++ = MOD_REG | (reg_map[dst] << 3) | 0 /* eax */;
        *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
        return SLJIT_SUCCESS;
    }
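
    /* XCHG_EAX_r is the one byte 90+r form of xchg eax, r32. Since xchg
       does not modify the flags, it is a cheap way to borrow EAX (whose
       low byte AL is always addressable) around the setcc and swap the
       original value back afterwards. */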

    if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
        SLJIT_ASSERT(reg_map[SLJIT_R0] == 0);

        if (dst != SLJIT_R0) {
            inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 2 + 1);
            FAIL_IF(!inst);
            INC_SIZE(1 + 3 + 2 + 1);
            /* Set low register to conditional flag. */
            *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
            *inst++ = GROUP_0F;
            *inst++ = cond_set;
            *inst++ = MOD_REG | 0 /* eax */;
            *inst++ = OR_rm8_r8;
            *inst++ = MOD_REG | (0 /* eax */ << 3) | reg_map[dst];
            *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
        }
        else {
            inst = (sljit_u8*)ensure_buf(compiler, 1 + 2 + 3 + 2 + 2);
            FAIL_IF(!inst);
            INC_SIZE(2 + 3 + 2 + 2);
            /* Set low register to conditional flag. */
            *inst++ = XCHG_r_rm;
            *inst++ = MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1];
            *inst++ = GROUP_0F;
            *inst++ = cond_set;
            *inst++ = MOD_REG | 1 /* ecx */;
            *inst++ = OR_rm8_r8;
            *inst++ = MOD_REG | (1 /* ecx */ << 3) | 0 /* eax */;
            *inst++ = XCHG_r_rm;
            *inst++ = MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REG1];
        }
        return SLJIT_SUCCESS;
    }

    /* Set TMP_REG1 to the bit. */
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1);
    FAIL_IF(!inst);
    INC_SIZE(1 + 3 + 3 + 1);
    *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];
    /* Set al to conditional flag. */
    *inst++ = GROUP_0F;
    *inst++ = cond_set;
    *inst++ = MOD_REG | 0 /* eax */;

    *inst++ = GROUP_0F;
    *inst++ = MOVZX_r_rm8;
    *inst++ = MOD_REG | (0 << 3) /* eax */ | 0 /* eax */;

    *inst++ = XCHG_EAX_r + reg_map[TMP_REG1];

    if (GET_OPCODE(op) < SLJIT_ADD)
        return emit_mov(compiler, dst, dstw, TMP_REG1, 0);

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
        || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
    compiler->skip_checks = 1;
#endif
    return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REG1, 0);
#endif /* SLJIT_CONFIG_X86_64 */
}

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
    sljit_s32 dst_reg,
    sljit_s32 src, sljit_sw srcw)
{
    sljit_u8 *inst;

    CHECK_ERROR();
    CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    dst_reg &= ~SLJIT_I32_OP;

    if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV) || (dst_reg >= SLJIT_R3 && dst_reg <= SLJIT_S3))
        return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
#else
    if (!sljit_has_cpu_feature(SLJIT_HAS_CMOV))
        return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
#endif

    /* ADJUST_LOCAL_OFFSET is not needed. */
    CHECK_EXTRA_REGS(src, srcw, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = dst_reg & SLJIT_I32_OP;
    dst_reg &= ~SLJIT_I32_OP;
#endif

    if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
        EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_IMM, srcw);
        src = TMP_REG1;
        srcw = 0;
    }

    inst = emit_x86_instruction(compiler, 2, dst_reg, 0, src, srcw);
    FAIL_IF(!inst);
    *inst++ = GROUP_0F;
    *inst = get_jump_code(type & 0xff) - 0x40;
    return SLJIT_SUCCESS;
}
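
/* The conditional move is encoded directly: get_jump_code() returns the jcc
   byte 0F 80+cc and cmovcc is 0F 40+cc, hence the - 0x40 (for example JNE
   0F 85 -> CMOVNE 0F 45). On x86-32 the generic fallback also covers
   SLJIT_R3..SLJIT_S3, which live on the stack (see CHECK_EXTRA_REGS) and
   therefore cannot be the destination of a register-only cmov. */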

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
{
    CHECK_ERROR();
    CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
    ADJUST_LOCAL_OFFSET(dst, dstw);

    CHECK_EXTRA_REGS(dst, dstw, (void)0);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 0;
#endif

    ADJUST_LOCAL_OFFSET(SLJIT_MEM1(SLJIT_SP), offset);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (NOT_HALFWORD(offset)) {
        FAIL_IF(emit_load_imm64(compiler, TMP_REG1, offset));
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
        SLJIT_ASSERT(emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0) != SLJIT_ERR_UNSUPPORTED);
        return compiler->error;
#else
        return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, TMP_REG1, 0);
#endif
    }
#endif

    if (offset != 0)
        return emit_lea_binary(compiler, dst, dstw, SLJIT_SP, 0, SLJIT_IMM, offset);
    return emit_mov(compiler, dst, dstw, SLJIT_SP, 0);
}
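
/* NOT_HALFWORD(offset) checks whether the offset fits in a signed 32 bit
   value (see HALFWORD_MAX). lea can only encode a disp32, so larger offsets
   are first loaded into TMP_REG1 with mov r64, imm64 and then combined with
   SLJIT_SP through the base + index form of lea. */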

SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
{
    sljit_u8 *inst;
    struct sljit_const *const_;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    sljit_s32 reg;
#endif

    CHECK_ERROR_PTR();
    CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
    ADJUST_LOCAL_OFFSET(dst, dstw);

    CHECK_EXTRA_REGS(dst, dstw, (void)0);

    const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
    PTR_FAIL_IF(!const_);
    set_const(const_, compiler);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 0;
    reg = FAST_IS_REG(dst) ? dst : TMP_REG1;

    if (emit_load_imm64(compiler, reg, init_value))
        return NULL;
#else
    if (emit_mov(compiler, dst, dstw, SLJIT_IMM, init_value))
        return NULL;
#endif

    inst = (sljit_u8*)ensure_buf(compiler, 2);
    PTR_FAIL_IF(!inst);

    *inst++ = 0;
    *inst++ = 2;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (dst & SLJIT_MEM)
        if (emit_mov(compiler, dst, dstw, TMP_REG1, 0))
            return NULL;
#endif

    return const_;
}
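
/* On x86-64 the constant is always emitted as the long mov r64, imm64 form
   (mode32 = 0), never as a shorter sign-extended move: this keeps a fixed
   8 byte immediate field inside the instruction that sljit_set_const() can
   patch later. The 32 bit path stores a plain imm32 for the same reason. */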

SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
{
    SLJIT_UNUSED_ARG(executable_offset);
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    sljit_unaligned_store_sw((void*)addr, new_target - (addr + 4) - (sljit_uw)executable_offset);
#else
    sljit_unaligned_store_sw((void*)addr, (sljit_sw) new_target);
#endif
}
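
/* Patching arithmetic: on x86-32 the 4 byte field at addr holds a rel32
   displacement, measured from the end of the field in the executable
   mapping, i.e. (addr + 4) + executable_offset, hence both subtractions.
   On x86-64 the field is the absolute imm64 of a mov r64, imm64, so the
   target address is stored as-is. */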

SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
{
    SLJIT_UNUSED_ARG(executable_offset);
    sljit_unaligned_store_sw((void*)addr, new_constant);
}