2 * Stack-less Just-In-Time compiler
4 * Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
6 * Redistribution and use in source and binary forms, with or without modification, are
7 * permitted provided that the following conditions are met:
9 * 1. Redistributions of source code must retain the above copyright notice, this list of
10 * conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright notice, this list
13 * of conditions and the following disclaimer in the documentation and/or other materials
14 * provided with the distribution.
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
29 return "ARM-64" SLJIT_CPUINFO;
32 /* Length of an instruction word */
33 typedef sljit_u32 sljit_ins;
37 #define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
38 #define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)
39 #define TMP_LR (SLJIT_NUMBER_OF_REGISTERS + 4)
40 #define TMP_FP (SLJIT_NUMBER_OF_REGISTERS + 5)
42 #define TMP_FREG1 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
43 #define TMP_FREG2 (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
45 /* r18 - platform register, currently not used */
46 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 8] = {
47 31, 0, 1, 2, 3, 4, 5, 6, 7, 11, 12, 13, 14, 15, 16, 17, 8, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 31, 9, 10, 30, 29
50 static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
51 0, 0, 1, 2, 3, 4, 5, 6, 7
54 #define W_OP (1 << 31)
55 #define RD(rd) (reg_map[rd])
56 #define RT(rt) (reg_map[rt])
57 #define RN(rn) (reg_map[rn] << 5)
58 #define RT2(rt2) (reg_map[rt2] << 10)
59 #define RM(rm) (reg_map[rm] << 16)
60 #define VD(vd) (freg_map[vd])
61 #define VT(vt) (freg_map[vt])
62 #define VN(vn) (freg_map[vn] << 5)
63 #define VM(vm) (freg_map[vm] << 16)
/* --------------------------------------------------------------------- */
/*  Instruction forms                                                    */
/* --------------------------------------------------------------------- */

/* Base A64 opcode words (64-bit forms where a size bit exists; the 32-bit
   form is selected by XOR-ing W_OP / inv_bits into the word).
   NOTE(review): the B, BL and BR defines were missing from this copy but
   are used below (e.g. in jump patching) — restored from upstream sljit. */
#define ADC 0x9a000000
#define ADD 0x8b000000
#define ADDE 0x8b200000
#define ADDI 0x91000000
#define AND 0x8a000000
#define ANDI 0x92000000
#define ASRV 0x9ac02800
#define B 0x14000000
#define B_CC 0x54000000
#define BL 0x94000000
#define BLR 0xd63f0000
#define BR 0xd61f0000
#define BRK 0xd4200000
#define CBZ 0xb4000000
#define CLZ 0xdac01000
#define CSEL 0x9a800000
#define CSINC 0x9a800400
#define EOR 0xca000000
#define EORI 0xd2000000
#define FABS 0x1e60c000
#define FADD 0x1e602800
#define FCMP 0x1e602000
#define FCVT 0x1e224000
#define FCVTZS 0x9e780000
#define FDIV 0x1e601800
#define FMOV 0x1e604000
#define FMUL 0x1e600800
#define FNEG 0x1e614000
#define FSUB 0x1e603800
#define LDRI 0xf9400000
#define LDP 0xa9400000
#define LDP_PRE 0xa9c00000
#define LDR_PRE 0xf8400c00
#define LSLV 0x9ac02000
#define LSRV 0x9ac02400
#define MADD 0x9b000000
#define MOVK 0xf2800000
#define MOVN 0x92800000
#define MOVZ 0xd2800000
#define NOP 0xd503201f
#define ORN 0xaa200000
#define ORR 0xaa000000
#define ORRI 0xb2000000
#define RET 0xd65f0000
#define SBC 0xda000000
#define SBFM 0x93000000
#define SCVTF 0x9e620000
#define SDIV 0x9ac00c00
#define SMADDL 0x9b200000
#define SMULH 0x9b403c00
#define STP 0xa9000000
#define STP_PRE 0xa9800000
#define STRB 0x38206800
#define STRBI 0x39000000
#define STRI 0xf9000000
#define STR_FI 0x3d000000
#define STR_FR 0x3c206800
#define STUR_FI 0x3c000000
#define STURBI 0x38000000
#define SUB 0xcb000000
#define SUBI 0xd1000000
#define SUBS 0xeb000000
#define UBFM 0xd3000000
#define UDIV 0x9ac00800
#define UMULH 0x9bc03c00
135 /* dest_reg is the absolute name of the register
136 Useful for reordering instructions in the delay slot. */
137 static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
139 sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
143 return SLJIT_SUCCESS;
146 static SLJIT_INLINE sljit_s32 emit_imm64_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_uw imm)
148 FAIL_IF(push_inst(compiler, MOVZ | RD(dst) | ((imm & 0xffff) << 5)));
149 FAIL_IF(push_inst(compiler, MOVK | RD(dst) | (((imm >> 16) & 0xffff) << 5) | (1 << 21)));
150 FAIL_IF(push_inst(compiler, MOVK | RD(dst) | (((imm >> 32) & 0xffff) << 5) | (2 << 21)));
151 return push_inst(compiler, MOVK | RD(dst) | ((imm >> 48) << 5) | (3 << 21));
154 static SLJIT_INLINE void modify_imm64_const(sljit_ins* inst, sljit_uw new_imm)
156 sljit_s32 dst = inst[0] & 0x1f;
157 SLJIT_ASSERT((inst[0] & 0xffe00000) == MOVZ && (inst[1] & 0xffe00000) == (MOVK | (1 << 21)));
158 inst[0] = MOVZ | dst | ((new_imm & 0xffff) << 5);
159 inst[1] = MOVK | dst | (((new_imm >> 16) & 0xffff) << 5) | (1 << 21);
160 inst[2] = MOVK | dst | (((new_imm >> 32) & 0xffff) << 5) | (2 << 21);
161 inst[3] = MOVK | dst | ((new_imm >> 48) << 5) | (3 << 21);
164 static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
167 sljit_uw target_addr;
169 if (jump->flags & SLJIT_REWRITABLE_JUMP) {
170 jump->flags |= PATCH_ABS64;
174 if (jump->flags & JUMP_ADDR)
175 target_addr = jump->u.target;
177 SLJIT_ASSERT(jump->flags & JUMP_LABEL);
178 target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
181 diff = (sljit_sw)target_addr - (sljit_sw)(code_ptr + 4) - executable_offset;
183 if (jump->flags & IS_COND) {
184 diff += sizeof(sljit_ins);
185 if (diff <= 0xfffff && diff >= -0x100000) {
186 code_ptr[-5] ^= (jump->flags & IS_CBZ) ? (0x1 << 24) : 0x1;
187 jump->addr -= sizeof(sljit_ins);
188 jump->flags |= PATCH_COND;
191 diff -= sizeof(sljit_ins);
194 if (diff <= 0x7ffffff && diff >= -0x8000000) {
195 jump->flags |= PATCH_B;
199 if (target_addr <= 0xffffffffl) {
200 if (jump->flags & IS_COND)
201 code_ptr[-5] -= (2 << 5);
202 code_ptr[-2] = code_ptr[0];
206 if (target_addr <= 0xffffffffffffl) {
207 if (jump->flags & IS_COND)
208 code_ptr[-5] -= (1 << 5);
209 jump->flags |= PATCH_ABS48;
210 code_ptr[-1] = code_ptr[0];
214 jump->flags |= PATCH_ABS64;
/* NOTE(review): this copy of sljit_generate_code() is damaged — stray
   source line numbers are fused into the text and several statements
   (declarations, do/while loop headers, `break`s, closing braces and the
   final `return`) are missing.  It cannot compile as-is; restore from the
   upstream sljit sources.  The comments below annotate the surviving
   lines only. */
/* Second pass of the compiler: copies the buffered instruction words into
   executable memory, resolves labels/jumps/constants and patches or
   shortens the jump sequences chosen by detect_jump_type(). */
218 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
220 struct sljit_memory_fragment *buf;
226 sljit_sw executable_offset;
230 struct sljit_label *label;
231 struct sljit_jump *jump;
232 struct sljit_const *const_;
235 CHECK_PTR(check_sljit_generate_code(compiler));
236 reverse_buf(compiler);
238 code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins));
239 PTR_FAIL_WITH_EXEC_IF(code);
244 executable_offset = SLJIT_EXEC_OFFSET(code);
246 label = compiler->labels;
247 jump = compiler->jumps;
248 const_ = compiler->consts;
/* Copy loop: walk every memory fragment, one instruction word at a time,
   resolving any label/jump/const that refers to the current word index. */
251 buf_ptr = (sljit_ins*)buf->memory;
252 buf_end = buf_ptr + (buf->used_size >> 2);
254 *code_ptr = *buf_ptr++;
255 /* These structures are ordered by their address. */
256 SLJIT_ASSERT(!label || label->size >= word_count);
257 SLJIT_ASSERT(!jump || jump->addr >= word_count);
258 SLJIT_ASSERT(!const_ || const_->addr >= word_count);
259 if (label && label->size == word_count) {
260 label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
261 label->size = code_ptr - code;
264 if (jump && jump->addr == word_count) {
265 jump->addr = (sljit_uw)(code_ptr - 4);
/* detect_jump_type() may shorten the jump; drop the unused words. */
266 code_ptr -= detect_jump_type(jump, code_ptr, code, executable_offset);
269 if (const_ && const_->addr == word_count) {
270 const_->addr = (sljit_uw)code_ptr;
271 const_ = const_->next;
275 } while (buf_ptr < buf_end);
280 if (label && label->size == word_count) {
281 label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
282 label->size = code_ptr - code;
286 SLJIT_ASSERT(!label);
288 SLJIT_ASSERT(!const_);
289 SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
/* Patch pass: rewrite every jump with its final target address. */
291 jump = compiler->jumps;
294 addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
295 buf_ptr = (sljit_ins *)jump->addr;
296 if (jump->flags & PATCH_B) {
298 addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
299 SLJIT_ASSERT((sljit_sw)addr <= 0x1ffffff && (sljit_sw)addr >= -0x2000000);
300 buf_ptr[0] = ((jump->flags & IS_BL) ? BL : B) | (addr & 0x3ffffff);
301 if (jump->flags & IS_COND)
302 buf_ptr[-1] -= (4 << 5);
305 if (jump->flags & PATCH_COND) {
306 addr = (sljit_sw)(addr - (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset)) >> 2;
307 SLJIT_ASSERT((sljit_sw)addr <= 0x3ffff && (sljit_sw)addr >= -0x40000);
308 buf_ptr[0] = (buf_ptr[0] & ~0xffffe0) | ((addr & 0x7ffff) << 5);
/* Absolute (MOVZ/MOVK) form: emit as many MOVKs as the address width needs. */
312 SLJIT_ASSERT((jump->flags & (PATCH_ABS48 | PATCH_ABS64)) || addr <= 0xffffffffl);
313 SLJIT_ASSERT((jump->flags & PATCH_ABS64) || addr <= 0xffffffffffffl);
315 dst = buf_ptr[0] & 0x1f;
316 buf_ptr[0] = MOVZ | dst | ((addr & 0xffff) << 5);
317 buf_ptr[1] = MOVK | dst | (((addr >> 16) & 0xffff) << 5) | (1 << 21);
318 if (jump->flags & (PATCH_ABS48 | PATCH_ABS64))
319 buf_ptr[2] = MOVK | dst | (((addr >> 32) & 0xffff) << 5) | (2 << 21);
320 if (jump->flags & PATCH_ABS64)
321 buf_ptr[3] = MOVK | dst | (((addr >> 48) & 0xffff) << 5) | (3 << 21);
326 compiler->error = SLJIT_ERR_COMPILED;
327 compiler->executable_offset = executable_offset;
328 compiler->executable_size = (code_ptr - code) * sizeof(sljit_ins);
330 code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
331 code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
/* Make the freshly written code visible to the instruction fetch unit. */
333 SLJIT_CACHE_FLUSH(code, code_ptr);
337 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
339 switch (feature_type) {
341 #ifdef SLJIT_IS_FPU_AVAILABLE
342 return SLJIT_IS_FPU_AVAILABLE;
344 /* Available by default. */
357 /* --------------------------------------------------------------------- */
358 /* Core code generator functions. */
359 /* --------------------------------------------------------------------- */
/* NOTE(review): this macro is truncated in this copy — the `result`
   initialization, the shift/accumulate statements inside each `if`, and
   the closing braces are missing, and stray line numbers are fused into
   the text.  Restore from upstream sljit.  Intent (from the surviving
   lines): binary-search the trailing-zero count of `value` in 32/16/8/4/
   2/1-bit steps, accumulating into `result`. */
361 #define COUNT_TRAILING_ZERO(value, result) \
363 if (!(value & 0xffffffff)) { \
367 if (!(value & 0xffff)) { \
371 if (!(value & 0xff)) { \
375 if (!(value & 0xf)) { \
379 if (!(value & 0x3)) { \
383 if (!(value & 0x1)) { \
/* Flag bit for logical_imm(): also validate that the immediate is encodable. */
388 #define LOGICAL_IMM_CHECK 0x100
/* NOTE(review): truncated in this copy — stray line numbers are fused in
   and several statements (declarations of uimm/mask/ins, early returns,
   the halving loop, negation handling, closing braces) are missing.
   Restore from upstream sljit.  Purpose (from surviving lines): encode a
   bitmask immediate into the N:immr:imms fields of an A64 logical
   instruction; `len` is 16 or 32 half-words, optionally OR-ed with
   LOGICAL_IMM_CHECK to reject unencodable values. */
390 static sljit_ins logical_imm(sljit_sw imm, sljit_s32 len)
392 sljit_s32 negated, ones, right;
396 if (len & LOGICAL_IMM_CHECK) {
397 len &= ~LOGICAL_IMM_CHECK;
/* all-zeros / all-ones immediates have no logical-immediate encoding */
398 if (len == 32 && (imm == 0 || imm == -1))
400 if (len == 16 && ((sljit_s32)imm == 0 || (sljit_s32)imm == -1))
404 SLJIT_ASSERT((len == 32 && imm != 0 && imm != -1)
405 || (len == 16 && (sljit_s32)imm != 0 && (sljit_s32)imm != -1));
407 uimm = (sljit_uw)imm;
/* find the smallest repeating element size */
414 mask = ((sljit_uw)1 << len) - 1;
415 if ((uimm & mask) != ((uimm >> len) & mask))
429 uimm &= ((sljit_uw)1 << len) - 1;
431 /* Unsigned right shift. */
432 COUNT_TRAILING_ZERO(uimm, right);
434 /* Signed shift. We also know that the highest bit is set. */
435 imm = (sljit_sw)~uimm;
436 SLJIT_ASSERT(imm < 0);
438 COUNT_TRAILING_ZERO(imm, ones);
446 ins = (0x3f - ((len << 1) - 1)) << 10;
449 return ins | ((len - ones - 1) << 10) | ((len - ones - right) << 16);
451 return ins | ((ones - 1) << 10) | ((len - right) << 16);
454 #undef COUNT_TRAILING_ZERO
/* NOTE(review): truncated in this copy — stray line numbers are fused in
   and several statements (the `bitmask` declaration, `if (imm <= 0xffff)`
   guards, loop bodies, closing braces) are missing.  Restore from
   upstream sljit.  Purpose (from surviving lines): emit the shortest
   MOVZ/MOVN/MOVK/ORR-immediate sequence that loads the 64-bit value
   `simm` into register `dst`. */
456 static sljit_s32 load_immediate(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw simm)
458 sljit_uw imm = (sljit_uw)simm;
459 sljit_s32 i, zeros, ones, first;
462 /* Handling simple immediates first. */
464 return push_inst(compiler, MOVZ | RD(dst) | (imm << 5));
466 if (simm < 0 && simm >= -0x10000)
467 return push_inst(compiler, MOVN | RD(dst) | ((~imm & 0xffff) << 5));
/* 32-bit values: at most two instructions (32-bit MOVN/MOVZ+MOVK or ORRI) */
469 if (imm <= 0xffffffffl) {
470 if ((imm & 0xffff) == 0)
471 return push_inst(compiler, MOVZ | RD(dst) | ((imm >> 16) << 5) | (1 << 21));
472 if ((imm & 0xffff0000l) == 0xffff0000)
473 return push_inst(compiler, (MOVN ^ W_OP) | RD(dst) | ((~imm & 0xffff) << 5));
474 if ((imm & 0xffff) == 0xffff)
475 return push_inst(compiler, (MOVN ^ W_OP) | RD(dst) | ((~imm & 0xffff0000l) >> (16 - 5)) | (1 << 21));
477 bitmask = logical_imm(simm, 16);
479 return push_inst(compiler, (ORRI ^ W_OP) | RD(dst) | RN(TMP_ZERO) | bitmask);
481 FAIL_IF(push_inst(compiler, MOVZ | RD(dst) | ((imm & 0xffff) << 5)));
482 return push_inst(compiler, MOVK | RD(dst) | ((imm & 0xffff0000l) >> (16 - 5)) | (1 << 21));
485 bitmask = logical_imm(simm, 32);
487 return push_inst(compiler, ORRI | RD(dst) | RN(TMP_ZERO) | bitmask);
489 if (simm < 0 && simm >= -0x100000000l) {
490 if ((imm & 0xffff) == 0xffff)
491 return push_inst(compiler, MOVN | RD(dst) | ((~imm & 0xffff0000l) >> (16 - 5)) | (1 << 21));
493 FAIL_IF(push_inst(compiler, MOVN | RD(dst) | ((~imm & 0xffff) << 5)));
494 return push_inst(compiler, MOVK | RD(dst) | ((imm & 0xffff0000l) >> (16 - 5)) | (1 << 21));
497 /* A large amount of number can be constructed from ORR and MOVx, but computing them is costly. */
/* count zero / all-one 16-bit chunks to pick MOVZ- or MOVN-based sequence */
501 for (i = 4; i > 0; i--) {
502 if ((simm & 0xffff) == 0)
504 if ((simm & 0xffff) == 0xffff)
509 simm = (sljit_sw)imm;
513 for (i = 0; i < 4; i++) {
514 if (!(simm & 0xffff)) {
520 FAIL_IF(push_inst(compiler, MOVN | RD(dst) | ((simm & 0xffff) << 5) | (i << 21)));
523 FAIL_IF(push_inst(compiler, MOVK | RD(dst) | ((~simm & 0xffff) << 5) | (i << 21)));
526 return SLJIT_SUCCESS;
529 for (i = 0; i < 4; i++) {
530 if (!(simm & 0xffff)) {
536 FAIL_IF(push_inst(compiler, MOVZ | RD(dst) | ((simm & 0xffff) << 5) | (i << 21)));
539 FAIL_IF(push_inst(compiler, MOVK | RD(dst) | ((simm & 0xffff) << 5) | (i << 21)));
542 return SLJIT_SUCCESS;
/* emit_op_imm() flag bits (the low 16 bits carry the SLJIT opcode). */
#define ARG1_IMM	0x0010000
#define ARG2_IMM	0x0020000
#define INT_OP		0x0040000
#define SET_FLAGS	0x0080000
#define UNUSED_RETURN	0x0100000

/* If condition flags are requested, fold the S-bit(s) into inv_bits and,
   when the arithmetic result itself is unused, redirect the destination
   to the zero register.  NOTE(review): the `dst = TMP_ZERO;` line was
   missing in this copy — restored from upstream sljit; confirm. */
#define CHECK_FLAGS(flag_bits) \
	if (flags & SET_FLAGS) { \
		inv_bits |= flag_bits; \
		if (flags & UNUSED_RETURN) \
			dst = TMP_ZERO; \
	}
/* NOTE(review): this copy of emit_op_imm() is heavily truncated — stray
   line numbers are fused into the text and the switch statement's `case`
   labels, many declarations (imm, nimm, reg, inst_bits), `goto` targets
   and closing braces are missing.  It cannot compile as-is; restore from
   upstream sljit.  Purpose (from surviving lines): central emitter for
   all two-operand integer operations, handling immediate operands via
   ADDI/SUBI/logical-immediate/bitfield forms where possible and falling
   back to loading the immediate into TMP_REG1/TMP_REG2. */
558 static sljit_s32 emit_op_imm(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 dst, sljit_sw arg1, sljit_sw arg2)
560 /* dst must be register, TMP_REG1
561 arg1 must be register, TMP_REG1, imm
562 arg2 must be register, TMP_REG2, imm */
563 sljit_ins inv_bits = (flags & INT_OP) ? (1 << 31) : 0;
565 sljit_s32 op = (flags & 0xffff);
569 if (SLJIT_UNLIKELY((flags & (ARG1_IMM | ARG2_IMM)) == (ARG1_IMM | ARG2_IMM))) {
570 /* Both are immediates. */
572 if (arg1 == 0 && op != SLJIT_ADD && op != SLJIT_SUB)
575 FAIL_IF(load_immediate(compiler, TMP_REG1, arg1));
580 if (flags & (ARG1_IMM | ARG2_IMM)) {
581 reg = (flags & ARG2_IMM) ? arg1 : arg2;
582 imm = (flags & ARG2_IMM) ? arg2 : arg1;
/* switch (op) — case labels are missing in this copy */
590 /* No form with immediate operand (except imm 0, which
591 is represented by a ZERO register). */
594 SLJIT_ASSERT(!(flags & SET_FLAGS) && (flags & ARG2_IMM) && arg1 == TMP_REG1);
595 return load_immediate(compiler, dst, imm);
597 SLJIT_ASSERT(flags & ARG2_IMM);
598 FAIL_IF(load_immediate(compiler, dst, (flags & INT_OP) ? (~imm & 0xffffffff) : ~imm));
601 if (flags & ARG1_IMM)
/* ADD/SUB with 12-bit (optionally LSL #12-shifted) immediates */
607 CHECK_FLAGS(1 << 29);
608 return push_inst(compiler, ((op == SLJIT_ADD ? ADDI : SUBI) ^ inv_bits) | RD(dst) | RN(reg));
610 if (imm > 0 && imm <= 0xfff) {
611 CHECK_FLAGS(1 << 29);
612 return push_inst(compiler, (ADDI ^ inv_bits) | RD(dst) | RN(reg) | (imm << 10));
615 if (nimm > 0 && nimm <= 0xfff) {
616 CHECK_FLAGS(1 << 29);
617 return push_inst(compiler, (SUBI ^ inv_bits) | RD(dst) | RN(reg) | (nimm << 10));
619 if (imm > 0 && imm <= 0xffffff && !(imm & 0xfff)) {
620 CHECK_FLAGS(1 << 29);
621 return push_inst(compiler, (ADDI ^ inv_bits) | RD(dst) | RN(reg) | ((imm >> 12) << 10) | (1 << 22));
623 if (nimm > 0 && nimm <= 0xffffff && !(nimm & 0xfff)) {
624 CHECK_FLAGS(1 << 29);
625 return push_inst(compiler, (SUBI ^ inv_bits) | RD(dst) | RN(reg) | ((nimm >> 12) << 10) | (1 << 22));
627 if (imm > 0 && imm <= 0xffffff && !(flags & SET_FLAGS)) {
628 FAIL_IF(push_inst(compiler, (ADDI ^ inv_bits) | RD(dst) | RN(reg) | ((imm >> 12) << 10) | (1 << 22)));
629 return push_inst(compiler, (ADDI ^ inv_bits) | RD(dst) | RN(dst) | ((imm & 0xfff) << 10));
631 if (nimm > 0 && nimm <= 0xffffff && !(flags & SET_FLAGS)) {
632 FAIL_IF(push_inst(compiler, (SUBI ^ inv_bits) | RD(dst) | RN(reg) | ((nimm >> 12) << 10) | (1 << 22)));
633 return push_inst(compiler, (SUBI ^ inv_bits) | RD(dst) | RN(dst) | ((nimm & 0xfff) << 10));
/* logical immediates (AND/OR/XOR) via the bitmask encoder */
637 inst_bits = logical_imm(imm, LOGICAL_IMM_CHECK | ((flags & INT_OP) ? 16 : 32));
640 CHECK_FLAGS(3 << 29);
641 return push_inst(compiler, (ANDI ^ inv_bits) | RD(dst) | RN(reg) | inst_bits);
644 inst_bits = logical_imm(imm, LOGICAL_IMM_CHECK | ((flags & INT_OP) ? 16 : 32));
651 FAIL_IF(push_inst(compiler, (inst_bits ^ inv_bits) | RD(dst) | RN(reg)));
/* shifts by constant amounts via UBFM bitfield forms */
654 if (flags & ARG1_IMM)
656 if (flags & INT_OP) {
658 FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | ((-imm & 0x1f) << 16) | ((31 - imm) << 10)));
662 FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | (1 << 22) | ((-imm & 0x3f) << 16) | ((63 - imm) << 10)));
667 if (flags & ARG1_IMM)
669 if (op == SLJIT_ASHR)
671 if (flags & INT_OP) {
673 FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | (imm << 16) | (31 << 10)));
677 FAIL_IF(push_inst(compiler, (UBFM ^ inv_bits) | RD(dst) | RN(arg1) | (1 << 22) | (imm << 16) | (63 << 10)));
/* fallback: immediate cannot be encoded, load it into a temp register */
685 if (flags & ARG2_IMM) {
689 FAIL_IF(load_immediate(compiler, TMP_REG2, arg2));
697 FAIL_IF(load_immediate(compiler, TMP_REG1, arg1));
703 /* Both arguments are registers. */
707 SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
709 return SLJIT_SUCCESS;
710 return push_inst(compiler, ORR | RD(dst) | RN(TMP_ZERO) | RM(arg2));
712 SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
713 return push_inst(compiler, (UBFM ^ (1 << 31)) | RD(dst) | RN(arg2) | (7 << 10));
715 SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
716 if (!(flags & INT_OP))
718 return push_inst(compiler, (SBFM ^ inv_bits) | RD(dst) | RN(arg2) | (7 << 10));
720 SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
721 return push_inst(compiler, (UBFM ^ (1 << 31)) | RD(dst) | RN(arg2) | (15 << 10));
723 SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
724 if (!(flags & INT_OP))
726 return push_inst(compiler, (SBFM ^ inv_bits) | RD(dst) | RN(arg2) | (15 << 10));
728 SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
729 if ((flags & INT_OP) && dst == arg2)
730 return SLJIT_SUCCESS;
731 return push_inst(compiler, (ORR ^ (1 << 31)) | RD(dst) | RN(TMP_ZERO) | RM(arg2));
733 SLJIT_ASSERT(!(flags & SET_FLAGS) && arg1 == TMP_REG1);
734 if ((flags & INT_OP) && dst == arg2)
735 return SLJIT_SUCCESS;
736 return push_inst(compiler, SBFM | (1 << 22) | RD(dst) | RN(arg2) | (31 << 10));
738 SLJIT_ASSERT(arg1 == TMP_REG1);
739 FAIL_IF(push_inst(compiler, (ORN ^ inv_bits) | RD(dst) | RN(TMP_ZERO) | RM(arg2)));
740 break; /* Set flags. */
742 SLJIT_ASSERT(arg1 == TMP_REG1);
743 if (flags & SET_FLAGS)
745 return push_inst(compiler, (SUB ^ inv_bits) | RD(dst) | RN(TMP_ZERO) | RM(arg2));
747 SLJIT_ASSERT(arg1 == TMP_REG1);
748 return push_inst(compiler, (CLZ ^ inv_bits) | RD(dst) | RN(arg2));
750 CHECK_FLAGS(1 << 29);
751 return push_inst(compiler, (ADD ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
753 CHECK_FLAGS(1 << 29);
754 return push_inst(compiler, (ADC ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
756 CHECK_FLAGS(1 << 29);
757 return push_inst(compiler, (SUB ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
759 CHECK_FLAGS(1 << 29);
760 return push_inst(compiler, (SBC ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
/* MUL with overflow flag: needs a widening multiply plus compare */
762 if (!(flags & SET_FLAGS))
763 return push_inst(compiler, (MADD ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2) | RT2(TMP_ZERO));
764 if (flags & INT_OP) {
765 FAIL_IF(push_inst(compiler, SMADDL | RD(dst) | RN(arg1) | RM(arg2) | (31 << 10)));
766 FAIL_IF(push_inst(compiler, ADD | RD(TMP_LR) | RN(TMP_ZERO) | RM(dst) | (2 << 22) | (31 << 10)));
767 return push_inst(compiler, SUBS | RD(TMP_ZERO) | RN(TMP_LR) | RM(dst) | (2 << 22) | (63 << 10));
769 FAIL_IF(push_inst(compiler, SMULH | RD(TMP_LR) | RN(arg1) | RM(arg2)));
770 FAIL_IF(push_inst(compiler, MADD | RD(dst) | RN(arg1) | RM(arg2) | RT2(TMP_ZERO)));
771 return push_inst(compiler, SUBS | RD(TMP_ZERO) | RN(TMP_LR) | RM(dst) | (2 << 22) | (63 << 10));
773 CHECK_FLAGS(3 << 29);
774 return push_inst(compiler, (AND ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2));
776 FAIL_IF(push_inst(compiler, (ORR ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
777 break; /* Set flags. */
779 FAIL_IF(push_inst(compiler, (EOR ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
780 break; /* Set flags. */
782 FAIL_IF(push_inst(compiler, (LSLV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
783 break; /* Set flags. */
785 FAIL_IF(push_inst(compiler, (LSRV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
786 break; /* Set flags. */
788 FAIL_IF(push_inst(compiler, (ASRV ^ inv_bits) | RD(dst) | RN(arg1) | RM(arg2)));
789 break; /* Set flags. */
792 return SLJIT_SUCCESS;
/* fall-through tail: materialize the flags for the logical/shift ops */
796 if (flags & SET_FLAGS)
797 return push_inst(compiler, (SUBS ^ inv_bits) | RD(TMP_ZERO) | RN(dst) | RM(TMP_ZERO));
798 return SLJIT_SUCCESS;
/* NOTE(review): truncated in this copy — stray line numbers are fused in
   and lines are missing (the INT_SIZE define, STORE/SIGNED flag defines,
   the function's opening brace, some guards and closing braces).  Restore
   from upstream sljit.  Purpose: emit a single load or store of `reg`
   to/from the address `arg`+`argw`, choosing between register-offset,
   scaled-unsigned-offset (STRBI form), unscaled 9-bit (STURBI form), or a
   computed address in `tmp_reg`. */
804 #define BYTE_SIZE 0x0
805 #define HALF_SIZE 0x1
807 #define WORD_SIZE 0x3
809 #define MEM_SIZE_SHIFT(flags) ((flags) & 0x3)
811 static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg,
812 sljit_s32 arg, sljit_sw argw, sljit_s32 tmp_reg)
/* size bits go to [31:30]; load/sign-extend opc bits to [23:22] */
814 sljit_u32 shift = MEM_SIZE_SHIFT(flags);
815 sljit_u32 type = (shift << 30);
817 if (!(flags & STORE))
818 type |= (flags & SIGNED) ? 0x00800000 : 0x00400000;
820 SLJIT_ASSERT(arg & SLJIT_MEM);
/* base + (index << shift) addressing */
822 if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
825 if (argw == 0 || argw == shift)
826 return push_inst(compiler, STRB | type | RT(reg)
827 | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw ? (1 << 12) : 0));
829 FAIL_IF(push_inst(compiler, ADD | RD(tmp_reg) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw << 10)));
830 return push_inst(compiler, STRBI | type | RT(reg) | RN(tmp_reg));
/* absolute address: build it in tmp_reg, keep low bits as offset */
835 if (arg == SLJIT_UNUSED) {
836 FAIL_IF(load_immediate(compiler, tmp_reg, argw & ~(0xfff << shift)));
838 argw = (argw >> shift) & 0xfff;
840 return push_inst(compiler, STRBI | type | RT(reg) | RN(tmp_reg) | (argw << 10));
/* base + aligned positive offset */
843 if (argw >= 0 && (argw & ((1 << shift) - 1)) == 0) {
844 if ((argw >> shift) <= 0xfff) {
845 return push_inst(compiler, STRBI | type | RT(reg) | RN(arg) | (argw << (10 - shift)));
848 if (argw <= 0xffffff) {
849 FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(tmp_reg) | RN(arg) | ((argw >> 12) << 10)));
851 argw = ((argw & 0xfff) >> shift);
852 return push_inst(compiler, STRBI | type | RT(reg) | RN(tmp_reg) | (argw << 10));
/* unscaled 9-bit signed offset (STUR family) */
856 if (argw <= 255 && argw >= -256)
857 return push_inst(compiler, STURBI | type | RT(reg) | RN(arg) | ((argw & 0x1ff) << 12));
/* last resort: compute the offset in tmp_reg, use register addressing */
859 FAIL_IF(load_immediate(compiler, tmp_reg, argw));
861 return push_inst(compiler, STRB | type | RT(reg) | RN(arg) | RM(tmp_reg));
864 /* --------------------------------------------------------------------- */
866 /* --------------------------------------------------------------------- */
/* NOTE(review): truncated in this copy — stray line numbers are fused in
   and statements are missing (the opening brace, `offs`/`prev` setup, STP
   pairing logic inside the save loops, several braces and the arg-move
   guards).  Restore from upstream sljit.  Purpose: emit the function
   prologue — push FP/LR, save callee-saved and extra scratch registers,
   move incoming argument registers into S0..S2, and allocate (and, for
   >=4096 bytes, stack-probe) the local area. */
868 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
869 sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
870 sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
872 sljit_s32 args, i, tmp, offs, prev, saved_regs_size;
875 CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
876 set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
/* keep the saved-register area 16-byte aligned */
878 saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 2);
879 if (saved_regs_size & 0x8)
880 saved_regs_size += sizeof(sljit_sw);
882 local_size = (local_size + 15) & ~0xf;
883 compiler->local_size = local_size + saved_regs_size;
/* push FP/LR and reserve the saved-register area in one STP (pre-index) */
885 FAIL_IF(push_inst(compiler, STP_PRE | RT(TMP_FP) | RT2(TMP_LR)
886 | RN(SLJIT_SP) | ((-(saved_regs_size >> 3) & 0x7f) << 15)));
889 if (local_size >= 4096)
890 FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(SLJIT_SP) | (1 << 10) | (1 << 22)));
891 else if (local_size > 256)
892 FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(SLJIT_SP) | (local_size << 10)));
/* store the callee-saved registers in pairs */
895 tmp = saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - saveds) : SLJIT_FIRST_SAVED_REG;
898 for (i = SLJIT_S0; i >= tmp; i--) {
903 FAIL_IF(push_inst(compiler, STP | RT(prev) | RT2(i) | RN(SLJIT_SP) | offs));
908 for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
913 FAIL_IF(push_inst(compiler, STP | RT(prev) | RT2(i) | RN(SLJIT_SP) | offs));
919 FAIL_IF(push_inst(compiler, STRI | RT(prev) | RN(SLJIT_SP) | (offs >> 5)));
922 FAIL_IF(push_inst(compiler, ADDI | RD(TMP_FP) | RN(SLJIT_SP) | (0 << 10)));
/* move incoming argument registers into the saved registers */
924 args = get_arg_count(arg_types);
927 FAIL_IF(push_inst(compiler, ORR | RD(SLJIT_S0) | RN(TMP_ZERO) | RM(SLJIT_R0)));
929 FAIL_IF(push_inst(compiler, ORR | RD(SLJIT_S1) | RN(TMP_ZERO) | RM(SLJIT_R1)));
931 FAIL_IF(push_inst(compiler, ORR | RD(SLJIT_S2) | RN(TMP_ZERO) | RM(SLJIT_R2)));
/* stack probing: touch every page when the frame is >= one page */
934 if (local_size >= 4096) {
935 if (local_size < 4 * 4096) {
936 /* No need for a loop. */
937 if (local_size >= 2 * 4096) {
938 FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
939 FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(TMP_REG1) | (1 << 10) | (1 << 22)));
943 if (local_size >= 2 * 4096) {
944 FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
945 FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(TMP_REG1) | (1 << 10) | (1 << 22)));
949 FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
/* probe loop: decrement a page counter in TMP_REG2, branch back while != 0 */
953 FAIL_IF(push_inst(compiler, MOVZ | RD(TMP_REG2) | (((local_size >> 12) - 1) << 5)));
954 FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
955 FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(TMP_REG1) | (1 << 10) | (1 << 22)));
956 FAIL_IF(push_inst(compiler, SUBI | (1 << 29) | RD(TMP_REG2) | RN(TMP_REG2) | (1 << 10)));
957 FAIL_IF(push_inst(compiler, B_CC | ((((sljit_ins) -3) & 0x7ffff) << 5) | 0x1 /* not-equal */));
958 FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
963 if (local_size > 256) {
964 FAIL_IF(push_inst(compiler, SUBI | RD(TMP_REG1) | RN(TMP_REG1) | (local_size << 10)));
965 FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
967 else if (local_size > 0)
968 FAIL_IF(push_inst(compiler, LDR_PRE | RT(TMP_ZERO) | RN(TMP_REG1) | ((-local_size & 0x1ff) << 12)));
970 FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(TMP_REG1) | (0 << 10)));
972 else if (local_size > 256) {
973 FAIL_IF(push_inst(compiler, LDRI | RT(TMP_ZERO) | RN(TMP_REG1)));
974 FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(TMP_REG1) | (0 << 10)));
976 else if (local_size > 0)
977 FAIL_IF(push_inst(compiler, LDR_PRE | RT(TMP_ZERO) | RN(SLJIT_SP) | ((-local_size & 0x1ff) << 12)));
/* non-probing path: plain SP adjustment (split when > 12 bits) */
981 /* The local_size does not include saved registers size. */
982 if (local_size > 0xfff) {
983 FAIL_IF(push_inst(compiler, SUBI | RD(SLJIT_SP) | RN(SLJIT_SP) | ((local_size >> 12) << 10) | (1 << 22)));
987 FAIL_IF(push_inst(compiler, SUBI | RD(SLJIT_SP) | RN(SLJIT_SP) | (local_size << 10)));
991 return SLJIT_SUCCESS;
994 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
995 sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
996 sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
998 sljit_s32 saved_regs_size;
1001 CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
1002 set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
1004 saved_regs_size = GET_SAVED_REGISTERS_SIZE(scratches, saveds, 2);
1005 if (saved_regs_size & 0x8)
1006 saved_regs_size += sizeof(sljit_sw);
1008 compiler->local_size = saved_regs_size + ((local_size + 15) & ~0xf);
1009 return SLJIT_SUCCESS;
/* NOTE(review): truncated in this copy — stray line numbers are fused in
   and statements are missing (the opening brace, `else` branch braces,
   the offs/prev bookkeeping inside the restore loops, closing braces).
   Restore from upstream sljit.  Purpose: emit the epilogue — move the
   return value, pop FP/LR, restore the saved registers and RET. */
1012 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
1014 sljit_s32 local_size;
1015 sljit_s32 i, tmp, offs, prev, saved_regs_size;
1018 CHECK(check_sljit_emit_return(compiler, op, src, srcw));
1020 FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));
/* must mirror the frame layout computed in sljit_emit_enter() */
1022 saved_regs_size = GET_SAVED_REGISTERS_SIZE(compiler->scratches, compiler->saveds, 2);
1023 if (saved_regs_size & 0x8)
1024 saved_regs_size += sizeof(sljit_sw);
1026 local_size = compiler->local_size - saved_regs_size;
1028 /* Load LR as early as possible. */
1029 if (local_size == 0)
1030 FAIL_IF(push_inst(compiler, LDP | RT(TMP_FP) | RT2(TMP_LR) | RN(SLJIT_SP)));
1031 else if (local_size < 63 * sizeof(sljit_sw)) {
1032 FAIL_IF(push_inst(compiler, LDP_PRE | RT(TMP_FP) | RT2(TMP_LR)
1033 | RN(SLJIT_SP) | (local_size << (15 - 3))));
/* large frame: release the locals first (split when > 12 bits) */
1036 if (local_size > 0xfff) {
1037 FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(SLJIT_SP) | ((local_size >> 12) << 10) | (1 << 22)));
1038 local_size &= 0xfff;
1041 FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(SLJIT_SP) | (local_size << 10)));
1043 FAIL_IF(push_inst(compiler, LDP | RT(TMP_FP) | RT2(TMP_LR) | RN(SLJIT_SP)));
/* restore the callee-saved registers in pairs (reverse of emit_enter) */
1046 tmp = compiler->saveds < SLJIT_NUMBER_OF_SAVED_REGISTERS ? (SLJIT_S0 + 1 - compiler->saveds) : SLJIT_FIRST_SAVED_REG;
1049 for (i = SLJIT_S0; i >= tmp; i--) {
1054 FAIL_IF(push_inst(compiler, LDP | RT(prev) | RT2(i) | RN(SLJIT_SP) | offs));
1059 for (i = compiler->scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
1064 FAIL_IF(push_inst(compiler, LDP | RT(prev) | RT2(i) | RN(SLJIT_SP) | offs));
1070 FAIL_IF(push_inst(compiler, LDRI | RT(prev) | RN(SLJIT_SP) | (offs >> 5)));
1072 /* These two can be executed in parallel. */
1073 FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RN(SLJIT_SP) | (saved_regs_size << 10)));
1074 return push_inst(compiler, RET | RN(TMP_LR));
1077 /* --------------------------------------------------------------------- */
1079 /* --------------------------------------------------------------------- */
1081 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
1083 sljit_ins inv_bits = (op & SLJIT_I32_OP) ? (1 << 31) : 0;
1086 CHECK(check_sljit_emit_op0(compiler, op));
1088 op = GET_OPCODE(op);
1090 case SLJIT_BREAKPOINT:
1091 return push_inst(compiler, BRK);
1093 return push_inst(compiler, NOP);
1096 FAIL_IF(push_inst(compiler, ORR | RD(TMP_REG1) | RN(TMP_ZERO) | RM(SLJIT_R0)));
1097 FAIL_IF(push_inst(compiler, MADD | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1) | RT2(TMP_ZERO)));
1098 return push_inst(compiler, (op == SLJIT_LMUL_UW ? UMULH : SMULH) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
1099 case SLJIT_DIVMOD_UW:
1100 case SLJIT_DIVMOD_SW:
1101 FAIL_IF(push_inst(compiler, (ORR ^ inv_bits) | RD(TMP_REG1) | RN(TMP_ZERO) | RM(SLJIT_R0)));
1102 FAIL_IF(push_inst(compiler, ((op == SLJIT_DIVMOD_UW ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1)));
1103 FAIL_IF(push_inst(compiler, (MADD ^ inv_bits) | RD(SLJIT_R1) | RN(SLJIT_R0) | RM(SLJIT_R1) | RT2(TMP_ZERO)));
1104 return push_inst(compiler, (SUB ^ inv_bits) | RD(SLJIT_R1) | RN(TMP_REG1) | RM(SLJIT_R1));
1107 return push_inst(compiler, ((op == SLJIT_DIV_UW ? UDIV : SDIV) ^ inv_bits) | RD(SLJIT_R0) | RN(SLJIT_R0) | RM(SLJIT_R1));
1110 return SLJIT_SUCCESS;
/* Emits a one-operand operation (moves with width conversion, and the
   unary ops dispatched through emit_op_imm).
   NOTE(review): extraction dropped braces, the switch header, several
   case labels and else-branches; read the surviving lines as fragments. */
1113 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
1114 sljit_s32 dst, sljit_sw dstw,
1115 sljit_s32 src, sljit_sw srcw)
1117 sljit_s32 dst_r, flags, mem_flags;
1118 sljit_s32 op_flags = GET_ALL_FLAGS(op);
1121 CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
1122 ADJUST_LOCAL_OFFSET(dst, dstw);
1123 ADJUST_LOCAL_OFFSET(src, srcw);
/* Unused destination with no flag side effects: a memory-source MOV is
   still emitted (as a prefetch, see below); everything else is a no-op. */
1125 if (dst == SLJIT_UNUSED && !HAS_FLAGS(op)) {
1126 if (op <= SLJIT_MOV_P && (src & SLJIT_MEM)) {
1127 SLJIT_ASSERT(reg_map[1] == 0 && reg_map[3] == 2 && reg_map[5] == 4);
1129 if (op >= SLJIT_MOV_U8 && op <= SLJIT_MOV_S8)
1131 else if (op >= SLJIT_MOV_U16 && op <= SLJIT_MOV_S16)
1136 /* Signed word sized load is the prefetch instruction. */
1137 return emit_op_mem(compiler, WORD_SIZE | SIGNED, dst, src, srcw, TMP_REG1);
1139 return SLJIT_SUCCESS;
1142 dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
1144 op = GET_OPCODE(op);
1145 if (op >= SLJIT_MOV && op <= SLJIT_MOV_P) {
1146 /* Both operands are registers. */
1147 if (dst_r != TMP_REG1 && FAST_IS_REG(src))
1148 return emit_op_imm(compiler, op | ((op_flags & SLJIT_I32_OP) ? INT_OP : 0), dst_r, TMP_REG1, src);
/* Per-width setup: pick the memory access size and, for immediate
   sources, pre-truncate/sign-extend srcw to the move's width.
   (The case labels selecting each width are on missing lines.) */
1153 mem_flags = WORD_SIZE;
1156 mem_flags = BYTE_SIZE;
1157 if (src & SLJIT_IMM)
1158 srcw = (sljit_u8)srcw;
1161 mem_flags = BYTE_SIZE | SIGNED;
1162 if (src & SLJIT_IMM)
1163 srcw = (sljit_s8)srcw;
1166 mem_flags = HALF_SIZE;
1167 if (src & SLJIT_IMM)
1168 srcw = (sljit_u16)srcw;
1171 mem_flags = HALF_SIZE | SIGNED;
1172 if (src & SLJIT_IMM)
1173 srcw = (sljit_s16)srcw;
1176 mem_flags = INT_SIZE;
1177 if (src & SLJIT_IMM)
1178 srcw = (sljit_u32)srcw;
1181 mem_flags = INT_SIZE | SIGNED;
1182 if (src & SLJIT_IMM)
1183 srcw = (sljit_s32)srcw;
1186 SLJIT_UNREACHABLE();
/* Materialize the source: immediate load, register copy, or memory load. */
1191 if (src & SLJIT_IMM)
1192 FAIL_IF(emit_op_imm(compiler, SLJIT_MOV | ARG2_IMM, dst_r, TMP_REG1, srcw));
1193 else if (!(src & SLJIT_MEM))
1196 FAIL_IF(emit_op_mem(compiler, mem_flags, dst_r, src, srcw, TMP_REG1));
1198 if (dst & SLJIT_MEM)
1199 return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2);
1200 return SLJIT_SUCCESS;
/* Non-move (unary) path: compute flag/width modifiers, load a memory
   source into TMP_REG2, then dispatch to emit_op_imm. */
1203 flags = HAS_FLAGS(op_flags) ? SET_FLAGS : 0;
1204 mem_flags = WORD_SIZE;
1206 if (op_flags & SLJIT_I32_OP) {
1208 mem_flags = INT_SIZE;
1211 if (dst == SLJIT_UNUSED)
1212 flags |= UNUSED_RETURN;
1214 if (src & SLJIT_MEM) {
1215 FAIL_IF(emit_op_mem(compiler, mem_flags, TMP_REG2, src, srcw, TMP_REG2));
/* NOTE(review): return value of emit_op_imm is intentionally ignored
   here in the original; errors surface via compiler->error. */
1219 emit_op_imm(compiler, flags | op, dst_r, TMP_REG1, src);
1221 if (SLJIT_UNLIKELY(dst & SLJIT_MEM))
1222 return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2);
1223 return SLJIT_SUCCESS;
/* Emits a two-operand operation (add/sub/mul/and/or/xor/shifts...).
   Operands are normalized into registers/immediates, then the heavy
   lifting is done by emit_op_imm.
   NOTE(review): extraction dropped braces and the statements that
   rewrite src1w/src2w after the memory loads and IMM tests. */
1226 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
1227 sljit_s32 dst, sljit_sw dstw,
1228 sljit_s32 src1, sljit_sw src1w,
1229 sljit_s32 src2, sljit_sw src2w)
1231 sljit_s32 dst_r, flags, mem_flags;
1234 CHECK(check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
1235 ADJUST_LOCAL_OFFSET(dst, dstw);
1236 ADJUST_LOCAL_OFFSET(src1, src1w);
1237 ADJUST_LOCAL_OFFSET(src2, src2w);
/* Nothing observable: no destination and no flags requested. */
1239 if (dst == SLJIT_UNUSED && !HAS_FLAGS(op))
1240 return SLJIT_SUCCESS;
1242 dst_r = SLOW_IS_REG(dst) ? dst : TMP_REG1;
1243 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
1244 mem_flags = WORD_SIZE;
1246 if (op & SLJIT_I32_OP) {
1248 mem_flags = INT_SIZE;
1251 if (dst == SLJIT_UNUSED)
1252 flags |= UNUSED_RETURN;
/* Memory operands are loaded into the two scratch registers first. */
1254 if (src1 & SLJIT_MEM) {
1255 FAIL_IF(emit_op_mem(compiler, mem_flags, TMP_REG1, src1, src1w, TMP_REG1));
1259 if (src2 & SLJIT_MEM) {
1260 FAIL_IF(emit_op_mem(compiler, mem_flags, TMP_REG2, src2, src2w, TMP_REG2));
/* Immediate operands set ARG1_IMM/ARG2_IMM (on missing lines);
   register operands are folded into src1w/src2w for emit_op_imm. */
1264 if (src1 & SLJIT_IMM)
1269 if (src2 & SLJIT_IMM)
/* Error propagation happens through compiler->error, hence the
   ignored return value. */
1274 emit_op_imm(compiler, flags | GET_OPCODE(op), dst_r, src1w, src2w);
1276 if (dst & SLJIT_MEM)
1277 return emit_op_mem(compiler, mem_flags | STORE, dst_r, dst, dstw, TMP_REG2);
1278 return SLJIT_SUCCESS;
/* Returns the hardware register number backing the abstract SLJIT
   integer register 'reg' (lookup in the reg_map table). */
1281 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
1283 CHECK_REG_INDEX(check_sljit_get_register_index(reg));
1284 return reg_map[reg];
/* Returns the hardware FP/SIMD register number backing the abstract
   SLJIT float register 'reg' (lookup in the freg_map table). */
1287 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
1289 CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
1290 return freg_map[reg];
/* Emits a raw, caller-provided machine instruction verbatim.
   'instruction' must point to one 32-bit AArch64 instruction word;
   'size' is validated by the check macro. */
1293 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
1294 void *instruction, sljit_s32 size)
1297 CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
1299 return push_inst(compiler, *(sljit_ins*)instruction);
1302 /* --------------------------------------------------------------------- */
1303 /* Floating point operators */
1304 /* --------------------------------------------------------------------- */
/* Emits an FP load or store (STORE flag selects direction; without it
   the load bit is OR-ed into 'type' on a missing line).  Tries, in
   order: register-offset form, scaled 12-bit immediate, split
   base+offset via ADDI, 9-bit unscaled STUR/LDUR, and finally a full
   immediate load into TMP_REG1.
   NOTE(review): extraction dropped braces and a few else/adjustment
   lines; the surviving lines show only the main encoding paths. */
1306 static sljit_s32 emit_fop_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
1308 sljit_u32 shift = MEM_SIZE_SHIFT(flags);
1309 sljit_ins type = (shift << 30);
1311 SLJIT_ASSERT(arg & SLJIT_MEM);
1313 if (!(flags & STORE))
/* Base + index register: usable directly when the index shift is 0 or
   exactly the access size (bit 12 enables the scaled form). */
1316 if (arg & OFFS_REG_MASK) {
1318 if (argw == 0 || argw == shift)
1319 return push_inst(compiler, STR_FR | type | VT(reg)
1320 | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw ? (1 << 12) : 0));
/* Otherwise fold base+index into TMP_REG1 first. */
1322 FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RN(arg & REG_MASK) | RM(OFFS_REG(arg)) | (argw << 10)));
1323 return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1));
/* Absolute address (no base register): load the aligned part of the
   address into TMP_REG1, keep the low bits as scaled immediate. */
1328 if (arg == SLJIT_UNUSED) {
1329 FAIL_IF(load_immediate(compiler, TMP_REG1, argw & ~(0xfff << shift)));
1331 argw = (argw >> shift) & 0xfff;
1333 return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1) | (argw << 10));
/* Aligned non-negative offset: scaled 12-bit immediate form, or a
   shifted-ADDI + immediate when the offset fits in 24 bits. */
1336 if (argw >= 0 && (argw & ((1 << shift) - 1)) == 0) {
1337 if ((argw >> shift) <= 0xfff)
1338 return push_inst(compiler, STR_FI | type | VT(reg) | RN(arg) | (argw << (10 - shift)));
1340 if (argw <= 0xffffff) {
1341 FAIL_IF(push_inst(compiler, ADDI | (1 << 22) | RD(TMP_REG1) | RN(arg) | ((argw >> 12) << 10)));
1343 argw = ((argw & 0xfff) >> shift);
1344 return push_inst(compiler, STR_FI | type | VT(reg) | RN(TMP_REG1) | (argw << 10));
/* Small signed offset: 9-bit unscaled STUR/LDUR form. */
1348 if (argw <= 255 && argw >= -256)
1349 return push_inst(compiler, STUR_FI | type | VT(reg) | RN(arg) | ((argw & 0x1ff) << 12));
/* Fallback: materialize the offset and use the register-offset form. */
1351 FAIL_IF(load_immediate(compiler, TMP_REG1, argw));
1352 return push_inst(compiler, STR_FR | type | VT(reg) | RN(arg) | RM(TMP_REG1));
/* Converts a float/double to a signed integer (FCVTZS, round toward
   zero).  inv_bits bit 22 selects single precision; bit 31 selects the
   32-bit result register.
   NOTE(review): the line assigning src = TMP_FREG1 after the memory
   load is on a missing line of this extraction. */
1355 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
1356 sljit_s32 dst, sljit_sw dstw,
1357 sljit_s32 src, sljit_sw srcw)
1359 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1360 sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1362 if (GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64)
1363 inv_bits |= (1 << 31);
1365 if (src & SLJIT_MEM) {
1366 emit_fop_mem(compiler, (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE, TMP_FREG1, src, srcw);
1370 FAIL_IF(push_inst(compiler, (FCVTZS ^ inv_bits) | RD(dst_r) | VN(src)));
1372 if (dst & SLJIT_MEM)
1373 return emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONV_S32_FROM_F64) ? INT_SIZE : WORD_SIZE) | STORE, TMP_REG1, dst, dstw, TMP_REG2);
1374 return SLJIT_SUCCESS;
/* Converts a signed integer to float/double (SCVTF).  inv_bits bit 22
   selects single precision; bit 31 selects the 32-bit source register. */
1377 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
1378 sljit_s32 dst, sljit_sw dstw,
1379 sljit_s32 src, sljit_sw srcw)
1381 sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1382 sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1384 if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
1385 inv_bits |= (1 << 31);
1387 if (src & SLJIT_MEM) {
1388 emit_op_mem(compiler, ((GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) ? INT_SIZE : WORD_SIZE), TMP_REG1, src, srcw, TMP_REG1);
1390 } else if (src & SLJIT_IMM) {
/* NOTE(review): an SLJIT_CONFIG_X86_64 guard in the ARM-64 backend
   looks like a copy-paste from the x86-64 port; on ARM-64 the guard is
   always false so the s32 truncation never runs.  Likely harmless here
   because SCVTF with bit 31 set reads only the low 32 bits - but worth
   confirming against upstream. */
1391 #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
1392 if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
1393 srcw = (sljit_s32)srcw;
1395 FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1399 FAIL_IF(push_inst(compiler, (SCVTF ^ inv_bits) | VD(dst_r) | RN(src)));
1401 if (dst & SLJIT_MEM)
1402 return emit_fop_mem(compiler, ((op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE) | STORE, TMP_FREG1, dst, dstw);
1403 return SLJIT_SUCCESS;
/* Compares two FP operands with FCMP, setting the NZCV flags.  Memory
   operands are loaded into TMP_FREG1/TMP_FREG2 first (the lines
   rewriting src1/src2 to the temporaries are missing from this
   extraction). */
1406 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
1407 sljit_s32 src1, sljit_sw src1w,
1408 sljit_s32 src2, sljit_sw src2w)
1410 sljit_s32 mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
1411 sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1413 if (src1 & SLJIT_MEM) {
1414 emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w);
1418 if (src2 & SLJIT_MEM) {
1419 emit_fop_mem(compiler, mem_flags, TMP_FREG2, src2, src2w);
1423 return push_inst(compiler, (FCMP ^ inv_bits) | VN(src1) | VM(src2));
/* Emits a one-operand FP operation (move, negate, abs, precision
   conversion); conversions to/from integers are dispatched by the
   SELECT_FOP1_OPERATION_WITH_CHECKS macro.
   NOTE(review): braces, some case labels and the src = dst_r rewrite
   after the memory load are on lines missing from this extraction. */
1426 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
1427 sljit_s32 dst, sljit_sw dstw,
1428 sljit_s32 src, sljit_sw srcw)
1430 sljit_s32 dst_r, mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
/* The F64<->F32 conversion path below relies on flipping the low bit
   of the size flag, hence this compile-time assertion. */
1435 SLJIT_COMPILE_ASSERT((INT_SIZE ^ 0x1) == WORD_SIZE, must_be_one_bit_difference);
1436 SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
1438 inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1439 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
/* For a precision conversion, load the SOURCE precision (flip the
   size bit); otherwise load the operation's own precision. */
1441 if (src & SLJIT_MEM) {
1442 emit_fop_mem(compiler, (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32) ? (mem_flags ^ 0x1) : mem_flags, dst_r, src, srcw);
1446 switch (GET_OPCODE(op)) {
1449 if (dst_r != TMP_FREG1)
1450 FAIL_IF(push_inst(compiler, (FMOV ^ inv_bits) | VD(dst_r) | VN(src)));
1456 FAIL_IF(push_inst(compiler, (FNEG ^ inv_bits) | VD(dst_r) | VN(src)));
1459 FAIL_IF(push_inst(compiler, (FABS ^ inv_bits) | VD(dst_r) | VN(src)));
1461 case SLJIT_CONV_F64_FROM_F32:
/* FCVT: bit 22 set converts D->S, bit 15 set converts S->D. */
1462 FAIL_IF(push_inst(compiler, FCVT | ((op & SLJIT_F32_OP) ? (1 << 22) : (1 << 15)) | VD(dst_r) | VN(src)));
1466 if (dst & SLJIT_MEM)
1467 return emit_fop_mem(compiler, mem_flags | STORE, dst_r, dst, dstw);
1468 return SLJIT_SUCCESS;
/* Emits a two-operand FP operation (FADD/FSUB/FMUL/FDIV).  Memory
   operands are loaded into TMP_FREG1/TMP_FREG2 (the src1/src2 rewrite
   lines are missing from this extraction); the result is stored back
   when dst is a memory operand. */
1471 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
1472 sljit_s32 dst, sljit_sw dstw,
1473 sljit_s32 src1, sljit_sw src1w,
1474 sljit_s32 src2, sljit_sw src2w)
1476 sljit_s32 dst_r, mem_flags = (op & SLJIT_F32_OP) ? INT_SIZE : WORD_SIZE;
1477 sljit_ins inv_bits = (op & SLJIT_F32_OP) ? (1 << 22) : 0;
1480 CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
1481 ADJUST_LOCAL_OFFSET(dst, dstw);
1482 ADJUST_LOCAL_OFFSET(src1, src1w);
1483 ADJUST_LOCAL_OFFSET(src2, src2w);
1485 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1486 if (src1 & SLJIT_MEM) {
1487 emit_fop_mem(compiler, mem_flags, TMP_FREG1, src1, src1w);
1490 if (src2 & SLJIT_MEM) {
1491 emit_fop_mem(compiler, mem_flags, TMP_FREG2, src2, src2w);
1495 switch (GET_OPCODE(op)) {
1497 FAIL_IF(push_inst(compiler, (FADD ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
1500 FAIL_IF(push_inst(compiler, (FSUB ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
1503 FAIL_IF(push_inst(compiler, (FMUL ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
1506 FAIL_IF(push_inst(compiler, (FDIV ^ inv_bits) | VD(dst_r) | VN(src1) | VM(src2)));
1510 if (!(dst & SLJIT_MEM))
1511 return SLJIT_SUCCESS;
1512 return emit_fop_mem(compiler, mem_flags | STORE, TMP_FREG1, dst, dstw);
1515 /* --------------------------------------------------------------------- */
1516 /* Other instructions */
1517 /* --------------------------------------------------------------------- */
/* Saves the return address (LR) into 'dst' at the start of a
   fast-call target: register copy via ORR, or a word store to memory. */
1519 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
1522 CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
1523 ADJUST_LOCAL_OFFSET(dst, dstw);
1525 if (FAST_IS_REG(dst))
1526 return push_inst(compiler, ORR | RD(dst) | RN(TMP_ZERO) | RM(TMP_LR));
1529 return emit_op_mem(compiler, WORD_SIZE | STORE, TMP_LR, dst, dstw, TMP_REG1);
/* Returns from a fast call: restores LR from 'src' (register copy or
   memory load), then emits RET through LR. */
1532 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
1535 CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
1536 ADJUST_LOCAL_OFFSET(src, srcw);
1538 if (FAST_IS_REG(src))
1539 FAIL_IF(push_inst(compiler, ORR | RD(TMP_LR) | RN(TMP_ZERO) | RM(src)));
1541 FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_LR, src, srcw, TMP_REG1));
1543 return push_inst(compiler, RET | RN(TMP_LR));
1546 /* --------------------------------------------------------------------- */
1547 /* Conditional instructions */
1548 /* --------------------------------------------------------------------- */
/* Maps an SLJIT comparison/branch type to an AArch64 condition code
   for B.cond / CSEL / CSINC encodings.
   NOTE(review): the 'return 0x.;' lines carrying the actual condition
   values, the switch header and several paired case labels are missing
   from this extraction - only some of the case labels survive. */
1550 static sljit_uw get_cc(sljit_s32 type)
1554 case SLJIT_MUL_NOT_OVERFLOW:
1555 case SLJIT_EQUAL_F64:
1558 case SLJIT_NOT_EQUAL:
1559 case SLJIT_MUL_OVERFLOW:
1560 case SLJIT_NOT_EQUAL_F64:
1564 case SLJIT_LESS_F64:
1567 case SLJIT_GREATER_EQUAL:
1568 case SLJIT_GREATER_EQUAL_F64:
1572 case SLJIT_GREATER_F64:
1575 case SLJIT_LESS_EQUAL:
1576 case SLJIT_LESS_EQUAL_F64:
1579 case SLJIT_SIG_LESS:
1582 case SLJIT_SIG_GREATER_EQUAL:
1585 case SLJIT_SIG_GREATER:
1588 case SLJIT_SIG_LESS_EQUAL:
1591 case SLJIT_OVERFLOW:
1592 case SLJIT_UNORDERED_F64:
1595 case SLJIT_NOT_OVERFLOW:
1596 case SLJIT_ORDERED_F64:
1600 SLJIT_UNREACHABLE();
/* Creates (or reuses) a label at the current code position.  If the
   last label already sits at this offset it is returned instead of
   allocating a duplicate.  (The final 'return label;' is on a line
   missing from this extraction.) */
1605 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
1607 struct sljit_label *label;
1610 CHECK_PTR(check_sljit_emit_label(compiler));
1612 if (compiler->last_label && compiler->last_label->size == compiler->size)
1613 return compiler->last_label;
1615 label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
1616 PTR_FAIL_IF(!label);
1617 set_label(label, compiler);
/* Emits a (possibly conditional) jump or fast call.  A conditional
   jump first emits an inverted B.cond that skips the 6-instruction
   far-jump sequence; the target address itself is patched later, so a
   placeholder imm64 load into TMP_REG1 plus BR/BLR is emitted here.
   (The 'return jump;' line is missing from this extraction.) */
1621 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
1623 struct sljit_jump *jump;
1626 CHECK_PTR(check_sljit_emit_jump(compiler, type));
1628 jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
1630 set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
1633 if (type < SLJIT_JUMP) {
1634 jump->flags |= IS_COND;
/* get_cc(type) here relies on adjacent condition codes being
   inverses; (6 << 5) is the skip distance over the imm64 sequence.
   NOTE(review): upstream uses get_cc(type ^ 0x1) to invert - the
   inversion may be on a missing line or folded into B_CC. Verify. */
1635 PTR_FAIL_IF(push_inst(compiler, B_CC | (6 << 5) | get_cc(type)));
1637 else if (type >= SLJIT_FAST_CALL)
1638 jump->flags |= IS_BL;
/* Placeholder constant; linker pass rewrites it with the real target. */
1640 PTR_FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
1641 jump->addr = compiler->size;
1642 PTR_FAIL_IF(push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG1)));
/* Emits a call with typed arguments.  On ARM-64 the calling convention
   needs no argument shuffling, so this simply delegates to
   sljit_emit_jump (skip_checks suppresses the duplicate validation). */
1647 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
1648 sljit_s32 arg_types)
1651 CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
1653 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
1654 || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
1655 compiler->skip_checks = 1;
1658 return sljit_emit_jump(compiler, type);
/* Emits a compare-against-zero jump using CBZ/CBNZ: the inverted
   conditional branch skips a 6-instruction far-jump sequence
   (placeholder imm64 load + BR), which the linker pass later patches.
   (The src = TMP_REG1 rewrites and the 'return jump;' line are on
   lines missing from this extraction.) */
1661 static SLJIT_INLINE struct sljit_jump* emit_cmp_to0(struct sljit_compiler *compiler, sljit_s32 type,
1662 sljit_s32 src, sljit_sw srcw)
1664 struct sljit_jump *jump;
1665 sljit_ins inv_bits = (type & SLJIT_I32_OP) ? (1 << 31) : 0;
1667 SLJIT_ASSERT((type & 0xff) == SLJIT_EQUAL || (type & 0xff) == SLJIT_NOT_EQUAL);
1668 ADJUST_LOCAL_OFFSET(src, srcw);
1670 jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
1672 set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
1673 jump->flags |= IS_CBZ | IS_COND;
1675 if (src & SLJIT_MEM) {
1676 PTR_FAIL_IF(emit_op_mem(compiler, inv_bits ? INT_SIZE : WORD_SIZE, TMP_REG1, src, srcw, TMP_REG1));
1679 else if (src & SLJIT_IMM) {
1680 PTR_FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1684 SLJIT_ASSERT(FAST_IS_REG(src));
/* Branch on the inverse condition: for EQUAL we emit CBNZ (bit 24)
   so the fall-through path takes the far jump when src == 0. */
1686 if ((type & 0xff) == SLJIT_EQUAL)
1687 inv_bits |= 1 << 24;
1689 PTR_FAIL_IF(push_inst(compiler, (CBZ ^ inv_bits) | (6 << 5) | RT(src)));
1690 PTR_FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
1691 jump->addr = compiler->size;
1692 PTR_FAIL_IF(push_inst(compiler, BR | RN(TMP_REG1)));
/* Emits an indirect jump or fast call through a register, memory
   operand, or a (patchable) absolute address. */
1696 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
1698 struct sljit_jump *jump;
1701 CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
1702 ADJUST_LOCAL_OFFSET(src, srcw);
/* Register or memory target: load if needed, then BR/BLR directly.
   (The src = TMP_REG1 rewrite after the load is on a missing line.) */
1704 if (!(src & SLJIT_IMM)) {
1705 if (src & SLJIT_MEM) {
1706 FAIL_IF(emit_op_mem(compiler, WORD_SIZE, TMP_REG1, src, srcw, TMP_REG1));
1709 return push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(src));
1712 /* These jumps are converted to jump/call instructions when possible. */
1713 jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
1715 set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_BL : 0));
1716 jump->u.target = srcw;
/* Placeholder imm64; patched by the generate_code pass. */
1718 FAIL_IF(emit_imm64_const(compiler, TMP_REG1, 0));
1719 jump->addr = compiler->size;
1720 return push_inst(compiler, ((type >= SLJIT_FAST_CALL) ? BLR : BR) | RN(TMP_REG1));
/* Emits an indirect call with typed arguments; like sljit_emit_call,
   the ARM-64 ABI needs no shuffling, so it delegates to
   sljit_emit_ijump after suppressing duplicate argument checks. */
1723 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
1724 sljit_s32 arg_types,
1725 sljit_s32 src, sljit_sw srcw)
1728 CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
1730 #if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
1731 || (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
1732 compiler->skip_checks = 1;
1735 return sljit_emit_ijump(compiler, type, src, srcw);
/* Materializes a condition flag as 0/1 via CSINC, either storing it
   directly (MOV path) or combining it with dst using a binary op.
   NOTE(review): braces, the 'type' parameter line, and the statements
   assigning src_r / flags in the second half are missing from this
   extraction. */
1738 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
1739 sljit_s32 dst, sljit_sw dstw,
1742 sljit_s32 dst_r, src_r, flags, mem_flags;
1746 CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
1747 ADJUST_LOCAL_OFFSET(dst, dstw);
1749 cc = get_cc(type & 0xff);
1750 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
/* Pure move: CSINC xzr, xzr yields 1 when cc holds, else 0. */
1752 if (GET_OPCODE(op) < SLJIT_ADD) {
1753 FAIL_IF(push_inst(compiler, CSINC | (cc << 12) | RD(dst_r) | RN(TMP_ZERO) | RM(TMP_ZERO)));
1755 if (dst_r == TMP_REG1) {
1756 mem_flags = (GET_OPCODE(op) == SLJIT_MOV ? WORD_SIZE : INT_SIZE) | STORE;
1757 return emit_op_mem(compiler, mem_flags, TMP_REG1, dst, dstw, TMP_REG2);
1760 return SLJIT_SUCCESS;
/* Combine path: flag value in TMP_REG2, then op it against dst. */
1763 flags = HAS_FLAGS(op) ? SET_FLAGS : 0;
1764 mem_flags = WORD_SIZE;
1766 if (op & SLJIT_I32_OP) {
1768 mem_flags = INT_SIZE;
1773 if (dst & SLJIT_MEM) {
1774 FAIL_IF(emit_op_mem(compiler, mem_flags, TMP_REG1, dst, dstw, TMP_REG1));
1778 FAIL_IF(push_inst(compiler, CSINC | (cc << 12) | RD(TMP_REG2) | RN(TMP_ZERO) | RM(TMP_ZERO)));
/* Errors from emit_op_imm propagate via compiler->error. */
1779 emit_op_imm(compiler, flags | GET_OPCODE(op), dst_r, src_r, TMP_REG2);
1781 if (dst & SLJIT_MEM)
1782 return emit_op_mem(compiler, mem_flags | STORE, TMP_REG1, dst, dstw, TMP_REG2);
1783 return SLJIT_SUCCESS;
/* Conditional move: dst_reg = src when the condition 'type' holds,
   otherwise unchanged, emitted as a single CSEL.
   NOTE(review): the dst_reg parameter declaration line and the
   src = TMP_REG1 rewrite after load_immediate are missing here. */
1786 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
1788 sljit_s32 src, sljit_sw srcw)
1790 sljit_ins inv_bits = (dst_reg & SLJIT_I32_OP) ? (1 << 31) : 0;
1794 CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
1796 if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
/* For 32-bit cmov, sign-extend the immediate to match W semantics. */
1797 if (dst_reg & SLJIT_I32_OP)
1798 srcw = (sljit_s32)srcw;
1799 FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1804 cc = get_cc(type & 0xff);
1805 dst_reg &= ~SLJIT_I32_OP;
1807 return push_inst(compiler, (CSEL ^ inv_bits) | (cc << 12) | RD(dst_reg) | RN(dst_reg) | RM(src));
1810 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
1812 sljit_s32 mem, sljit_sw memw)
1814 sljit_u32 sign = 0, inst;
1817 CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));
1819 if ((mem & OFFS_REG_MASK) || (memw > 255 && memw < -256))
1820 return SLJIT_ERR_UNSUPPORTED;
1822 if (type & SLJIT_MEM_SUPP)
1823 return SLJIT_SUCCESS;
1825 switch (type & 0xff) {
1828 inst = STURBI | (MEM_SIZE_SHIFT(WORD_SIZE) << 30) | 0x400;
1833 inst = STURBI | (MEM_SIZE_SHIFT(BYTE_SIZE) << 30) | 0x400;
1838 inst = STURBI | (MEM_SIZE_SHIFT(HALF_SIZE) << 30) | 0x400;
1843 inst = STURBI | (MEM_SIZE_SHIFT(INT_SIZE) << 30) | 0x400;
1846 SLJIT_UNREACHABLE();
1847 inst = STURBI | (MEM_SIZE_SHIFT(WORD_SIZE) << 30) | 0x400;
1851 if (!(type & SLJIT_MEM_STORE))
1852 inst |= sign ? 0x00800000 : 0x00400000;
1854 if (type & SLJIT_MEM_PRE)
1857 return push_inst(compiler, inst | RT(reg) | RN(mem & REG_MASK) | ((memw & 0x1ff) << 12));
1860 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem(struct sljit_compiler *compiler, sljit_s32 type,
1862 sljit_s32 mem, sljit_sw memw)
1867 CHECK(check_sljit_emit_fmem(compiler, type, freg, mem, memw));
1869 if ((mem & OFFS_REG_MASK) || (memw > 255 && memw < -256))
1870 return SLJIT_ERR_UNSUPPORTED;
1872 if (type & SLJIT_MEM_SUPP)
1873 return SLJIT_SUCCESS;
1875 inst = STUR_FI | 0x80000400;
1877 if (!(type & SLJIT_F32_OP))
1880 if (!(type & SLJIT_MEM_STORE))
1883 if (type & SLJIT_MEM_PRE)
1886 return push_inst(compiler, inst | VT(freg) | RN(mem & REG_MASK) | ((memw & 0x1ff) << 12));
/* Computes SP + offset (address of a local variable) into dst.  Small
   offsets use one or two ADDI/SUBI instructions; large ones load the
   offset and use the extended-register ADD.
   NOTE(review): braces, the 'ins' declaration/selection (ADDI vs SUBI
   for negative offsets) and offset-negation lines are missing from
   this extraction. */
1889 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)
1895 CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
1897 SLJIT_ASSERT (SLJIT_LOCALS_OFFSET_BASE == 0);
1899 dst_reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
/* Offsets up to 24 bits: one 12-bit immediate, or a shifted-by-12
   immediate (bit 22) plus a second add for the low 12 bits. */
1901 if (offset <= 0xffffff && offset >= -0xffffff) {
1908 if (offset <= 0xfff)
1909 FAIL_IF(push_inst(compiler, ins | RD(dst_reg) | RN(SLJIT_SP) | (offset << 10)));
1911 FAIL_IF(push_inst(compiler, ins | RD(dst_reg) | RN(SLJIT_SP) | ((offset & 0xfff000) >> (12 - 10)) | (1 << 22)));
1915 FAIL_IF(push_inst(compiler, ins | RD(dst_reg) | RN(dst_reg) | (offset << 10)));
/* Larger offsets: materialize and add with the extended-register
   form (required when reading SP as an operand). */
1919 FAIL_IF(load_immediate (compiler, dst_reg, offset));
1920 /* Add extended register form. */
1921 FAIL_IF(push_inst(compiler, ADDE | (0x3 << 13) | RD(dst_reg) | RN(SLJIT_SP) | RM(dst_reg)));
1924 if (SLJIT_UNLIKELY(dst & SLJIT_MEM))
1925 return emit_op_mem(compiler, WORD_SIZE | STORE, dst_reg, dst, dstw, TMP_REG1);
1926 return SLJIT_SUCCESS;
/* Emits a patchable 64-bit constant (4-instruction MOVZ/MOVK sequence
   via emit_imm64_const) and registers it so sljit_set_const can rewrite
   it later.  (The 'return const_;' line is missing from this
   extraction.) */
1929 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
1931 struct sljit_const *const_;
1935 CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
1936 ADJUST_LOCAL_OFFSET(dst, dstw);
1938 const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
1939 PTR_FAIL_IF(!const_);
1940 set_const(const_, compiler);
1942 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
1943 PTR_FAIL_IF(emit_imm64_const(compiler, dst_r, init_value));
1945 if (dst & SLJIT_MEM)
1946 PTR_FAIL_IF(emit_op_mem(compiler, WORD_SIZE | STORE, dst_r, dst, dstw, TMP_REG2));
/* Patches a previously emitted far-jump target: rewrites the
   4-instruction MOVZ/MOVK sequence at 'addr' with 'new_target', then
   flushes the instruction cache over those 4 words (flush uses the
   executable mapping when code is dual-mapped). */
1950 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)
1952 sljit_ins* inst = (sljit_ins*)addr;
1953 modify_imm64_const(inst, new_target);
1954 inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
1955 SLJIT_CACHE_FLUSH(inst, inst + 4);
/* Patches a constant emitted by sljit_emit_const: rewrites the
   4-instruction MOVZ/MOVK sequence at 'addr' with 'new_constant' and
   flushes the instruction cache over those 4 words. */
1958 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
1960 sljit_ins* inst = (sljit_ins*)addr;
1961 modify_imm64_const(inst, new_constant);
1962 inst = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(inst, executable_offset);
1963 SLJIT_CACHE_FLUSH(inst, inst + 4);