/*
 *    Stack-less Just-In-Time compiler
 *
 *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are
 * permitted provided that the following conditions are met:
 *
 *   1. Redistributions of source code must retain the above copyright notice, this list of
 *      conditions and the following disclaimer.
 *
 *   2. Redistributions in binary form must reproduce the above copyright notice, this list
 *      of conditions and the following disclaimer in the documentation and/or other materials
 *      provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
 * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* x86 32-bit arch dependent functions. */
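
/* Emits a one byte opcode followed by an immediate machine word
   (e.g. the "mov reg, imm32" or "push imm32" encodings). */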
static sljit_s32 emit_do_imm(struct sljit_compiler *compiler, sljit_u8 opcode, sljit_sw imm)
{
	sljit_u8 *inst;

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + sizeof(sljit_sw));
	FAIL_IF(!inst);
	INC_SIZE(1 + sizeof(sljit_sw));
	*inst++ = opcode;
	sljit_unaligned_store_sw(inst, imm);
	return SLJIT_SUCCESS;
}
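
/* Emits the long (32 bit displacement) form of a jump or call. If the
   jump targets a label, the displacement is patched in later (PATCH_MW);
   otherwise it is computed here relative to the end of the instruction. */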
static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_s32 type, sljit_sw executable_offset)
{
	if (type == SLJIT_JUMP) {
		*code_ptr++ = JMP_i32;
		jump->addr++;
	}
	else if (type >= SLJIT_FAST_CALL) {
		*code_ptr++ = CALL_i32;
		jump->addr++;
	}
	else {
		*code_ptr++ = GROUP_0F;
		*code_ptr++ = get_jump_code(type);
		jump->addr += 2;
	}

	if (jump->flags & JUMP_LABEL)
		jump->flags |= PATCH_MW;
	else
		sljit_unaligned_store_sw(code_ptr, jump->u.target - (jump->addr + 4) - (sljit_uw)executable_offset);
	code_ptr += 4;

	return code_ptr;
}
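
/* Function prologue: pushes TMP_REG1 and the saved registers in use,
   moves the incoming arguments into S0-S2, then allocates the local
   frame. The offset bookkeeping below must match sljit_set_context(). */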
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
	sljit_s32 args, size;
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	args = get_arg_count(arg_types);
	compiler->args = args;

	/* [esp+0] for saving temporaries and function calls. */
	compiler->stack_tmp_size = 2 * sizeof(sljit_sw);

#if !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (scratches > 3)
		compiler->stack_tmp_size = 3 * sizeof(sljit_sw);
#endif

	compiler->saveds_offset = compiler->stack_tmp_size;
	if (scratches > 3)
		compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw);

	compiler->locals_offset = compiler->saveds_offset;

	if (saveds > 3)
		compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw);

	if (options & SLJIT_F64_ALIGNMENT)
		compiler->locals_offset = (compiler->locals_offset + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1);
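
	/* Number of prologue bytes, computed up front: each push is one byte,
	   a register to register argument move is two bytes, and an argument
	   load from the stack is three bytes (four with a SIB byte on the
	   fastcall path). */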
	size = 1 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	size += (args > 0 ? (args * 2) : 0) + (args > 2 ? 2 : 0);
#else
	size += (args > 0 ? (2 + args * 3) : 0);
#endif
	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);

	INC_SIZE(size);
	PUSH_REG(reg_map[TMP_REG1]);
#if !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (args > 0) {
		*inst++ = MOV_r_rm;
		*inst++ = MOD_REG | (reg_map[TMP_REG1] << 3) | 0x4 /* esp */;
	}
#endif
	if (saveds > 2 || scratches > 9)
		PUSH_REG(reg_map[SLJIT_S2]);
	if (saveds > 1 || scratches > 10)
		PUSH_REG(reg_map[SLJIT_S1]);
	if (saveds > 0 || scratches > 11)
		PUSH_REG(reg_map[SLJIT_S0]);

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (args > 0) {
		inst[0] = MOV_r_rm;
		inst[1] = MOD_REG | (reg_map[SLJIT_S0] << 3) | reg_map[SLJIT_R2];
		inst += 2;
	}
	if (args > 1) {
		inst[0] = MOV_r_rm;
		inst[1] = MOD_REG | (reg_map[SLJIT_S1] << 3) | reg_map[SLJIT_R1];
		inst += 2;
	}
	if (args > 2) {
		inst[0] = MOV_r_rm;
		inst[1] = MOD_DISP8 | (reg_map[SLJIT_S2] << 3) | 0x4 /* esp */;
		inst[2] = 0x24;
		inst[3] = sizeof(sljit_sw) * (3 + 2); /* saveds >= 3 as well. */
	}
#else
	if (args > 0) {
		inst[0] = MOV_r_rm;
		inst[1] = MOD_DISP8 | (reg_map[SLJIT_S0] << 3) | reg_map[TMP_REG1];
		inst[2] = sizeof(sljit_sw) * 2;
		inst += 3;
	}
	if (args > 1) {
		inst[0] = MOV_r_rm;
		inst[1] = MOD_DISP8 | (reg_map[SLJIT_S1] << 3) | reg_map[TMP_REG1];
		inst[2] = sizeof(sljit_sw) * 3;
		inst += 3;
	}
	if (args > 2) {
		inst[0] = MOV_r_rm;
		inst[1] = MOD_DISP8 | (reg_map[SLJIT_S2] << 3) | reg_map[TMP_REG1];
		inst[2] = sizeof(sljit_sw) * 4;
	}
#endif

	SLJIT_ASSERT(SLJIT_LOCALS_OFFSET > 0);
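
	/* On MacOSX, esp must stay 16 byte aligned at call sites, so the
	   complete frame (pushed registers included) is rounded up to a
	   multiple of 16. */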
#if defined(__APPLE__)
	/* Ignore pushed registers and SLJIT_LOCALS_OFFSET when computing the aligned local size. */
	saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
	local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds;
#else
	if (options & SLJIT_F64_ALIGNMENT)
		local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1));
	else
		local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_sw) - 1) & ~(sizeof(sljit_sw) - 1));
#endif

	compiler->local_size = local_size;
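
	/* On Windows the stack is committed page by page through a guard
	   page, so every 4096 byte page of a new frame has to be touched in
	   order. Small frames are probed with a few loads, larger ones with
	   a short probe loop. */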
#ifdef _WIN32
	if (local_size > 0) {
		if (local_size <= 4 * 4096) {
			if (local_size > 4096)
				EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096);
			if (local_size > 2 * 4096)
				EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 2);
			if (local_size > 3 * 4096)
				EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -4096 * 3);
		}
		else {
			EMIT_MOV(compiler, SLJIT_R0, 0, SLJIT_SP, 0);
			EMIT_MOV(compiler, SLJIT_R1, 0, SLJIT_IMM, (local_size - 1) >> 12);

			SLJIT_ASSERT(reg_map[SLJIT_R0] == 0);

			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_R0), -4096);
			FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
				SLJIT_R0, 0, SLJIT_R0, 0, SLJIT_IMM, 4096));
			FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
				SLJIT_R1, 0, SLJIT_R1, 0, SLJIT_IMM, 1));

			inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
			FAIL_IF(!inst);

			INC_SIZE(2);
			inst[0] = JNE_i8;
			inst[1] = (sljit_s8) -16;
		}

		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), -local_size);
	}
#endif

	SLJIT_ASSERT(local_size > 0);
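
	/* With SLJIT_F64_ALIGNMENT the original esp is kept in TMP_REG1 while
	   esp itself is rounded down to an 8 byte boundary; the saved esp is
	   stored at the end of the frame so the epilogue can restore it with
	   a single load. */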
#if !defined(__APPLE__)
	if (options & SLJIT_F64_ALIGNMENT) {
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_SP, 0);

		/* Some space might be allocated during sljit_grow_stack() above on WIN32. */
		FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
			SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size + sizeof(sljit_sw)));

#if defined _WIN32 && !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
		if (compiler->local_size > 1024)
			FAIL_IF(emit_cum_binary(compiler, BINARY_OPCODE(ADD),
				TMP_REG1, 0, TMP_REG1, 0, SLJIT_IMM, sizeof(sljit_sw)));
#endif

		inst = (sljit_u8*)ensure_buf(compiler, 1 + 6);
		FAIL_IF(!inst);

		INC_SIZE(6);
		inst[0] = GROUP_BINARY_81;
		inst[1] = MOD_REG | AND | reg_map[SLJIT_SP];
		sljit_unaligned_store_sw(inst + 2, ~(sizeof(sljit_f64) - 1));

		/* The real local size must be used. */
		return emit_mov(compiler, SLJIT_MEM1(SLJIT_SP), compiler->local_size, TMP_REG1, 0);
	}
#endif
	return emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
		SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size);
}
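
/* Computes the same frame layout as sljit_emit_enter(), but emits no code. */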
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
{
	CHECK_ERROR();
	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);

	compiler->args = get_arg_count(arg_types);

	/* [esp+0] for saving temporaries and function calls. */
	compiler->stack_tmp_size = 2 * sizeof(sljit_sw);

#if !(defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (scratches > 3)
		compiler->stack_tmp_size = 3 * sizeof(sljit_sw);
#endif

	compiler->saveds_offset = compiler->stack_tmp_size;
	if (scratches > 3)
		compiler->saveds_offset += ((scratches > (3 + 6)) ? 6 : (scratches - 3)) * sizeof(sljit_sw);

	compiler->locals_offset = compiler->saveds_offset;

	if (saveds > 3)
		compiler->locals_offset += (saveds - 3) * sizeof(sljit_sw);

	if (options & SLJIT_F64_ALIGNMENT)
		compiler->locals_offset = (compiler->locals_offset + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1);

#if defined(__APPLE__)
	saveds = (2 + (scratches > 9 ? (scratches - 9) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
	compiler->local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds;
#else
	if (options & SLJIT_F64_ALIGNMENT)
		compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_f64) - 1) & ~(sizeof(sljit_f64) - 1));
	else
		compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + sizeof(sljit_sw) - 1) & ~(sizeof(sljit_sw) - 1));
#endif
	return SLJIT_SUCCESS;
}
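
/* Function epilogue: releases the local frame, pops the saved registers
   in reverse order, and returns. With fastcall the callee also pops its
   third (stack) argument using the "ret imm16" form. */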
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 size;
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_return(compiler, op, src, srcw));
	SLJIT_ASSERT(compiler->args >= 0);

	FAIL_IF(emit_mov_before_return(compiler, op, src, srcw));

	SLJIT_ASSERT(compiler->local_size > 0);

#if !defined(__APPLE__)
	if (compiler->options & SLJIT_F64_ALIGNMENT)
		EMIT_MOV(compiler, SLJIT_SP, 0, SLJIT_MEM1(SLJIT_SP), compiler->local_size)
	else
		FAIL_IF(emit_cum_binary(compiler, BINARY_OPCODE(ADD),
			SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
#else
	FAIL_IF(emit_cum_binary(compiler, BINARY_OPCODE(ADD),
		SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
#endif

	size = 2 + (compiler->scratches > 9 ? (compiler->scratches - 9) : 0) +
		(compiler->saveds <= 3 ? compiler->saveds : 3);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		size += 2;
#else
	if (compiler->args > 0)
		size += 2;
#endif
	inst = (sljit_u8*)ensure_buf(compiler, 1 + size);
	FAIL_IF(!inst);

	INC_SIZE(size);

	if (compiler->saveds > 0 || compiler->scratches > 11)
		POP_REG(reg_map[SLJIT_S0]);
	if (compiler->saveds > 1 || compiler->scratches > 10)
		POP_REG(reg_map[SLJIT_S1]);
	if (compiler->saveds > 2 || compiler->scratches > 9)
		POP_REG(reg_map[SLJIT_S2]);
	POP_REG(reg_map[TMP_REG1]);
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if (compiler->args > 2)
		RET_I16(sizeof(sljit_sw));
	else
		RET();
#else
	RET();
#endif

	return SLJIT_SUCCESS;
}

/* --------------------------------------------------------------------- */
/*  Operators                                                            */
/* --------------------------------------------------------------------- */
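
/* emit_x86_instruction() works in two passes: the first computes the
   exact instruction length (prefixes, opcode area, mod r/m byte, optional
   SIB byte, displacement and immediate), the second reserves the buffer
   and emits the operand bytes. The returned pointer addresses the opcode
   area, which the caller fills in afterwards, as the fast enter path
   below does:

     inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
     FAIL_IF(!inst);
     *inst++ = POP_rm;
*/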
/* Size contains the flags as well. */
static sljit_u8* emit_x86_instruction(struct sljit_compiler *compiler, sljit_s32 size,
	/* The register or immediate operand. */
	sljit_s32 a, sljit_sw imma,
	/* The general operand (not immediate). */
	sljit_s32 b, sljit_sw immb)
{
	sljit_u8 *inst;
	sljit_u8 *buf_ptr;
	sljit_s32 flags = size & ~0xf;
	sljit_s32 inst_size;

	/* Both cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BIN_INS | EX86_SHIFT_INS)) != (EX86_BIN_INS | EX86_SHIFT_INS));
	/* Size flags not allowed for typed instructions. */
	SLJIT_ASSERT(!(flags & (EX86_BIN_INS | EX86_SHIFT_INS)) || (flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) == 0);
	/* Both size flags cannot be switched on. */
	SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
	/* SSE2 and immediate is not possible. */
	SLJIT_ASSERT(!(a & SLJIT_IMM) || !(flags & EX86_SSE2));
	SLJIT_ASSERT((flags & (EX86_PREF_F2 | EX86_PREF_F3)) != (EX86_PREF_F2 | EX86_PREF_F3)
		&& (flags & (EX86_PREF_F2 | EX86_PREF_66)) != (EX86_PREF_F2 | EX86_PREF_66)
		&& (flags & (EX86_PREF_F3 | EX86_PREF_66)) != (EX86_PREF_F3 | EX86_PREF_66));

	size &= 0xf;
	inst_size = size;

	if (flags & (EX86_PREF_F2 | EX86_PREF_F3))
		inst_size++;
	if (flags & EX86_PREF_66)
		inst_size++;

	/* Calculate size of b. */
	inst_size += 1; /* mod r/m byte. */
	if (b & SLJIT_MEM) {
		if ((b & REG_MASK) == SLJIT_UNUSED)
			inst_size += sizeof(sljit_sw);
		else if (immb != 0 && !(b & OFFS_REG_MASK)) {
			/* Immediate operand. */
			if (immb <= 127 && immb >= -128)
				inst_size += sizeof(sljit_s8);
			else
				inst_size += sizeof(sljit_sw);
		}

		if ((b & REG_MASK) == SLJIT_SP && !(b & OFFS_REG_MASK))
			b |= TO_OFFS_REG(SLJIT_SP);

		if ((b & OFFS_REG_MASK) != SLJIT_UNUSED)
			inst_size += 1; /* SIB byte. */
	}

	/* Calculate size of a. */
	if (a & SLJIT_IMM) {
		if (flags & EX86_BIN_INS) {
			if (imma <= 127 && imma >= -128) {
				inst_size += 1;
				flags |= EX86_BYTE_ARG;
			} else
				inst_size += 4;
		}
		else if (flags & EX86_SHIFT_INS) {
			imma &= 0x1f;
			if (imma != 1) {
				inst_size++;
				flags |= EX86_BYTE_ARG;
			}
		} else if (flags & EX86_BYTE_ARG)
			inst_size++;
		else if (flags & EX86_HALF_ARG)
			inst_size += sizeof(short);
		else
			inst_size += sizeof(sljit_sw);
	}
	else
		SLJIT_ASSERT(!(flags & EX86_SHIFT_INS) || a == SLJIT_PREF_SHIFT_REG);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + inst_size);
	PTR_FAIL_IF(!inst);

	/* Encoding the byte. */
	INC_SIZE(inst_size);
	if (flags & EX86_PREF_F2)
		*inst++ = 0xf2;
	if (flags & EX86_PREF_F3)
		*inst++ = 0xf3;
	if (flags & EX86_PREF_66)
		*inst++ = 0x66;

	buf_ptr = inst + size;

	/* Encode mod/rm byte. */
	if (!(flags & EX86_SHIFT_INS)) {
		if ((flags & EX86_BIN_INS) && (a & SLJIT_IMM))
			*inst = (flags & EX86_BYTE_ARG) ? GROUP_BINARY_83 : GROUP_BINARY_81;

		if (a & SLJIT_IMM)
			*buf_ptr = 0;
		else if (!(flags & EX86_SSE2_OP1))
			*buf_ptr = reg_map[a] << 3;
		else
			*buf_ptr = a << 3;
	}
	else {
		if (a & SLJIT_IMM) {
			if (imma == 1)
				*inst = GROUP_SHIFT_1;
			else
				*inst = GROUP_SHIFT_N;
		} else
			*inst = GROUP_SHIFT_CL;
		*buf_ptr = 0;
	}

	if (!(b & SLJIT_MEM))
		*buf_ptr++ |= MOD_REG + ((!(flags & EX86_SSE2_OP2)) ? reg_map[b] : b);
	else if ((b & REG_MASK) != SLJIT_UNUSED) {
		if ((b & OFFS_REG_MASK) == SLJIT_UNUSED || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP)) {
			if (immb != 0) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr |= 0x40;
				else
					*buf_ptr |= 0x80;
			}

			if ((b & OFFS_REG_MASK) == SLJIT_UNUSED)
				*buf_ptr++ |= reg_map[b & REG_MASK];
			else {
				*buf_ptr++ |= 0x04;
				*buf_ptr++ = reg_map[b & REG_MASK] | (reg_map[OFFS_REG(b)] << 3);
			}

			if (immb != 0) {
				if (immb <= 127 && immb >= -128)
					*buf_ptr++ = immb; /* 8 bit displacement. */
				else {
					sljit_unaligned_store_sw(buf_ptr, immb); /* 32 bit displacement. */
					buf_ptr += sizeof(sljit_sw);
				}
			}
		}
		else {
			*buf_ptr++ |= 0x04;
			*buf_ptr++ = reg_map[b & REG_MASK] | (reg_map[OFFS_REG(b)] << 3) | (immb << 6);
		}
	}
	else {
		*buf_ptr++ |= 0x05;
		sljit_unaligned_store_sw(buf_ptr, immb); /* 32 bit displacement. */
		buf_ptr += sizeof(sljit_sw);
	}

	if (a & SLJIT_IMM) {
		if (flags & EX86_BYTE_ARG)
			*buf_ptr = imma;
		else if (flags & EX86_HALF_ARG)
			sljit_unaligned_store_s16(buf_ptr, imma);
		else if (!(flags & EX86_SHIFT_INS))
			sljit_unaligned_store_sw(buf_ptr, imma);
	}

	return !(flags & EX86_SHIFT_INS) ? inst : (inst + 1);
}

/* --------------------------------------------------------------------- */
/*  Call / return instructions                                           */
/* --------------------------------------------------------------------- */
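
/* With SLJIT_X86_32_FASTCALL the first two word arguments travel in ecx
   and edx; only the remaining word arguments and all float arguments are
   passed on the stack. */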
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)

static sljit_s32 c_fast_call_get_stack_size(sljit_s32 arg_types, sljit_s32 *word_arg_count_ptr)
{
	sljit_s32 stack_size = 0;
	sljit_s32 word_arg_count = 0;

	arg_types >>= SLJIT_DEF_SHIFT;

	while (arg_types) {
		switch (arg_types & SLJIT_DEF_MASK) {
		case SLJIT_ARG_TYPE_F32:
			stack_size += sizeof(sljit_f32);
			break;
		case SLJIT_ARG_TYPE_F64:
			stack_size += sizeof(sljit_f64);
			break;
		default:
			word_arg_count++;
			if (word_arg_count > 2)
				stack_size += sizeof(sljit_sw);
			break;
		}

		arg_types >>= SLJIT_DEF_SHIFT;
	}

	if (word_arg_count_ptr)
		*word_arg_count_ptr = word_arg_count;

	return stack_size;
}
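
/* Moves the outgoing arguments of a fastcall call into place: word
   arguments three and four are flushed to the freshly allocated stack
   area, and the first word argument is moved from eax into ecx (via
   "xchg eax, ecx" when swap_args is set, because the call target then
   lives in ecx). */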
static sljit_s32 c_fast_call_with_args(struct sljit_compiler *compiler,
	sljit_s32 arg_types, sljit_s32 stack_size, sljit_s32 word_arg_count, sljit_s32 swap_args)
{
	sljit_u8 *inst;
	sljit_s32 float_arg_count;

	if (stack_size == sizeof(sljit_sw) && word_arg_count == 3) {
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);
		INC_SIZE(1);
		PUSH_REG(reg_map[SLJIT_R2]);
	}
	else if (stack_size > 0) {
		if (word_arg_count >= 4)
			EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), compiler->saveds_offset - sizeof(sljit_sw));

		FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
			SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, stack_size));

		stack_size = 0;
		arg_types >>= SLJIT_DEF_SHIFT;
		word_arg_count = 0;
		float_arg_count = 0;
		while (arg_types) {
			switch (arg_types & SLJIT_DEF_MASK) {
			case SLJIT_ARG_TYPE_F32:
				float_arg_count++;
				FAIL_IF(emit_sse2_store(compiler, 1, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
				stack_size += sizeof(sljit_f32);
				break;
			case SLJIT_ARG_TYPE_F64:
				float_arg_count++;
				FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
				stack_size += sizeof(sljit_f64);
				break;
			default:
				word_arg_count++;
				if (word_arg_count == 3) {
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, SLJIT_R2, 0);
					stack_size += sizeof(sljit_sw);
				}
				else if (word_arg_count == 4) {
					EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, TMP_REG1, 0);
					stack_size += sizeof(sljit_sw);
				}
				break;
			}

			arg_types >>= SLJIT_DEF_SHIFT;
		}
	}

	if (word_arg_count > 0) {
		if (swap_args) {
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
			FAIL_IF(!inst);
			INC_SIZE(1);
			*inst++ = XCHG_EAX_r | reg_map[SLJIT_R2];
		}
		else {
			inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
			FAIL_IF(!inst);
			INC_SIZE(2);
			*inst++ = MOV_r_rm;
			*inst++ = MOD_REG | (reg_map[SLJIT_R2] << 3) | reg_map[SLJIT_R0];
		}
	}

	return SLJIT_SUCCESS;
}

#endif
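
/* Returns the extra stack space a cdecl call needs beyond the
   stack_tmp_size area that every frame already reserves; on MacOSX the
   result is rounded up to keep esp 16 byte aligned. */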
static sljit_s32 cdecl_call_get_stack_size(struct sljit_compiler *compiler, sljit_s32 arg_types, sljit_s32 *word_arg_count_ptr)
{
	sljit_s32 stack_size = 0;
	sljit_s32 word_arg_count = 0;

	arg_types >>= SLJIT_DEF_SHIFT;

	while (arg_types) {
		switch (arg_types & SLJIT_DEF_MASK) {
		case SLJIT_ARG_TYPE_F32:
			stack_size += sizeof(sljit_f32);
			break;
		case SLJIT_ARG_TYPE_F64:
			stack_size += sizeof(sljit_f64);
			break;
		default:
			word_arg_count++;
			stack_size += sizeof(sljit_sw);
			break;
		}

		arg_types >>= SLJIT_DEF_SHIFT;
	}

	if (word_arg_count_ptr)
		*word_arg_count_ptr = word_arg_count;

	if (stack_size <= compiler->stack_tmp_size)
		return 0;

#if defined(__APPLE__)
	return ((stack_size - compiler->stack_tmp_size + 15) & ~15);
#else
	return stack_size - compiler->stack_tmp_size;
#endif
}
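
/* Stores all outgoing arguments into the stack area computed above. Word
   arguments 1-3 are still in SLJIT_R0-R2; the fourth word argument is
   SLJIT_R3, a memory backed (virtual) register on x86-32, so it is
   reloaded through TMP_REG1 first. */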
static sljit_s32 cdecl_call_with_args(struct sljit_compiler *compiler,
	sljit_s32 arg_types, sljit_s32 stack_size, sljit_s32 word_arg_count)
{
	sljit_s32 float_arg_count = 0;

	if (word_arg_count >= 4)
		EMIT_MOV(compiler, TMP_REG1, 0, SLJIT_MEM1(SLJIT_SP), compiler->saveds_offset - sizeof(sljit_sw));

	if (stack_size > 0)
		FAIL_IF(emit_non_cum_binary(compiler, BINARY_OPCODE(SUB),
			SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, stack_size));

	stack_size = 0;
	word_arg_count = 0;
	arg_types >>= SLJIT_DEF_SHIFT;

	while (arg_types) {
		switch (arg_types & SLJIT_DEF_MASK) {
		case SLJIT_ARG_TYPE_F32:
			float_arg_count++;
			FAIL_IF(emit_sse2_store(compiler, 1, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
			stack_size += sizeof(sljit_f32);
			break;
		case SLJIT_ARG_TYPE_F64:
			float_arg_count++;
			FAIL_IF(emit_sse2_store(compiler, 0, SLJIT_MEM1(SLJIT_SP), stack_size, float_arg_count));
			stack_size += sizeof(sljit_f64);
			break;
		default:
			word_arg_count++;
			EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_SP), stack_size, (word_arg_count >= 4) ? TMP_REG1 : word_arg_count, 0);
			stack_size += sizeof(sljit_sw);
			break;
		}

		arg_types >>= SLJIT_DEF_SHIFT;
	}

	return SLJIT_SUCCESS;
}
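
/* Caller side cleanup: releases the argument area, then, for float return
   types, stores st0 to [esp] with FSTPS/FSTPD and reloads the value into
   SLJIT_FR0 as an SSE2 register. */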
static sljit_s32 post_call_with_args(struct sljit_compiler *compiler,
	sljit_s32 arg_types, sljit_s32 stack_size)
{
	sljit_u8 *inst;
	sljit_s32 single;

	if (stack_size > 0)
		FAIL_IF(emit_cum_binary(compiler, BINARY_OPCODE(ADD),
			SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, stack_size));

	if ((arg_types & SLJIT_DEF_MASK) < SLJIT_ARG_TYPE_F32)
		return SLJIT_SUCCESS;

	single = ((arg_types & SLJIT_DEF_MASK) == SLJIT_ARG_TYPE_F32);

	inst = (sljit_u8*)ensure_buf(compiler, 1 + 3);
	FAIL_IF(!inst);
	INC_SIZE(3);
	inst[0] = single ? FSTPS : FSTPD;
	inst[1] = (0x03 << 3) | 0x04;
	inst[2] = (0x04 << 3) | reg_map[SLJIT_SP];

	return emit_sse2_load(compiler, single, SLJIT_FR0, SLJIT_MEM1(SLJIT_SP), 0);
}
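
/* The arguments are laid out according to the call type, the jump itself
   is emitted by sljit_emit_jump(), and post_call_with_args() restores the
   stack and fetches a float return value if there is one. */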
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types)
{
	struct sljit_jump *jump;
	sljit_s32 stack_size = 0;
	sljit_s32 word_arg_count;

	CHECK_ERROR_PTR();
	CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	if ((type & 0xff) == SLJIT_CALL) {
		stack_size = c_fast_call_get_stack_size(arg_types, &word_arg_count);
		PTR_FAIL_IF(c_fast_call_with_args(compiler, arg_types, stack_size, word_arg_count, 0));

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
		compiler->skip_checks = 1;
#endif

		jump = sljit_emit_jump(compiler, type);
		PTR_FAIL_IF(jump == NULL);

		PTR_FAIL_IF(post_call_with_args(compiler, arg_types, 0));
		return jump;
	}
#endif

	stack_size = cdecl_call_get_stack_size(compiler, arg_types, &word_arg_count);
	PTR_FAIL_IF(cdecl_call_with_args(compiler, arg_types, stack_size, word_arg_count));

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
	|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
	compiler->skip_checks = 1;
#endif

	jump = sljit_emit_jump(compiler, type);
	PTR_FAIL_IF(jump == NULL);

	PTR_FAIL_IF(post_call_with_args(compiler, arg_types, stack_size));
	return jump;
}
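
/* Same as sljit_emit_call() for computed targets. On the fastcall path
   the target must survive the argument shuffle: when src uses SLJIT_R2
   (ecx), the arguments are emitted with the "xchg eax, ecx" form and src
   is rewritten to reference SLJIT_R0 instead. */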
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
	sljit_s32 arg_types,
	sljit_s32 src, sljit_sw srcw)
{
	sljit_s32 stack_size = 0;
	sljit_s32 word_arg_count;
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	sljit_s32 swap_args;
#endif

	CHECK_ERROR();
	CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));

#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
	SLJIT_ASSERT(reg_map[SLJIT_R0] == 0 && reg_map[SLJIT_R2] == 1 && SLJIT_R0 == 1 && SLJIT_R2 == 3);

	if ((type & 0xff) == SLJIT_CALL) {
		stack_size = c_fast_call_get_stack_size(arg_types, &word_arg_count);
		swap_args = 0;

		if (word_arg_count > 0) {
			if ((src & REG_MASK) == SLJIT_R2 || OFFS_REG(src) == SLJIT_R2) {
				swap_args = 1;
				if (((src & REG_MASK) | 0x2) == SLJIT_R2)
					src ^= 0x2;
				if ((OFFS_REG(src) | 0x2) == SLJIT_R2)
					src ^= TO_OFFS_REG(0x2);
			}
		}

		FAIL_IF(c_fast_call_with_args(compiler, arg_types, stack_size, word_arg_count, swap_args));

		compiler->saveds_offset += stack_size;
		compiler->locals_offset += stack_size;

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
		|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
		compiler->skip_checks = 1;
#endif
		FAIL_IF(sljit_emit_ijump(compiler, type, src, srcw));

		compiler->saveds_offset -= stack_size;
		compiler->locals_offset -= stack_size;

		return post_call_with_args(compiler, arg_types, 0);
	}
#endif

	stack_size = cdecl_call_get_stack_size(compiler, arg_types, &word_arg_count);
	FAIL_IF(cdecl_call_with_args(compiler, arg_types, stack_size, word_arg_count));

	compiler->saveds_offset += stack_size;
	compiler->locals_offset += stack_size;

#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) \
	|| (defined SLJIT_ARGUMENT_CHECKS && SLJIT_ARGUMENT_CHECKS)
	compiler->skip_checks = 1;
#endif
	FAIL_IF(sljit_emit_ijump(compiler, type, src, srcw));

	compiler->saveds_offset -= stack_size;
	compiler->locals_offset -= stack_size;

	return post_call_with_args(compiler, arg_types, stack_size);
}
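
/* SLJIT_FAST_CALL support: the call instruction pushed the return
   address, so entering the fast function simply pops it into dst. */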
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
{
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
	ADJUST_LOCAL_OFFSET(dst, dstw);

	CHECK_EXTRA_REGS(dst, dstw, (void)0);

	/* For UNUSED dst. Uncommon, but possible. */
	if (dst == SLJIT_UNUSED)
		dst = TMP_REG1;

	if (FAST_IS_REG(dst)) {
		/* Unused dest is possible here. */
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);

		INC_SIZE(1);
		POP_REG(reg_map[dst]);
		return SLJIT_SUCCESS;
	}

	/* Memory. */
	inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
	FAIL_IF(!inst);
	*inst++ = POP_rm;
	return SLJIT_SUCCESS;
}
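
/* Counterpart of sljit_emit_fast_enter(): pushes the return address kept
   in src back onto the stack and returns through it. */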
SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)
{
	sljit_u8 *inst;

	CHECK_ERROR();
	CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
	ADJUST_LOCAL_OFFSET(src, srcw);

	CHECK_EXTRA_REGS(src, srcw, (void)0);

	if (FAST_IS_REG(src)) {
		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1 + 1);
		FAIL_IF(!inst);

		INC_SIZE(1 + 1);
		PUSH_REG(reg_map[src]);
	}
	else {
		inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw);
		FAIL_IF(!inst);
		*inst++ = GROUP_FF;
		*inst |= PUSH_rm;

		inst = (sljit_u8*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!inst);
		INC_SIZE(1);
	}

	RET();
	return SLJIT_SUCCESS;
}