X-Git-Url: http://www.chiark.greenend.org.uk/ucgi/~ian/git?a=blobdiff_plain;f=sljit%2FsljitNativeX86_32.c;fp=sljit%2FsljitNativeX86_32.c;h=d7129c8e26dfcdd853f0c2280f4e6fdaf3c69041;hb=86ef59c527b0015060b75bf41698f0310bb1ef72;hp=dd03f26618bc75e0868d13dde58ee8191433958f;hpb=519a17c5e5fa25f6abb42e289fdcd4a5e769ea4b;p=pcre3.git

diff --git a/sljit/sljitNativeX86_32.c b/sljit/sljitNativeX86_32.c
index dd03f26..d7129c8 100644
--- a/sljit/sljitNativeX86_32.c
+++ b/sljit/sljitNativeX86_32.c
@@ -63,27 +63,25 @@ static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_
 	return code_ptr;
 }
 
-SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler, sljit_si args, sljit_si scratches, sljit_si saveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compiler *compiler,
+	sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
+	sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
 {
 	sljit_si size;
-	sljit_si locals_offset;
 	sljit_ub *inst;
 
 	CHECK_ERROR();
-	check_sljit_emit_enter(compiler, args, scratches, saveds, local_size);
+	CHECK(check_sljit_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
+	set_emit_enter(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
 
-	compiler->scratches = scratches;
-	compiler->saveds = saveds;
 	compiler->args = args;
 	compiler->flags_saved = 0;
-#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
-	compiler->logical_local_size = local_size;
-#endif
 
+	size = 1 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3);
 #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
-	size = 1 + (saveds <= 3 ? saveds : 3) + (args > 0 ? (args * 2) : 0) + (args > 2 ? 2 : 0);
+	size += (args > 0 ? (args * 2) : 0) + (args > 2 ? 2 : 0);
 #else
-	size = 1 + (saveds <= 3 ? saveds : 3) + (args > 0 ? (2 + args * 3) : 0);
+	size += (args > 0 ? (2 + args * 3) : 0);
 #endif
 	inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
 	FAIL_IF(!inst);
@@ -96,76 +94,85 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compil
 		*inst++ = MOD_REG | (reg_map[TMP_REG1] << 3) | 0x4 /* esp */;
 	}
 #endif
-	if (saveds > 2)
-		PUSH_REG(reg_map[SLJIT_SAVED_REG3]);
-	if (saveds > 1)
-		PUSH_REG(reg_map[SLJIT_SAVED_REG2]);
-	if (saveds > 0)
-		PUSH_REG(reg_map[SLJIT_SAVED_REG1]);
+	if (saveds > 2 || scratches > 7)
+		PUSH_REG(reg_map[SLJIT_S2]);
+	if (saveds > 1 || scratches > 8)
+		PUSH_REG(reg_map[SLJIT_S1]);
+	if (saveds > 0 || scratches > 9)
+		PUSH_REG(reg_map[SLJIT_S0]);
 
 #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
 	if (args > 0) {
 		*inst++ = MOV_r_rm;
-		*inst++ = MOD_REG | (reg_map[SLJIT_SAVED_REG1] << 3) | reg_map[SLJIT_SCRATCH_REG3];
+		*inst++ = MOD_REG | (reg_map[SLJIT_S0] << 3) | reg_map[SLJIT_R2];
 	}
 	if (args > 1) {
 		*inst++ = MOV_r_rm;
-		*inst++ = MOD_REG | (reg_map[SLJIT_SAVED_REG2] << 3) | reg_map[SLJIT_SCRATCH_REG2];
+		*inst++ = MOD_REG | (reg_map[SLJIT_S1] << 3) | reg_map[SLJIT_R1];
 	}
 	if (args > 2) {
 		*inst++ = MOV_r_rm;
-		*inst++ = MOD_DISP8 | (reg_map[SLJIT_SAVED_REG3] << 3) | 0x4 /* esp */;
+		*inst++ = MOD_DISP8 | (reg_map[SLJIT_S2] << 3) | 0x4 /* esp */;
 		*inst++ = 0x24;
 		*inst++ = sizeof(sljit_sw) * (3 + 2); /* saveds >= 3 as well. */
 	}
 #else
 	if (args > 0) {
 		*inst++ = MOV_r_rm;
-		*inst++ = MOD_DISP8 | (reg_map[SLJIT_SAVED_REG1] << 3) | reg_map[TMP_REG1];
+		*inst++ = MOD_DISP8 | (reg_map[SLJIT_S0] << 3) | reg_map[TMP_REG1];
 		*inst++ = sizeof(sljit_sw) * 2;
 	}
 	if (args > 1) {
 		*inst++ = MOV_r_rm;
-		*inst++ = MOD_DISP8 | (reg_map[SLJIT_SAVED_REG2] << 3) | reg_map[TMP_REG1];
+		*inst++ = MOD_DISP8 | (reg_map[SLJIT_S1] << 3) | reg_map[TMP_REG1];
 		*inst++ = sizeof(sljit_sw) * 3;
 	}
 	if (args > 2) {
 		*inst++ = MOV_r_rm;
-		*inst++ = MOD_DISP8 | (reg_map[SLJIT_SAVED_REG3] << 3) | reg_map[TMP_REG1];
+		*inst++ = MOD_DISP8 | (reg_map[SLJIT_S2] << 3) | reg_map[TMP_REG1];
 		*inst++ = sizeof(sljit_sw) * 4;
 	}
 #endif
 
-#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
-	locals_offset = 2 * sizeof(sljit_uw);
-#else
-	SLJIT_COMPILE_ASSERT(FIXED_LOCALS_OFFSET >= 2 * sizeof(sljit_uw), require_at_least_two_words);
-	locals_offset = FIXED_LOCALS_OFFSET;
-#endif
-	compiler->scratches_start = locals_offset;
-	if (scratches > 3)
-		locals_offset += (scratches - 3) * sizeof(sljit_uw);
-	compiler->saveds_start = locals_offset;
-	if (saveds > 3)
-		locals_offset += (saveds - 3) * sizeof(sljit_uw);
-	compiler->locals_offset = locals_offset;
+	SLJIT_COMPILE_ASSERT(SLJIT_LOCALS_OFFSET >= (2 + 4) * sizeof(sljit_uw), require_at_least_two_words);
 
 #if defined(__APPLE__)
-	saveds = (2 + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
-	local_size = ((locals_offset + saveds + local_size + 15) & ~15) - saveds;
+	/* Ignore pushed registers and SLJIT_LOCALS_OFFSET when computing the aligned local size. */
+	saveds = (2 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
+	local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds;
 #else
-	local_size = locals_offset + ((local_size + sizeof(sljit_uw) - 1) & ~(sizeof(sljit_uw) - 1));
+	if (options & SLJIT_DOUBLE_ALIGNMENT) {
+		local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7);
+
+		inst = (sljit_ub*)ensure_buf(compiler, 1 + 17);
+		FAIL_IF(!inst);
+
+		INC_SIZE(17);
+		inst[0] = MOV_r_rm;
+		inst[1] = MOD_REG | (reg_map[TMP_REG1] << 3) | reg_map[SLJIT_SP];
+		inst[2] = GROUP_F7;
+		inst[3] = MOD_REG | (0 << 3) | reg_map[SLJIT_SP];
+		*(sljit_sw*)(inst + 4) = 0x4;
+		inst[8] = JNE_i8;
+		inst[9] = 6;
+		inst[10] = GROUP_BINARY_81;
+		inst[11] = MOD_REG | (5 << 3) | reg_map[SLJIT_SP];
+		*(sljit_sw*)(inst + 12) = 0x4;
+		inst[16] = PUSH_r + reg_map[TMP_REG1];
+	}
+	else
+		local_size = SLJIT_LOCALS_OFFSET + ((local_size + 3) & ~3);
 #endif
 
 	compiler->local_size = local_size;
 #ifdef _WIN32
 	if (local_size > 1024) {
 #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
-		FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_SCRATCH_REG1], local_size));
+		FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_R0], local_size));
 #else
-		local_size -= FIXED_LOCALS_OFFSET;
-		FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_SCRATCH_REG1], local_size));
+		local_size -= SLJIT_LOCALS_OFFSET;
+		FAIL_IF(emit_do_imm(compiler, MOV_r_i32 + reg_map[SLJIT_R0], local_size));
 		FAIL_IF(emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32,
-			SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, FIXED_LOCALS_OFFSET));
+			SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, SLJIT_LOCALS_OFFSET));
 #endif
 		FAIL_IF(sljit_emit_ijump(compiler, SLJIT_CALL1, SLJIT_IMM, SLJIT_FUNC_OFFSET(sljit_grow_stack)));
 	}
@@ -173,41 +180,29 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_enter(struct sljit_compil
 
 	SLJIT_ASSERT(local_size > 0);
 	return emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32,
-		SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, local_size);
+		SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, local_size);
 }
 
-SLJIT_API_FUNC_ATTRIBUTE void sljit_set_context(struct sljit_compiler *compiler, sljit_si args, sljit_si scratches, sljit_si saveds, sljit_si local_size)
+SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_set_context(struct sljit_compiler *compiler,
+	sljit_si options, sljit_si args, sljit_si scratches, sljit_si saveds,
+	sljit_si fscratches, sljit_si fsaveds, sljit_si local_size)
 {
-	sljit_si locals_offset;
-
-	CHECK_ERROR_VOID();
-	check_sljit_set_context(compiler, args, scratches, saveds, local_size);
+	CHECK_ERROR();
+	CHECK(check_sljit_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size));
+	set_set_context(compiler, options, args, scratches, saveds, fscratches, fsaveds, local_size);
 
-	compiler->scratches = scratches;
-	compiler->saveds = saveds;
 	compiler->args = args;
-#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
-	compiler->logical_local_size = local_size;
-#endif
 
-#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
-	locals_offset = 2 * sizeof(sljit_uw);
-#else
-	locals_offset = FIXED_LOCALS_OFFSET;
-#endif
-	compiler->scratches_start = locals_offset;
-	if (scratches > 3)
-		locals_offset += (scratches - 3) * sizeof(sljit_uw);
-	compiler->saveds_start = locals_offset;
-	if (saveds > 3)
-		locals_offset += (saveds - 3) * sizeof(sljit_uw);
-	compiler->locals_offset = locals_offset;
 #if defined(__APPLE__)
-	saveds = (2 + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
-	compiler->local_size = ((locals_offset + saveds + local_size + 15) & ~15) - saveds;
+	saveds = (2 + (scratches > 7 ? (scratches - 7) : 0) + (saveds <= 3 ? saveds : 3)) * sizeof(sljit_uw);
+	compiler->local_size = ((SLJIT_LOCALS_OFFSET + saveds + local_size + 15) & ~15) - saveds;
 #else
-	compiler->local_size = locals_offset + ((local_size + sizeof(sljit_uw) - 1) & ~(sizeof(sljit_uw) - 1));
+	if (options & SLJIT_DOUBLE_ALIGNMENT)
+		compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + 7) & ~7);
+	else
+		compiler->local_size = SLJIT_LOCALS_OFFSET + ((local_size + 3) & ~3);
 #endif
+	return SLJIT_SUCCESS;
 }
 
 SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compiler *compiler, sljit_si op, sljit_si src, sljit_sw srcw)
@@ -216,7 +211,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compi
 	sljit_ub *inst;
 
 	CHECK_ERROR();
-	check_sljit_emit_return(compiler, op, src, srcw);
+	CHECK(check_sljit_emit_return(compiler, op, src, srcw));
 	SLJIT_ASSERT(compiler->args >= 0);
 
 	compiler->flags_saved = 0;
@@ -224,9 +219,22 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compi
 
 	SLJIT_ASSERT(compiler->local_size > 0);
 	FAIL_IF(emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32,
-		SLJIT_LOCALS_REG, 0, SLJIT_LOCALS_REG, 0, SLJIT_IMM, compiler->local_size));
+		SLJIT_SP, 0, SLJIT_SP, 0, SLJIT_IMM, compiler->local_size));
+
+#if !defined(__APPLE__)
+	if (compiler->options & SLJIT_DOUBLE_ALIGNMENT) {
+		inst = (sljit_ub*)ensure_buf(compiler, 1 + 3);
+		FAIL_IF(!inst);
+
+		INC_SIZE(3);
+		inst[0] = MOV_r_rm;
+		inst[1] = (reg_map[SLJIT_SP] << 3) | 0x4 /* SIB */;
+		inst[2] = (4 << 3) | reg_map[SLJIT_SP];
+	}
+#endif
 
-	size = 2 + (compiler->saveds <= 3 ? compiler->saveds : 3);
+	size = 2 + (compiler->scratches > 7 ? (compiler->scratches - 7) : 0) +
+		(compiler->saveds <= 3 ? compiler->saveds : 3);
 #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
 	if (compiler->args > 2)
 		size += 2;
@@ -239,12 +247,12 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_return(struct sljit_compi
 
 	INC_SIZE(size);
 
-	if (compiler->saveds > 0)
-		POP_REG(reg_map[SLJIT_SAVED_REG1]);
-	if (compiler->saveds > 1)
-		POP_REG(reg_map[SLJIT_SAVED_REG2]);
-	if (compiler->saveds > 2)
-		POP_REG(reg_map[SLJIT_SAVED_REG3]);
+	if (compiler->saveds > 0 || compiler->scratches > 9)
+		POP_REG(reg_map[SLJIT_S0]);
+	if (compiler->saveds > 1 || compiler->scratches > 8)
+		POP_REG(reg_map[SLJIT_S1]);
+	if (compiler->saveds > 2 || compiler->scratches > 7)
+		POP_REG(reg_map[SLJIT_S2]);
 	POP_REG(reg_map[TMP_REG1]);
 #if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL)
 	if (compiler->args > 2)
@@ -280,21 +288,17 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
 	SLJIT_ASSERT(!(flags & (EX86_BIN_INS | EX86_SHIFT_INS)) || (flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) == 0);
 	/* Both size flags cannot be switched on. */
 	SLJIT_ASSERT((flags & (EX86_BYTE_ARG | EX86_HALF_ARG)) != (EX86_BYTE_ARG | EX86_HALF_ARG));
-#if (defined SLJIT_SSE2 && SLJIT_SSE2)
 	/* SSE2 and immediate is not possible. */
 	SLJIT_ASSERT(!(a & SLJIT_IMM) || !(flags & EX86_SSE2));
 	SLJIT_ASSERT((flags & (EX86_PREF_F2 | EX86_PREF_F3)) != (EX86_PREF_F2 | EX86_PREF_F3)
		&& (flags & (EX86_PREF_F2 | EX86_PREF_66)) != (EX86_PREF_F2 | EX86_PREF_66)
		&& (flags & (EX86_PREF_F3 | EX86_PREF_66)) != (EX86_PREF_F3 | EX86_PREF_66));
-#endif
 
 	size &= 0xf;
 	inst_size = size;
 
-#if (defined SLJIT_SSE2 && SLJIT_SSE2)
 	if (flags & (EX86_PREF_F2 | EX86_PREF_F3))
 		inst_size++;
-#endif
 	if (flags & EX86_PREF_66)
 		inst_size++;
 
@@ -311,8 +315,8 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
 			inst_size += sizeof(sljit_sw);
 		}
 
-		if ((b & REG_MASK) == SLJIT_LOCALS_REG && !(b & OFFS_REG_MASK))
-			b |= TO_OFFS_REG(SLJIT_LOCALS_REG);
+		if ((b & REG_MASK) == SLJIT_SP && !(b & OFFS_REG_MASK))
+			b |= TO_OFFS_REG(SLJIT_SP);
 
 		if ((b & OFFS_REG_MASK) != SLJIT_UNUSED)
 			inst_size += 1; /* SIB byte. */
@@ -348,12 +352,10 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
 
 	/* Encoding the byte. */
 	INC_SIZE(inst_size);
-#if (defined SLJIT_SSE2 && SLJIT_SSE2)
 	if (flags & EX86_PREF_F2)
 		*inst++ = 0xf2;
 	if (flags & EX86_PREF_F3)
 		*inst++ = 0xf3;
-#endif
 	if (flags & EX86_PREF_66)
 		*inst++ = 0x66;
 
@@ -366,15 +368,10 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
 
 		if ((a & SLJIT_IMM) || (a == 0))
 			*buf_ptr = 0;
-#if (defined SLJIT_SSE2 && SLJIT_SSE2)
-		else if (!(flags & EX86_SSE2))
+		else if (!(flags & EX86_SSE2_OP1))
 			*buf_ptr = reg_map[a] << 3;
 		else
 			*buf_ptr = a << 3;
-#else
-		else
-			*buf_ptr = reg_map[a] << 3;
-#endif
 	}
 	else {
 		if (a & SLJIT_IMM) {
@@ -388,13 +385,9 @@ static sljit_ub* emit_x86_instruction(struct sljit_compiler *compiler, sljit_si
 	}
 
 	if (!(b & SLJIT_MEM))
-#if (defined SLJIT_SSE2 && SLJIT_SSE2)
-		*buf_ptr++ |= MOD_REG + ((!(flags & EX86_SSE2)) ? reg_map[b] : b);
-#else
-		*buf_ptr++ |= MOD_REG + reg_map[b];
-#endif
+		*buf_ptr++ |= MOD_REG + ((!(flags & EX86_SSE2_OP2)) ? reg_map[b] : b);
 	else if ((b & REG_MASK) != SLJIT_UNUSED) {
-		if ((b & OFFS_REG_MASK) == SLJIT_UNUSED || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_LOCALS_REG)) {
+		if ((b & OFFS_REG_MASK) == SLJIT_UNUSED || (b & OFFS_REG_MASK) == TO_OFFS_REG(SLJIT_SP)) {
 			if (immb != 0) {
 				if (immb <= 127 && immb >= -128)
 					*buf_ptr |= 0x40;
@@ -455,28 +448,28 @@ static SLJIT_INLINE sljit_si call_with_args(struct sljit_compiler *compiler, slj
 	INC_SIZE(type >= SLJIT_CALL3 ? 2 + 1 : 2);
 
 	if (type >= SLJIT_CALL3)
-		PUSH_REG(reg_map[SLJIT_SCRATCH_REG3]);
+		PUSH_REG(reg_map[SLJIT_R2]);
 	*inst++ = MOV_r_rm;
-	*inst++ = MOD_REG | (reg_map[SLJIT_SCRATCH_REG3] << 3) | reg_map[SLJIT_SCRATCH_REG1];
+	*inst++ = MOD_REG | (reg_map[SLJIT_R2] << 3) | reg_map[SLJIT_R0];
 #else
 	inst = (sljit_ub*)ensure_buf(compiler, 1 + 4 * (type - SLJIT_CALL0));
 	FAIL_IF(!inst);
 	INC_SIZE(4 * (type - SLJIT_CALL0));
 
 	*inst++ = MOV_rm_r;
-	*inst++ = MOD_DISP8 | (reg_map[SLJIT_SCRATCH_REG1] << 3) | 0x4 /* SIB */;
-	*inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_LOCALS_REG];
+	*inst++ = MOD_DISP8 | (reg_map[SLJIT_R0] << 3) | 0x4 /* SIB */;
+	*inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_SP];
 	*inst++ = 0;
 	if (type >= SLJIT_CALL2) {
 		*inst++ = MOV_rm_r;
-		*inst++ = MOD_DISP8 | (reg_map[SLJIT_SCRATCH_REG2] << 3) | 0x4 /* SIB */;
-		*inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_LOCALS_REG];
+		*inst++ = MOD_DISP8 | (reg_map[SLJIT_R1] << 3) | 0x4 /* SIB */;
+		*inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_SP];
 		*inst++ = sizeof(sljit_sw);
 	}
 	if (type >= SLJIT_CALL3) {
 		*inst++ = MOV_rm_r;
-		*inst++ = MOD_DISP8 | (reg_map[SLJIT_SCRATCH_REG3] << 3) | 0x4 /* SIB */;
-		*inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_LOCALS_REG];
+		*inst++ = MOD_DISP8 | (reg_map[SLJIT_R2] << 3) | 0x4 /* SIB */;
+		*inst++ = (0x4 /* none*/ << 3) | reg_map[SLJIT_SP];
 		*inst++ = 2 * sizeof(sljit_sw);
 	}
 #endif
@@ -488,7 +481,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_enter(struct sljit_compiler *c
 	sljit_ub *inst;
 
 	CHECK_ERROR();
-	check_sljit_emit_fast_enter(compiler, dst, dstw);
+	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
 	ADJUST_LOCAL_OFFSET(dst, dstw);
 
 	CHECK_EXTRA_REGS(dst, dstw, (void)0);
@@ -519,7 +512,7 @@ SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fast_return(struct sljit_compiler *
 	sljit_ub *inst;
 
 	CHECK_ERROR();
-	check_sljit_emit_fast_return(compiler, src, srcw);
+	CHECK(check_sljit_emit_fast_return(compiler, src, srcw));
 	ADJUST_LOCAL_OFFSET(src, srcw);
 
 	CHECK_EXTRA_REGS(src, srcw, (void)0);
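
The hunks above change the public sljit entry points: sljit_emit_enter() and sljit_set_context() now take an options word plus separate floating-point register counts, and sljit_set_context() returns sljit_si instead of void. A minimal sketch of how a caller moves to the new signature; the numeric values and the SLJIT_DOUBLE_ALIGNMENT usage are illustrative assumptions, not code taken from PCRE:

	/* Before: sljit_emit_enter(compiler, args, scratches, saveds, local_size). */
	sljit_emit_enter(compiler, 1, 3, 1, 64);

	/* After: options first, then the integer counts, then the new
	   fscratches/fsaveds counts, and finally local_size.
	   SLJIT_DOUBLE_ALIGNMENT is the option handled in the hunks above. */
	sljit_emit_enter(compiler, 0 /* options */, 1 /* args */,
		3 /* scratches */, 1 /* saveds */,
		0 /* fscratches */, 0 /* fsaveds */, 64 /* local_size */);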