fix problem with flmin/flmax and cgc
svn: r17532
commit f7f6b972fd
parent eeddcdca89
@@ -354,7 +354,13 @@
        [(2) (memq (car a) '(flabs flsqrt ->fl
                                   unsafe-flabs
                                   unsafe-flsqrt
-                                  unsafe-fx->fl))]
+                                  unsafe-fx->fl
+                                  flsin flcos fltan
+                                  flasin flacos flatan
+                                  flexp fllog
+                                  flfloor flceiling flround fltruncate
+                                  flmin flmax
+                                  unsafe-flmin unsafe-flmax))]
        [(3) (memq (car a) '(fl+ fl- fl* fl/
                                 fl< fl> fl<= fl>= fl=
                                 flvector-ref
@@ -96,6 +96,7 @@ END_XFORM_ARITH;
 #if defined(MZ_USE_JIT_PPC) || defined(MZ_USE_JIT_X86_64)
 # define NEED_LONG_JUMPS
 #endif
+/* Tiny jumps seem worthwhile for x86, but they don't seem to help for x86_64: */
 #if defined(MZ_USE_JIT_I386) && !defined(MZ_USE_JIT_X86_64)
 # define USE_TINY_JUMPS
 #endif
@@ -510,6 +511,7 @@ static void mz_load_retained(mz_jit_state *jitter, int rs, int retptr)
 }
 #endif
 
+#if defined(MZ_USE_JIT_I386)
 static double *mz_retain_double(mz_jit_state *jitter, double d)
 {
   void *p;
@@ -519,6 +521,7 @@ static double *mz_retain_double(mz_jit_state *jitter, double d)
   jitter->retained_double++;
   return p;
 }
+#endif
 
 static void *generate_one(mz_jit_state *old_jitter,
                           Generate_Proc generate,
@@ -1498,8 +1501,8 @@ static void _jit_prolog_again(mz_jit_state *jitter, int n, int ret_addr_reg)
 
 #ifdef USE_TINY_JUMPS
 /* A tiny jump has to be between -128 and 127 bytes. */
-# define __START_TINY_JUMPS__(cond) if (cond) { _jitl.tiny_jumps = 1; }
-# define __END_TINY_JUMPS__(cond) if (cond) { _jitl.tiny_jumps = 0; }
+# define __START_TINY_JUMPS__(cond) if (cond) { __START_SHORT_JUMPS__(1); _jitl.tiny_jumps = 1; }
+# define __END_TINY_JUMPS__(cond) if (cond) { _jitl.tiny_jumps = 0; __END_SHORT_JUMPS__(1); }
 # define __START_INNER_TINY__(cond) __END_SHORT_JUMPS__(cond); __START_TINY_JUMPS__(1);
 # define __END_INNER_TINY__(cond) __END_TINY_JUMPS__(1); __START_SHORT_JUMPS__(cond);
 #else
@@ -1512,6 +1515,14 @@ static void _jit_prolog_again(mz_jit_state *jitter, int n, int ret_addr_reg)
 #define __START_TINY_OR_SHORT_JUMPS__(tcond, cond) if (tcond) { __START_TINY_JUMPS__(1); } else { __START_SHORT_JUMPS__(cond); }
 #define __END_TINY_OR_SHORT_JUMPS__(tcond, cond) if (tcond) { __END_TINY_JUMPS__(1); } else { __END_SHORT_JUMPS__(cond); }
 
+#ifdef JIT_X86_64
+# define __START_TINY_JUMPS_IF_COMPACT__(cond) /* empty */
+# define __END_TINY_JUMPS_IF_COMPACT__(cond) /* empty */
+#else
+# define __START_TINY_JUMPS_IF_COMPACT__(cond) __START_TINY_JUMPS__(cond)
+# define __END_TINY_JUMPS_IF_COMPACT__(cond) __END_TINY_JUMPS__(cond)
+#endif
+
 /* mz_b..i_p supports 64-bit constants on x86_64: */
 #ifdef MZ_USE_JIT_X86_64
 # define mz_beqi_p(a, v, i) ((void)jit_patchable_movi_p(JIT_REXTMP, i), jit_beqr_p(a, v, JIT_REXTMP))
@@ -2437,10 +2448,8 @@ static void branch_for_true(mz_jit_state *jitter, Branch_Info *for_branch)
   if (for_branch->true_needs_jump) {
     GC_CAN_IGNORE jit_insn *ref;
 
-    __START_SHORT_JUMPS__(for_branch->branch_short);
     ref = jit_jmpi(jit_forward());
     add_branch(for_branch, ref, BRANCH_ADDR_TRUE, BRANCH_ADDR_UCBRANCH);
-    __END_SHORT_JUMPS__(for_branch->branch_short);
   }
 }
 
@@ -5097,9 +5106,9 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
       if (!unsafe_fx && !unsafe_fl) {
         mz_rs_sync();
 
-        __START_TINY_JUMPS__(1);
+        __START_TINY_JUMPS_IF_COMPACT__(1);
         ref2 = jit_bmsi_ul(jit_forward(), va, 0x1);
-        __END_TINY_JUMPS__(1);
+        __END_TINY_JUMPS_IF_COMPACT__(1);
       } else {
         ref2 = NULL;
         if (for_branch) mz_rs_sync();
@@ -5117,18 +5126,18 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
 
       if (!unsafe_fx && !unsafe_fl) {
         if (!has_fixnum_fast) {
-          __START_TINY_JUMPS__(1);
+          __START_TINY_JUMPS_IF_COMPACT__(1);
           mz_patch_branch(ref2);
-          __END_TINY_JUMPS__(1);
+          __END_TINY_JUMPS_IF_COMPACT__(1);
         }
 
         /* Slow path */
         refslow = generate_arith_slow_path(jitter, rator, &ref, &ref4, for_branch, orig_args, reversed, arith, 0, 0);
 
         if (has_fixnum_fast) {
-          __START_TINY_JUMPS__(1);
+          __START_TINY_JUMPS_IF_COMPACT__(1);
           mz_patch_branch(ref2);
-          __END_TINY_JUMPS__(1);
+          __END_TINY_JUMPS_IF_COMPACT__(1);
         }
       } else {
         refslow = overflow_refslow;
@@ -5147,9 +5156,9 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
 
       /* check both fixnum bits at once by ANDing into R2: */
       jit_andr_ul(JIT_R2, JIT_R0, JIT_R1);
-      __START_TINY_JUMPS__(1);
+      __START_TINY_JUMPS_IF_COMPACT__(1);
       ref2 = jit_bmsi_ul(jit_forward(), JIT_R2, 0x1);
-      __END_TINY_JUMPS__(1);
+      __END_TINY_JUMPS_IF_COMPACT__(1);
       CHECK_LIMIT();
     } else {
       if (for_branch) mz_rs_sync();
@@ -5167,9 +5176,9 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
 
     if (!unsafe_fx && !unsafe_fl) {
       if (!has_fixnum_fast) {
-        __START_TINY_JUMPS__(1);
+        __START_TINY_JUMPS_IF_COMPACT__(1);
         mz_patch_branch(ref2);
-        __END_TINY_JUMPS__(1);
+        __END_TINY_JUMPS_IF_COMPACT__(1);
       }
 
       /* Slow path */
@@ -5177,9 +5186,9 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
 
       if (has_fixnum_fast) {
         /* Fixnum branch: */
-        __START_TINY_JUMPS__(1);
+        __START_TINY_JUMPS_IF_COMPACT__(1);
         mz_patch_branch(ref2);
-        __END_TINY_JUMPS__(1);
+        __END_TINY_JUMPS_IF_COMPACT__(1);
       }
       CHECK_LIMIT();
     } else {
@@ -5191,9 +5200,9 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
     /* Only one argument: */
     if (!unsafe_fx && !unsafe_fl) {
       mz_rs_sync();
-      __START_TINY_JUMPS__(1);
+      __START_TINY_JUMPS_IF_COMPACT__(1);
       ref2 = jit_bmsi_ul(jit_forward(), JIT_R0, 0x1);
-      __END_TINY_JUMPS__(1);
+      __END_TINY_JUMPS_IF_COMPACT__(1);
     } else {
       if (for_branch) mz_rs_sync();
       ref2 = NULL;
@@ -5215,18 +5224,18 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
 
     if (!unsafe_fx && !unsafe_fl) {
       if (!has_fixnum_fast) {
-        __START_TINY_JUMPS__(1);
+        __START_TINY_JUMPS_IF_COMPACT__(1);
         mz_patch_branch(ref2);
-        __END_TINY_JUMPS__(1);
+        __END_TINY_JUMPS_IF_COMPACT__(1);
       }
 
       /* Slow path */
       refslow = generate_arith_slow_path(jitter, rator, &ref, &ref4, for_branch, orig_args, reversed, arith, 1, v);
 
       if (has_fixnum_fast) {
-        __START_TINY_JUMPS__(1);
+        __START_TINY_JUMPS_IF_COMPACT__(1);
         mz_patch_branch(ref2);
-        __END_TINY_JUMPS__(1);
+        __END_TINY_JUMPS_IF_COMPACT__(1);
       }
     } else {
       refslow = overflow_refslow;
@@ -5342,9 +5351,9 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
         /* first argument must have been most negative fixnum,
            second argument must have been -1: */
         if (reversed)
-          jit_movi_p(JIT_R0, (void *)(((long)1 << ((8 * JIT_WORD_SIZE) - 1)) | 0x1));
+          (void)jit_movi_p(JIT_R0, (void *)(((long)1 << ((8 * JIT_WORD_SIZE) - 1)) | 0x1));
         else
-          jit_movi_p(JIT_R0, scheme_make_integer(-1));
+          (void)jit_movi_p(JIT_R0, scheme_make_integer(-1));
         (void)jit_jmpi(refslow);
         __START_INNER_TINY__(branch_short);
         mz_patch_branch(refx);
@@ -5705,13 +5714,17 @@ static int generate_arith(mz_jit_state *jitter, Scheme_Object *rator, Scheme_Obj
       mz_patch_ucbranch(refdt);
 
       (void)jit_movi_p(JIT_R0, scheme_true);
+      __START_INNER_TINY__(branch_short);
       ref2 = jit_jmpi(jit_forward());
+      __END_INNER_TINY__(branch_short);
       if (ref3)
         mz_patch_branch(ref3);
       if (refd)
         mz_patch_branch(refd);
       (void)jit_movi_p(JIT_R0, scheme_false);
+      __START_INNER_TINY__(branch_short);
       mz_patch_ucbranch(ref2);
+      __END_INNER_TINY__(branch_short);
       if (!unsafe_fx && !unsafe_fl)
         jit_patch_movi(ref, (_jit.x.pc));
     }
@@ -33,7 +33,6 @@
 #ifndef __lightning_asm_common_h
 #define __lightning_asm_common_h_
 
-
 #ifndef _ASM_SAFETY
 #define JITFAIL(MSG) 0
 #else
@@ -112,8 +111,10 @@ typedef unsigned long _ul;
 #define _jit_L(L) _jit_VD(((*_jit.x.ul_pc++)= _jit_UL((L) )))
 #define _jit_I_noinc(I) _jit_VD(((*_jit.x.ui_pc)= _jit_UI((I) )))
 
+#define _COPY_HIGH_BIT(N, I) (((unsigned long)(I) & (1 << ((N)-1))) ? ~_MASK(N) : 0)
+
 #define _MASK(N) ((unsigned long)(((long)1<<(N)))-1)
-#define _siP(N,I) (!((((unsigned long)(I))^(((unsigned long)(I))<<1))&~_MASK(N)))
+#define _siP(N,I) (!((((unsigned long)(I))^(_COPY_HIGH_BIT(N, I)))&~_MASK(N)))
 #define _uiP(N,I) (!(((unsigned long)(I))&~_MASK(N)))
 #define _suiP(N,I) (_siP(N,I) | _uiP(N,I))
 
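The reworked _siP test accepts exactly the values that are sign-extensions of their low N bits, i.e. values that fit an N-bit signed field such as the 8-bit displacement of a tiny jump. The following stand-alone check is illustrative only (it is not part of this commit); it copies the three macros as they stand after the change and compares the N=8 case against a plain -128..127 range test:

#include <assert.h>
#include <stdio.h>

/* Local copies of the lightning macros as defined after this change. */
#define _MASK(N)            ((unsigned long)(((long)1<<(N)))-1)
#define _COPY_HIGH_BIT(N,I) (((unsigned long)(I) & (1 << ((N)-1))) ? ~_MASK(N) : 0)
#define _siP(N,I)           (!((((unsigned long)(I))^(_COPY_HIGH_BIT(N, I)))&~_MASK(N)))

int main(void)
{
  long d;
  /* _siP(8, d) should hold exactly when d fits a signed 8-bit displacement. */
  for (d = -1000; d <= 1000; d++)
    assert(_siP(8, d) == (d >= -128 && d <= 127));
  printf("signed 8-bit fit agrees with the range -128..127\n");
  return 0;
}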
@@ -122,7 +122,7 @@ typedef _uc jit_insn;
 /*** ASSEMBLER ***/
 
 #define _OFF4(D) (_jit_UL(D) - _jit_UL(_jit.x.pc))
-#define _CKD8(D) _ck_d(8, ((_uc) _OFF4(D)) )
+#define _CKD8(D) _ck_d(8, ((_sc) _OFF4(D)) )
 
 #define _D8(D) (_jit_B(0), ((*(_PUC(_jit.x.pc)-1))= _CKD8(D)))
 #define _D32(D) (_jit_I(0), ((*(_PUI(_jit.x.pc)-1))= _OFF4(D)))
@@ -562,15 +562,25 @@ typedef _uc jit_insn;
 #define JNLESm(D,B,I,S) JCCSim(0xf,D,B,I,S)
 #define JGSm(D,B,I,S) JCCSim(0xf,D,B,I,S)
 
-#ifdef JIT_X86_64
-# define JCCim(CC,nCC,D,B,I,S) (!_jitl.long_jumps \
-                                ? _OO_D32(0x0f80|(CC), (long)(D) ) \
-                                : (_O_D8(0x70|(nCC), _jit_UL(_jit.x.pc) + 13), JMPm((long)D, 0, 0, 0)))
-#else
-# define JCCim(CC,nCC,D,B,I,S) ((_r0P(B) && _r0P(I)) ? (_jitl.tiny_jumps \
+#ifndef JIT_X86_64
+# define SUPPORT_TINY_JUMPS
+#endif
+
+#ifdef SUPPORT_TINY_JUMPS
+# define JCCim_base(CC,nCC,D,B,I,S) ((_r0P(B) && _r0P(I)) ? (_jitl.tiny_jumps \
                                 ? _O_D8(0x70|(CC), D) \
                                 : _OO_D32 (0x0f80|(CC) ,(long)(D) )) : \
                                 JITFAIL("illegal mode in conditional jump"))
+#else
+# define JCCim_base(CC,nCC,D,B,I,S) (_OO_D32 (0x0f80|(CC) ,(long)(D) ))
+#endif
+
+#ifdef JIT_X86_64
+# define JCCim(CC,nCC,D,B,I,S) (!_jitl.long_jumps \
+                                ? JCCim_base(CC,nCC,D,B,I,S) \
+                                : (_O_D8(0x70|(nCC), _jit_UL(_jit.x.pc) + 13), JMPm((long)D, 0, 0, 0)))
+#else
+# define JCCim(CC,nCC,D,B,I,S) JCCim_base(CC,nCC,D,B,I,S)
 #endif
 
 #define JOm(D,B,I,S) JCCim(0x0,0x1,D,B,I,S)
@@ -608,15 +618,21 @@ typedef _uc jit_insn;
 #define JMPSm(D,B,I,S) ((_r0P(B) && _r0P(I)) ? _O_D8 (0xeb ,(long)(D) ) : \
                         JITFAIL("illegal mode in short jump"))
 
-#ifdef JIT_X86_64
-# define JMPm(D,B,I,S) (!_jitl.long_jumps \
-                        ? _O_D32(0xe9, (long)(D)) \
-                        : (MOVQir((D), JIT_REXTMP), _qO_Mrm(0xff,_b11,_b100,_r8(JIT_REXTMP))))
-#else
-# define JMPm(D,B,I,S) ((_r0P(B) && _r0P(I)) ? (_jitl.tiny_jumps \
+#ifdef SUPPORT_TINY_JUMPS
+# define JMPm_base(D,B,I,S) ((_r0P(B) && _r0P(I)) ? (_jitl.tiny_jumps \
                         ? _O_D8(0xeB, D) \
                         : _O_D32 (0xe9 ,(long)(D) )) : \
                         JITFAIL("illegal mode in direct jump"))
+#else
+# define JMPm_base(D,B,I,S) (_O_D32(0xe9 ,(long)(D) ))
+#endif
+
+#ifdef JIT_X86_64
+# define JMPm(D,B,I,S) (!_jitl.long_jumps \
+                        ? JMPm_base(D,B,I,S) \
+                        : (MOVQir((D), JIT_REXTMP), _qO_Mrm(0xff,_b11,_b100,_r8(JIT_REXTMP))))
+#else
+# define JMPm(D,B,I,S) JMPm_base(D,B,I,S)
 #endif
 
 #define JMPsr(R) _O_Mrm (0xff ,_b11,_b100,_r4(R) )
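For orientation, here is a minimal sketch (illustrative, not part of this commit) of the two direct-jump encodings that JMPm_base chooses between on x86: a tiny jump is opcode 0xEB followed by a signed 8-bit displacement, and the normal form is 0xE9 followed by a signed 32-bit displacement, both measured from the end of the instruction. Unlike the lightning macros, which take a target address, this helper takes the displacement directly:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Emit a direct jump with the given displacement; returns the encoded length. */
static size_t emit_jmp(uint8_t *pc, long disp, int tiny)
{
  if (tiny) {
    pc[0] = 0xEB;                     /* jmp rel8 */
    pc[1] = (uint8_t)(int8_t)disp;    /* displacement must be in -128..127 */
    return 2;
  } else {
    int32_t d32 = (int32_t)disp;
    pc[0] = 0xE9;                     /* jmp rel32 */
    memcpy(pc + 1, &d32, 4);          /* little-endian 32-bit displacement */
    return 5;
  }
}

int main(void)
{
  uint8_t buf[8];
  printf("tiny jmp: %zu bytes, normal jmp: %zu bytes\n",
         emit_jmp(buf, -2, 1), emit_jmp(buf, 1024, 0));
  return 0;
}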
@@ -55,8 +55,10 @@ struct jit_local_state {
   int long_jumps;
   int nextarg_geti;
 #else
-  int tiny_jumps;
   int framesize;
 #endif
+#ifdef SUPPORT_TINY_JUMPS
+  int tiny_jumps;
+#endif
   int r0_can_be_tmp;
   int argssize;
@@ -548,18 +550,29 @@ static int jit_arg_reg_order[] = { _EDI, _ESI, _EDX, _ECX };
 #define jit_calli(label) (CALLm( ((unsigned long) (label)), 0, 0, 0), _jit.x.pc)
 #define jit_callr(reg) (CALLsr(reg))
 #define jit_jmpr(reg) JMPsr(reg)
 
+#ifdef SUPPORT_TINY_JUMPS
+# if 0
+static long _CHECK_TINY(long diff) { if ((diff < -128) || (diff > 127)) *(long *)0x0 = 1; return diff; }
+# else
+# define _CHECK_TINY(x) x
+# endif
+# define jit_patch_normal_at(jump_pc,v) (_jitl.tiny_jumps \
+                                         ? (*_PSC((jump_pc) - sizeof(char)) = _jit_SC(_CHECK_TINY((jit_insn *)(v) - (jump_pc)))) \
+                                         : (*_PSI((jump_pc) - sizeof(int)) = _jit_SI((jit_insn *)(v) - (jump_pc))))
+#else
+# define jit_patch_normal_at(jump_pc,v) (*_PSI((jump_pc) - sizeof(int)) = _jit_SI((jit_insn *)(v) - (jump_pc)))
+#endif
+
 #ifdef JIT_X86_64
 #define jit_patch_long_at(jump_pc,v) (*_PSL((jump_pc) - sizeof(long)) = _jit_SL((jit_insn *)(v)))
-# define jit_patch_short_at(jump_pc,v) (*_PSI((jump_pc) - sizeof(int)) = _jit_SI((jit_insn *)(v) - (jump_pc)))
+# define jit_patch_short_at(jump_pc,v) jit_patch_normal_at(jump_pc, v)
 # define jit_patch_branch_at(jump_pc,v) (_jitl.long_jumps ? jit_patch_long_at((jump_pc)-3, v) : jit_patch_short_at(jump_pc, v))
 # define jit_patch_ucbranch_at(jump_pc,v) (_jitl.long_jumps ? jit_patch_long_at((jump_pc)-3, v) : jit_patch_short_at(jump_pc, v))
 # define jit_ret() (POPQr(_R13), POPQr(_R12), POPQr(_EBX), POPQr(_EBP), RET_())
 #else
-#define jit_patch_long_at(jump_pc,v) (_jitl.tiny_jumps \
-                                      ? (*_PSC((jump_pc) - sizeof(char)) = _jit_SC((jit_insn *)(v) - (jump_pc))) \
-                                      : (*_PSL((jump_pc) - sizeof(long)) = _jit_SL((jit_insn *)(v) - (jump_pc))))
-# define jit_patch_branch_at(jump_pc,v) jit_patch_long_at(jump_pc, v)
-# define jit_patch_ucbranch_at(jump_pc,v) jit_patch_long_at(jump_pc, v)
+# define jit_patch_branch_at(jump_pc,v) jit_patch_normal_at(jump_pc, v)
+# define jit_patch_ucbranch_at(jump_pc,v) jit_patch_normal_at(jump_pc, v)
 # define jit_ret() (POPLr(_EDI), POPLr(_ESI), POPLr(_EBX), POPLr(_EBP), RET_())
 #endif
 
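The new jit_patch_normal_at fills in the displacement of an already-emitted jump: jump_pc points just past the displacement field, and the stored value is target minus jump_pc, one signed byte for a tiny jump or four bytes otherwise. A minimal stand-alone sketch of that back-patching step (illustrative only, with plain pointer arithmetic in place of the _PSC/_PSI/_jit_SC/_jit_SI macros):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Back-patch the displacement of a jump whose displacement field ends at jump_pc. */
static void patch_normal_at(uint8_t *jump_pc, uint8_t *target, int tiny_jumps)
{
  if (tiny_jumps) {
    *(int8_t *)(jump_pc - sizeof(char)) = (int8_t)(target - jump_pc);
  } else {
    int32_t d = (int32_t)(target - jump_pc);
    memcpy(jump_pc - sizeof(int), &d, sizeof(d));
  }
}

int main(void)
{
  uint8_t code[16] = { 0xEB, 0x00 };    /* jmp rel8 with a placeholder displacement */
  uint8_t *jump_pc = code + 2;          /* address just past the rel8 field */
  patch_normal_at(jump_pc, code + 10, 1);
  printf("patched rel8 displacement: %d\n", (int)(int8_t)code[1]);  /* prints 8 */
  return 0;
}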
@@ -185,12 +185,12 @@ void scheme_init_flfxnum_numcomp(Scheme_Env *env)
   scheme_add_global_constant("fl>=", p, env);
 
   p = scheme_make_folding_prim(fl_min, "flmin", 2, 2, 1);
-  if (scheme_can_inline_fp_comp())
+  if (scheme_can_inline_fp_op())
     SCHEME_PRIM_PROC_FLAGS(p) |= SCHEME_PRIM_IS_BINARY_INLINED;
   scheme_add_global_constant("flmin", p, env);
 
   p = scheme_make_folding_prim(fl_max, "flmax", 2, 2, 1);
-  if (scheme_can_inline_fp_comp())
+  if (scheme_can_inline_fp_op())
     SCHEME_PRIM_PROC_FLAGS(p) |= SCHEME_PRIM_IS_BINARY_INLINED;
   scheme_add_global_constant("flmax", p, env);
 }
@@ -265,13 +265,13 @@ void scheme_init_unsafe_numcomp(Scheme_Env *env)
   scheme_add_global_constant("unsafe-fl>=", p, env);
 
   p = scheme_make_folding_prim(unsafe_fl_min, "unsafe-flmin", 2, 2, 1);
-  if (scheme_can_inline_fp_comp())
+  if (scheme_can_inline_fp_op())
     SCHEME_PRIM_PROC_FLAGS(p) |= SCHEME_PRIM_IS_BINARY_INLINED;
   SCHEME_PRIM_PROC_FLAGS(p) |= SCHEME_PRIM_IS_UNSAFE_FUNCTIONAL;
   scheme_add_global_constant("unsafe-flmin", p, env);
 
   p = scheme_make_folding_prim(unsafe_fl_max, "unsafe-flmax", 2, 2, 1);
-  if (scheme_can_inline_fp_comp())
+  if (scheme_can_inline_fp_op())
     SCHEME_PRIM_PROC_FLAGS(p) |= SCHEME_PRIM_IS_BINARY_INLINED;
   SCHEME_PRIM_PROC_FLAGS(p) |= SCHEME_PRIM_IS_UNSAFE_FUNCTIONAL;
   scheme_add_global_constant("unsafe-flmax", p, env);