#define CLOBBER_BSF clobber_flags()
/* The older code generator is now deprecated. */
-#define USE_NEW_RTASM 0
+#define USE_NEW_RTASM 1
#if USE_NEW_RTASM
/* Lower bound onto stack */
emit_byte(0xdd);
emit_byte(0x05);
- emit_long((uae_u32)&bounds[0]); /* fld double from lower */
+/* FIXME: 32-bit address prefix needed? */
+ emit_long(uae_ptr32(&bounds[0])); /* fld double from lower */
/* Clamp to lower */
emit_byte(0xdb);
emit_byte(0xd8); /* fstp st(0) */
emit_byte(0xdd);
emit_byte(0x05);
- emit_long((uae_u32)&bounds[1]); /* fld double from upper */
+/* FIXME: 32-bit address prefix needed? */
+ emit_long(uae_ptr32(&bounds[1])); /* fld double from upper */
/* Clamp to upper */
emit_byte(0xdb);
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x) */
emit_byte(0xdd);
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x*log2(e)) */
emit_byte(0xdd);
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x*log2(10)) */
emit_byte(0xdd);
emit_byte(0xf3); /* fpatan atan(x/sqrt(1-(x^2))) & pop */
emit_byte(0xdb);
emit_byte(0x2d);
- emit_long((uae_u32)&pihalf); /* fld load pi/2 from pihalf */
+ emit_long(uae_ptr32(&pihalf)); /* fld load pi/2 from pihalf */
emit_byte(0xde);
emit_byte(0xe1); /* fsubrp pi/2 - asin(x) & pop */
tos_make(d); /* store y=acos(x) */
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x*log2(e)) */
emit_byte(0xd9);
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x*log2(e)) */
emit_byte(0xdd);
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x*log2(e)) */
emit_byte(0xd9);
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x*log2(e)) */
emit_byte(0xdd);
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x*log2(e)) */
emit_byte(0xd9);
emit_byte(0xf0); /* f2xm1 (2^frac(x))-1 */
emit_byte(0xd8);
emit_byte(0x05);
- emit_long((uae_u32)&one); /* fadd (2^frac(x))-1 + 1 */
+ emit_long(uae_ptr32(&one)); /* fadd (2^frac(x))-1 + 1 */
emit_byte(0xd9);
emit_byte(0xfd); /* fscale (2^frac(x))*2^int(x*log2(e)) */
emit_byte(0xdd);
uae_log("JIT: " format "\n", __func__, ##__VA_ARGS__);
#define jit_log2(format, ...)
-#define MEMBaseDiff ((uae_u32)NATMEM_OFFSET)
+#define MEMBaseDiff uae_ptr32(NATMEM_OFFSET)
#ifdef NATMEM_OFFSET
#define FIXED_ADDRESSING 1
else if (r==FLAGX)
raw_load_flagx(bestreg,r);
else {
- raw_mov_l_rm(bestreg,(uae_u32)live.state[r].mem);
+ raw_mov_l_rm(bestreg,uae_ptr32(live.state[r].mem));
}
live.state[r].dirtysize=0;
set_status(r,CLEAN);
static inline int isinrom(uintptr addr)
{
#ifdef UAE
- return (addr>=(uae_u32)kickmem_bank.baseaddr &&
- addr<(uae_u32)kickmem_bank.baseaddr+8*65536);
+ return (addr >= uae_ptr32(kickmem_bank.baseaddr) &&
+ addr < uae_ptr32(kickmem_bank.baseaddr + 8 * 65536));
#else
return ((addr >= (uintptr)ROMBaseHost) && (addr < (uintptr)ROMBaseHost + ROMSize));
#endif
mov_l_rr(f,address);
shrl_l_ri(f,16); /* The index into the baseaddr table */
- mov_l_rm_indexed(f,(uae_u32)(baseaddr),f);
+ mov_l_rm_indexed(f,uae_ptr32(baseaddr),f);
if (address==source) { /* IBrowse does this! */
if (size > 1) {
mov_l_rr(f,address);
shrl_l_ri(f,16); /* The index into the mem bank table */
- mov_l_rm_indexed(f,(uae_u32)mem_banks,f);
+ mov_l_rm_indexed(f,uae_ptr32(mem_banks),f);
/* Now f holds a pointer to the actual membank */
mov_l_rR(f,f,offset);
/* Now f holds the address of the b/w/lput function */
mov_l_rr(f,address);
shrl_l_ri(f,16); /* The index into the baseaddr table */
- mov_l_rm_indexed(f,(uae_u32)baseaddr,f);
+ mov_l_rm_indexed(f,uae_ptr32(baseaddr),f);
/* f now holds the offset */
switch(size) {
mov_l_rr(f,address);
shrl_l_ri(f,16); /* The index into the mem bank table */
- mov_l_rm_indexed(f,(uae_u32)mem_banks,f);
+ mov_l_rm_indexed(f,uae_ptr32(mem_banks),f);
/* Now f holds a pointer to the actual membank */
mov_l_rR(f,f,offset);
/* Now f holds the address of the b/w/lget function */
mov_l_rr(f,address);
mov_l_rr(dest,address); // gb-- nop if dest==address
shrl_l_ri(f,16);
- mov_l_rm_indexed(f,(uae_u32)baseaddr,f);
+ mov_l_rm_indexed(f,uae_ptr32(baseaddr),f);
add_l(dest,f);
forget_about(tmp);
}
f=dest;
mov_l_rr(f,address);
shrl_l_ri(f,16); /* The index into the baseaddr bank table */
- mov_l_rm_indexed(dest,(uae_u32)baseaddr,f);
+ mov_l_rm_indexed(dest,uae_ptr32(baseaddr),f);
add_l(dest,address);
and_l_ri (dest, ~1);
forget_about(tmp);
bi->handler=
bi->handler_to_use=(cpuop_func*)get_target();
- raw_cmp_l_mi((uae_u32)&regs.pc_p,(uae_u32)pc_hist[0].location);
- raw_jnz((uae_u32)popall_cache_miss);
+ raw_cmp_l_mi(uae_ptr32(&regs.pc_p),uae_ptr32(pc_hist[0].location));
+ raw_jnz(uae_ptr32(popall_cache_miss));
/* This was 16 bytes on the x86, so now aligned on (n+1)*32 */
was_comp=0;
was_comp=0;
}
raw_mov_l_ri(REG_PAR1,(uae_u32)opcode);
- raw_mov_l_ri(REG_PAR2,(uae_u32)&regs);
+ raw_mov_l_ri(REG_PAR2,uae_ptr32(&regs));
#if USE_NORMAL_CALLING_CONVENTION
raw_push_l_r(REG_PAR2);
raw_push_l_r(REG_PAR1);
raw_jz_b_oponly();
branchadd=(uae_s8*)get_target();
emit_byte(0);
- raw_sub_l_mi((uae_u32)&countdown,scaled_cycles(totcycles));
+ raw_sub_l_mi(uae_ptr32(&countdown),scaled_cycles(totcycles));
raw_jmp((uintptr)popall_do_nothing);
*branchadd=(uintptr)get_target()-(uintptr)branchadd-1;
}
/* predicted outcome */
tbi=get_blockinfo_addr_new((void*)t1,1);
match_states(tbi);
- raw_sub_l_mi((uae_u32)&countdown,scaled_cycles(totcycles));
+ raw_sub_l_mi(uae_ptr32(&countdown),scaled_cycles(totcycles));
raw_jcc_l_oponly(9);
tba=(uae_u32*)get_target();
emit_jmp_target(get_handler(t1));
match_states(tbi);
//flush(1); /* Can only get here if was_comp==1 */
- raw_sub_l_mi((uae_u32)&countdown,scaled_cycles(totcycles));
+ raw_sub_l_mi(uae_ptr32(&countdown),scaled_cycles(totcycles));
raw_jcc_l_oponly(9);
tba=(uae_u32*)get_target();
emit_jmp_target(get_handler(t2));
raw_and_l_ri(r,TAGMASK);
int r2 = (r==0) ? 1 : 0;
raw_mov_l_ri(r2,(uintptr)popall_do_nothing);
- raw_sub_l_mi((uae_u32)&countdown,scaled_cycles(totcycles));
+ raw_sub_l_mi(uae_ptr32(&countdown),scaled_cycles(totcycles));
#if USE_NEW_RTASM
raw_cmov_l_rm_indexed(r2,(uintptr)cache_tags,r,SIZEOF_VOID_P,9);
#else
tbi=get_blockinfo_addr_new((void*)(uintptr)v,1);
match_states(tbi);
- raw_sub_l_mi((uae_u32)&countdown,scaled_cycles(totcycles));
+ raw_sub_l_mi(uae_ptr32(&countdown),scaled_cycles(totcycles));
raw_jcc_l_oponly(9);
tba=(uae_u32*)get_target();
emit_jmp_target(get_handler(v));
raw_and_l_ri(r,TAGMASK);
int r2 = (r==0) ? 1 : 0;
raw_mov_l_ri(r2,(uintptr)popall_do_nothing);
- raw_sub_l_mi((uae_u32)&countdown,scaled_cycles(totcycles));
+ raw_sub_l_mi(uae_ptr32(&countdown),scaled_cycles(totcycles));
#if USE_NEW_RTASM
raw_cmov_l_rm_indexed(r2,(uintptr)cache_tags,r,SIZEOF_VOID_P,9);
#else