tcg/tci: Eliminate TARGET_LONG_BITS, target_ulong

The address size is now encoded in the opcode, so we no longer
need to test TARGET_LONG_BITS.  Addresses passed into the
load/store helpers can use uint64_t in place of target_ulong.

Reviewed-by: Alex Bennée <alex.bennee@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Richard Henderson, 2023-03-20 07:48:09 -07:00
commit dd7dc93ef0, parent fecccfcc54
2 changed files with 46 additions and 30 deletions
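Note: throughout the interpreter changes below, a 64-bit guest address (or data
value) on a 32-bit host is assembled from a register pair via the tci_uint64()
helper, which simply packs a high and low word; for reference, it is essentially:

    /* Combine two 32-bit register values into one 64-bit value. */
    static uint64_t tci_uint64(uint32_t high, uint32_t low)
    {
        return ((uint64_t)high << 32) | low;
    }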

--- a/tcg/tci.c
+++ b/tcg/tci.c

@@ -286,7 +286,7 @@ static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
     return result;
 }
 
-static uint64_t tci_qemu_ld(CPUArchState *env, target_ulong taddr,
+static uint64_t tci_qemu_ld(CPUArchState *env, uint64_t taddr,
                             MemOpIdx oi, const void *tb_ptr)
 {
     MemOp mop = get_memop(oi);
@@ -312,7 +312,7 @@ static uint64_t tci_qemu_ld(CPUArchState *env, target_ulong taddr,
     }
 }
 
-static void tci_qemu_st(CPUArchState *env, target_ulong taddr, uint64_t val,
+static void tci_qemu_st(CPUArchState *env, uint64_t taddr, uint64_t val,
                         MemOpIdx oi, const void *tb_ptr)
 {
     MemOp mop = get_memop(oi);
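Both helper signatures now take the guest address as a plain uint64_t; a 32-bit
guest address arrives already zero-extended by the caller.  As a rough sketch of
the dispatch such a helper performs, assuming hypothetical load_u8()/load_u16()/
... primitives in place of QEMU's real memory accessors:

    /* Sketch only: the load_uN() calls are illustrative stand-ins, not QEMU API. */
    static uint64_t qemu_ld_sketch(uint64_t taddr, MemOpIdx oi)
    {
        switch (get_memop(oi) & MO_SSIZE) {
        case MO_UB: return load_u8(taddr);
        case MO_SB: return (int8_t)load_u8(taddr);   /* sign-extend on return */
        case MO_UW: return load_u16(taddr);
        case MO_SW: return (int16_t)load_u16(taddr);
        case MO_UL: return load_u32(taddr);
        case MO_SL: return (int32_t)load_u32(taddr);
        case MO_UQ: return load_u64(taddr);
        default:    g_assert_not_reached();
        }
    }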
@@ -372,10 +372,9 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
     TCGReg r0, r1, r2, r3, r4, r5;
     tcg_target_ulong t1;
     TCGCond condition;
-    target_ulong taddr;
     uint8_t pos, len;
     uint32_t tmp32;
-    uint64_t tmp64;
+    uint64_t tmp64, taddr;
     uint64_t T1, T2;
     MemOpIdx oi;
     int32_t ofs;
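With taddr folded into the uint64_t declarations, the _a32 cases below rely on
ordinary C conversions: truncating a host register to uint32_t and then widening
it to uint64_t zero-extends the guest address.  For illustration (values made up):

    uint64_t reg   = 0xdeadbeefcafe0000ull;  /* 64-bit host register value */
    uint64_t taddr = (uint32_t)reg;          /* == 0x00000000cafe0000 */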
@@ -923,31 +922,40 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
         break;
 
     case INDEX_op_qemu_ld_a32_i32:
+        tci_args_rrm(insn, &r0, &r1, &oi);
+        taddr = (uint32_t)regs[r1];
+        goto do_ld_i32;
     case INDEX_op_qemu_ld_a64_i32:
-        if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
+        if (TCG_TARGET_REG_BITS == 64) {
             tci_args_rrm(insn, &r0, &r1, &oi);
             taddr = regs[r1];
         } else {
             tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
             taddr = tci_uint64(regs[r2], regs[r1]);
         }
-        tmp32 = tci_qemu_ld(env, taddr, oi, tb_ptr);
-        regs[r0] = tmp32;
+    do_ld_i32:
+        regs[r0] = tci_qemu_ld(env, taddr, oi, tb_ptr);
         break;
 
     case INDEX_op_qemu_ld_a32_i64:
+        if (TCG_TARGET_REG_BITS == 64) {
+            tci_args_rrm(insn, &r0, &r1, &oi);
+            taddr = (uint32_t)regs[r1];
+        } else {
+            tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
+            taddr = (uint32_t)regs[r2];
+        }
+        goto do_ld_i64;
     case INDEX_op_qemu_ld_a64_i64:
         if (TCG_TARGET_REG_BITS == 64) {
             tci_args_rrm(insn, &r0, &r1, &oi);
             taddr = regs[r1];
-        } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
-            tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
-            taddr = regs[r2];
         } else {
             tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
             taddr = tci_uint64(regs[r3], regs[r2]);
             oi = regs[r4];
         }
+    do_ld_i64:
         tmp64 = tci_qemu_ld(env, taddr, oi, tb_ptr);
         if (TCG_TARGET_REG_BITS == 32) {
             tci_write_reg64(regs, r1, r0, tmp64);
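The rewritten load cases share their tails: each opcode variant only decodes its
operand format and computes taddr, then jumps to a common label that performs the
access.  Distilled, with illustrative names (OP_LD_A32, OP_LD_A64, and
memory_load() are not from the patch):

    #include <stdint.h>

    enum { OP_LD_A32, OP_LD_A64 };
    uint64_t memory_load(uint64_t taddr);  /* stand-in for tci_qemu_ld() */

    void interp_load(int op, uint64_t *regs, int r0, int r1)
    {
        uint64_t taddr;

        switch (op) {
        case OP_LD_A32:
            taddr = (uint32_t)regs[r1];  /* 32-bit guest address, zero-extended */
            goto do_ld;
        case OP_LD_A64:
            taddr = regs[r1];            /* full 64-bit guest address */
        do_ld:
            regs[r0] = memory_load(taddr);
            break;
        }
    }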
@@ -957,35 +965,44 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
         break;
 
     case INDEX_op_qemu_st_a32_i32:
+        tci_args_rrm(insn, &r0, &r1, &oi);
+        taddr = (uint32_t)regs[r1];
+        goto do_st_i32;
     case INDEX_op_qemu_st_a64_i32:
-        if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
+        if (TCG_TARGET_REG_BITS == 64) {
             tci_args_rrm(insn, &r0, &r1, &oi);
             taddr = regs[r1];
         } else {
             tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
             taddr = tci_uint64(regs[r2], regs[r1]);
         }
-        tmp32 = regs[r0];
-        tci_qemu_st(env, taddr, tmp32, oi, tb_ptr);
+    do_st_i32:
+        tci_qemu_st(env, taddr, regs[r0], oi, tb_ptr);
         break;
 
     case INDEX_op_qemu_st_a32_i64:
+        if (TCG_TARGET_REG_BITS == 64) {
+            tci_args_rrm(insn, &r0, &r1, &oi);
+            tmp64 = regs[r0];
+            taddr = (uint32_t)regs[r1];
+        } else {
+            tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
+            tmp64 = tci_uint64(regs[r1], regs[r0]);
+            taddr = (uint32_t)regs[r2];
+        }
+        goto do_st_i64;
     case INDEX_op_qemu_st_a64_i64:
         if (TCG_TARGET_REG_BITS == 64) {
             tci_args_rrm(insn, &r0, &r1, &oi);
-            taddr = regs[r1];
             tmp64 = regs[r0];
+            taddr = regs[r1];
         } else {
-            if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
-                tci_args_rrrm(insn, &r0, &r1, &r2, &oi);
-                taddr = regs[r2];
-            } else {
-                tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
-                taddr = tci_uint64(regs[r3], regs[r2]);
-                oi = regs[r4];
-            }
+            tci_args_rrrrr(insn, &r0, &r1, &r2, &r3, &r4);
             tmp64 = tci_uint64(regs[r1], regs[r0]);
+            taddr = tci_uint64(regs[r3], regs[r2]);
+            oi = regs[r4];
         }
+    do_st_i64:
         tci_qemu_st(env, taddr, tmp64, oi, tb_ptr);
         break;
 
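Note the operand packing in the 32-bit-host store path above: tci_args_rrrrr()
yields the value pair in r0/r1 and the address pair in r2/r3, which exhausts the
five register fields in the instruction word, so the MemOpIdx travels in a
register (r4) instead.  Annotated, from the a64_i64 case:

    /* 32-bit host, qemu_st_a64_i64:
     *   r0/r1 = value   (low/high)
     *   r2/r3 = address (low/high)
     *   r4    = MemOpIdx, materialized by tcg_out_movi() at code-gen time
     */
    tmp64 = tci_uint64(regs[r1], regs[r0]);   /* value   */
    taddr = tci_uint64(regs[r3], regs[r2]);   /* address */
    oi    = regs[r4];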

--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc

@@ -243,7 +243,7 @@ static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
     return false;
 }
 
-static void stack_bounds_check(TCGReg base, target_long offset)
+static void stack_bounds_check(TCGReg base, intptr_t offset)
 {
     if (base == TCG_REG_CALL_STACK) {
         tcg_debug_assert(offset >= 0);
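stack_bounds_check() validates offsets relative to the host stack frame, so the
host-pointer-sized intptr_t is the natural type; target_long would tie an
otherwise target-independent check to the guest word size.  A minimal sketch of
such a check (the upper bound STACK_FRAME_LIMIT is illustrative, not TCI's
actual constant):

    static void stack_bounds_check_sketch(TCGReg base, intptr_t offset)
    {
        if (base == TCG_REG_CALL_STACK) {
            tcg_debug_assert(offset >= 0);
            tcg_debug_assert(offset < STACK_FRAME_LIMIT);
        }
    }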
@@ -850,24 +850,23 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         break;
 
     case INDEX_op_qemu_ld_a32_i32:
-    case INDEX_op_qemu_ld_a64_i32:
     case INDEX_op_qemu_st_a32_i32:
+        tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
+        break;
+    case INDEX_op_qemu_ld_a64_i32:
     case INDEX_op_qemu_st_a64_i32:
-        if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
+    case INDEX_op_qemu_ld_a32_i64:
+    case INDEX_op_qemu_st_a32_i64:
+        if (TCG_TARGET_REG_BITS == 64) {
             tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
         } else {
             tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
         }
         break;
-
-    case INDEX_op_qemu_ld_a32_i64:
     case INDEX_op_qemu_ld_a64_i64:
-    case INDEX_op_qemu_st_a32_i64:
     case INDEX_op_qemu_st_a64_i64:
         if (TCG_TARGET_REG_BITS == 64) {
             tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
-        } else if (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS) {
-            tcg_out_op_rrrm(s, opc, args[0], args[1], args[2], args[3]);
         } else {
             tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
             tcg_out_op_rrrrr(s, opc, args[0], args[1],