tcg/aarch64: implement sign/zero extend operations

Implement the optional sign/zero extend operations using the dedicated
AArch64 instructions.

Signed-off-by: Claudio Fontana <claudio.fontana@huawei.com>
Reviewed-by: Richard Henderson <rth@twiddle.net>
Message-id: 51AC9A58.40502@huawei.com
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
Commit: 31f1275b90 (parent: 9c4a059df3)
Author: Claudio Fontana, 2013-06-12 16:20:23 +01:00; committed by Peter Maydell
2 changed files with 66 additions and 12 deletions
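For reference, the SXT*/UXT* aliases used in the patch are just SBFM/UBFM with immr = 0 and imms = 8 * (1 << s_bits) - 1 (i.e. 7, 15 or 31), mirroring how tcg_out_sxt/tcg_out_uxt pass 0 and bits to tcg_out_sbfm/tcg_out_ubfm below. A minimal stand-alone sketch (not QEMU code; bfm_alias is a hypothetical helper) that reproduces the alias opcodes quoted in the patch comments:

    #include <stdio.h>
    #include <stdint.h>

    /* Hypothetical helper: OR the width mask into a base bitfield-move opcode,
       with immr = 0 and imms = bits, as the patch does.
       Bases: SBFM Wd 0x13000000, SBFM Xd 0x93400000, UBFM Wd 0x53000000. */
    static uint32_t bfm_alias(uint32_t base, int s_bits, unsigned rd, unsigned rn)
    {
        uint32_t imms = 8u * (1u << s_bits) - 1;   /* 7, 15 or 31 */
        return base | imms << 10 | rn << 5 | rd;
    }

    int main(void)
    {
        /* With rd = rn = 0 these print the opcodes quoted in the comments:
           SXTB 13001c00, SXTH 13003c00, SXTW 93407c00, UXTB 53001c00, UXTH 53003c00 */
        printf("SXTB %08x\n", (unsigned)bfm_alias(0x13000000, 0, 0, 0));
        printf("SXTH %08x\n", (unsigned)bfm_alias(0x13000000, 1, 0, 0));
        printf("SXTW %08x\n", (unsigned)bfm_alias(0x93400000, 2, 0, 0));
        printf("UXTB %08x\n", (unsigned)bfm_alias(0x53000000, 0, 0, 0));
        printf("UXTH %08x\n", (unsigned)bfm_alias(0x53000000, 1, 0, 0));
        return 0;
    }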

--- a/tcg/aarch64/tcg-target.c
+++ b/tcg/aarch64/tcg-target.c

@@ -674,6 +674,24 @@ static inline void tcg_out_rev16(TCGContext *s, int ext, TCGReg rd, TCGReg rm)
     tcg_out32(s, base | rm << 5 | rd);
 }
 
+static inline void tcg_out_sxt(TCGContext *s, int ext, int s_bits,
+                               TCGReg rd, TCGReg rn)
+{
+    /* using ALIASes SXTB 0x13001c00, SXTH 0x13003c00, SXTW 0x93407c00
+       of SBFM Xd, Xn, #0, #7|15|31 */
+    int bits = 8 * (1 << s_bits) - 1;
+    tcg_out_sbfm(s, ext, rd, rn, 0, bits);
+}
+
+static inline void tcg_out_uxt(TCGContext *s, int s_bits,
+                               TCGReg rd, TCGReg rn)
+{
+    /* using ALIASes UXTB 0x53001c00, UXTH 0x53003c00
+       of UBFM Wd, Wn, #0, #7|15 */
+    int bits = 8 * (1 << s_bits) - 1;
+    tcg_out_ubfm(s, 0, rd, rn, 0, bits);
+}
+
 #ifdef CONFIG_SOFTMMU
 #include "exec/softmmu_defs.h"
@@ -721,8 +739,7 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, int opc)
     tcg_out_callr(s, TCG_REG_TMP);
 
     if (opc & 0x04) { /* sign extend */
-        unsigned int bits = 8 * (1 << s_bits) - 1;
-        tcg_out_sbfm(s, 1, data_reg, TCG_REG_X0, 0, bits); /* 7|15|31 */
+        tcg_out_sxt(s, 1, s_bits, data_reg, TCG_REG_X0);
     } else {
         tcg_out_movr(s, 1, data_reg, TCG_REG_X0);
     }
@@ -1037,6 +1054,31 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_rev16(s, 0, args[0], args[1]);
         break;
 
+    case INDEX_op_ext8s_i64:
+        ext = 1; /* fall through */
+    case INDEX_op_ext8s_i32:
+        tcg_out_sxt(s, ext, 0, args[0], args[1]);
+        break;
+    case INDEX_op_ext16s_i64:
+        ext = 1; /* fall through */
+    case INDEX_op_ext16s_i32:
+        tcg_out_sxt(s, ext, 1, args[0], args[1]);
+        break;
+    case INDEX_op_ext32s_i64:
+        tcg_out_sxt(s, 1, 2, args[0], args[1]);
+        break;
+    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext8u_i32:
+        tcg_out_uxt(s, 0, args[0], args[1]);
+        break;
+    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext16u_i32:
+        tcg_out_uxt(s, 1, args[0], args[1]);
+        break;
+    case INDEX_op_ext32u_i64:
+        tcg_out_movr(s, 0, args[0], args[1]);
+        break;
+
     default:
         tcg_abort(); /* opcode not implemented */
     }
@@ -1125,6 +1167,18 @@ static const TCGTargetOpDef aarch64_op_defs[] = {
     { INDEX_op_bswap32_i64, { "r", "r" } },
     { INDEX_op_bswap64_i64, { "r", "r" } },
 
+    { INDEX_op_ext8s_i32, { "r", "r" } },
+    { INDEX_op_ext16s_i32, { "r", "r" } },
+    { INDEX_op_ext8u_i32, { "r", "r" } },
+    { INDEX_op_ext16u_i32, { "r", "r" } },
+
+    { INDEX_op_ext8s_i64, { "r", "r" } },
+    { INDEX_op_ext16s_i64, { "r", "r" } },
+    { INDEX_op_ext32s_i64, { "r", "r" } },
+    { INDEX_op_ext8u_i64, { "r", "r" } },
+    { INDEX_op_ext16u_i64, { "r", "r" } },
+    { INDEX_op_ext32u_i64, { "r", "r" } },
+
     { -1 },
 };

--- a/tcg/aarch64/tcg-target.h
+++ b/tcg/aarch64/tcg-target.h

@@ -40,10 +40,10 @@ typedef enum {
 /* optional instructions */
 #define TCG_TARGET_HAS_div_i32 0
-#define TCG_TARGET_HAS_ext8s_i32 0
-#define TCG_TARGET_HAS_ext16s_i32 0
-#define TCG_TARGET_HAS_ext8u_i32 0
-#define TCG_TARGET_HAS_ext16u_i32 0
+#define TCG_TARGET_HAS_ext8s_i32 1
+#define TCG_TARGET_HAS_ext16s_i32 1
+#define TCG_TARGET_HAS_ext8u_i32 1
+#define TCG_TARGET_HAS_ext16u_i32 1
 #define TCG_TARGET_HAS_bswap16_i32 1
 #define TCG_TARGET_HAS_bswap32_i32 1
 #define TCG_TARGET_HAS_not_i32 0
@@ -62,12 +62,12 @@ typedef enum {
 #define TCG_TARGET_HAS_muls2_i32 0
 #define TCG_TARGET_HAS_div_i64 0
-#define TCG_TARGET_HAS_ext8s_i64 0
-#define TCG_TARGET_HAS_ext16s_i64 0
-#define TCG_TARGET_HAS_ext32s_i64 0
-#define TCG_TARGET_HAS_ext8u_i64 0
-#define TCG_TARGET_HAS_ext16u_i64 0
-#define TCG_TARGET_HAS_ext32u_i64 0
+#define TCG_TARGET_HAS_ext8s_i64 1
+#define TCG_TARGET_HAS_ext16s_i64 1
+#define TCG_TARGET_HAS_ext32s_i64 1
+#define TCG_TARGET_HAS_ext8u_i64 1
+#define TCG_TARGET_HAS_ext16u_i64 1
+#define TCG_TARGET_HAS_ext32u_i64 1
 #define TCG_TARGET_HAS_bswap16_i64 1
 #define TCG_TARGET_HAS_bswap32_i64 1
 #define TCG_TARGET_HAS_bswap64_i64 1
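As background (not part of this commit): when one of the TCG_TARGET_HAS_ext*_* flags above is 0, the generic TCG code expands the extension from simpler ops instead of emitting the dedicated opcode; with the flag set to 1, the AArch64 backend can emit a single SXTB/SXTH/SXTW or UXTB/UXTH via tcg_out_sxt/tcg_out_uxt. A plain-C model of that fallback (illustrative only; the function names here are made up, not QEMU APIs):

    #include <stdint.h>
    #include <stdio.h>

    /* Model of the generic fallback: sign extension as a shift-up /
       arithmetic-shift-down pair, zero extension as a mask. */
    static int32_t model_ext8s_i32(int32_t x)
    {
        return (int32_t)((uint32_t)x << 24) >> 24;   /* shl 24; sar 24 */
    }

    static uint32_t model_ext8u_i32(uint32_t x)
    {
        return x & 0xffu;                            /* andi 0xff */
    }

    int main(void)
    {
        printf("%d %u\n", model_ext8s_i32(0x80), model_ext8u_i32(0x1ff)); /* -128 255 */
        return 0;
    }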