mirror of
https://github.com/torvalds/linux
synced 2024-10-07 11:53:31 +00:00
xtensa: add load/store exception handler
Memory attached to instruction bus of the xtensa CPU is only accessible for a limited subset of opcodes. Other opcodes generate an exception with the load/store error cause code. This property complicates use of such systems. Provide a handler that recognizes and transparently fixes such exceptions. The following opcodes are recognized when used outside of FLIX bundles: l32i, l32i.n, l16ui, l16si, l8ui. Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
This commit is contained in:
parent
651d4aff68
commit
f29cf77609
|
@@ -203,6 +203,18 @@ config XTENSA_UNALIGNED_USER
|
||||||
|
|
||||||
Say Y here to enable unaligned memory access in user space.
|
Say Y here to enable unaligned memory access in user space.
|
||||||
|
|
||||||
|
config XTENSA_LOAD_STORE
|
||||||
|
bool "Load/store exception handler for memory only readable with l32"
|
||||||
|
help
|
||||||
|
The Xtensa architecture only allows reading memory attached to its
|
||||||
|
instruction bus with l32r and l32i instructions, all other
|
||||||
|
instructions raise an exception with the LoadStoreErrorCause code.
|
||||||
|
This makes it hard to use some configurations, e.g. store string
|
||||||
|
literals in FLASH memory attached to the instruction bus.
|
||||||
|
|
||||||
|
Say Y here to enable exception handler that allows transparent
|
||||||
|
byte and 2-byte access to memory attached to instruction bus.
|
||||||
|
|
||||||
config HAVE_SMP
|
config HAVE_SMP
|
||||||
bool "System Supports SMP (MX)"
|
bool "System Supports SMP (MX)"
|
||||||
depends on XTENSA_VARIANT_CUSTOM
|
depends on XTENSA_VARIANT_CUSTOM
|
||||||
|
|
|
@@ -47,6 +47,7 @@ __init trap_set_handler(int cause, xtensa_exception_handler *handler);
|
||||||
asmlinkage void fast_illegal_instruction_user(void);
|
asmlinkage void fast_illegal_instruction_user(void);
|
||||||
asmlinkage void fast_syscall_user(void);
|
asmlinkage void fast_syscall_user(void);
|
||||||
asmlinkage void fast_alloca(void);
|
asmlinkage void fast_alloca(void);
|
||||||
|
asmlinkage void fast_load_store(void);
|
||||||
asmlinkage void fast_unaligned(void);
|
asmlinkage void fast_unaligned(void);
|
||||||
asmlinkage void fast_second_level_miss(void);
|
asmlinkage void fast_second_level_miss(void);
|
||||||
asmlinkage void fast_store_prohibited(void);
|
asmlinkage void fast_store_prohibited(void);
|
||||||
|
@@ -64,6 +65,10 @@ void do_unhandled(struct pt_regs *regs);
|
||||||
static inline void __init early_trap_init(void)
|
static inline void __init early_trap_init(void)
|
||||||
{
|
{
|
||||||
static struct exc_table init_exc_table __initdata = {
|
static struct exc_table init_exc_table __initdata = {
|
||||||
|
#ifdef CONFIG_XTENSA_LOAD_STORE
|
||||||
|
.fast_kernel_handler[EXCCAUSE_LOAD_STORE_ERROR] =
|
||||||
|
fast_load_store,
|
||||||
|
#endif
|
||||||
#ifdef CONFIG_MMU
|
#ifdef CONFIG_MMU
|
||||||
.fast_kernel_handler[EXCCAUSE_DTLB_MISS] =
|
.fast_kernel_handler[EXCCAUSE_DTLB_MISS] =
|
||||||
fast_second_level_miss,
|
fast_second_level_miss,
|
||||||
|
|
|
@@ -22,7 +22,17 @@
|
||||||
#include <asm/asmmacro.h>
|
#include <asm/asmmacro.h>
|
||||||
#include <asm/processor.h>
|
#include <asm/processor.h>
|
||||||
|
|
||||||
#if XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION
|
#if XCHAL_UNALIGNED_LOAD_EXCEPTION || defined CONFIG_XTENSA_LOAD_STORE
|
||||||
|
#define LOAD_EXCEPTION_HANDLER
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#if XCHAL_UNALIGNED_STORE_EXCEPTION || defined LOAD_EXCEPTION_HANDLER
|
||||||
|
#define ANY_EXCEPTION_HANDLER
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#if XCHAL_HAVE_WINDOWED
|
||||||
|
#define UNALIGNED_USER_EXCEPTION
|
||||||
|
#endif
|
||||||
|
|
||||||
/* First-level exception handler for unaligned exceptions.
|
/* First-level exception handler for unaligned exceptions.
|
||||||
*
|
*
|
||||||
|
@@ -58,10 +68,6 @@
|
||||||
* BE shift left / mask 0 0 X X
|
* BE shift left / mask 0 0 X X
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#if XCHAL_HAVE_WINDOWED
|
|
||||||
#define UNALIGNED_USER_EXCEPTION
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#if XCHAL_HAVE_BE
|
#if XCHAL_HAVE_BE
|
||||||
|
|
||||||
#define HWORD_START 16
|
#define HWORD_START 16
|
||||||
|
@@ -103,7 +109,7 @@
|
||||||
*
|
*
|
||||||
* 23 0
|
* 23 0
|
||||||
* -----------------------------
|
* -----------------------------
|
||||||
* res 0000 0010
|
* L8UI xxxx xxxx 0000 ssss tttt 0010
|
||||||
* L16UI xxxx xxxx 0001 ssss tttt 0010
|
* L16UI xxxx xxxx 0001 ssss tttt 0010
|
||||||
* L32I xxxx xxxx 0010 ssss tttt 0010
|
* L32I xxxx xxxx 0010 ssss tttt 0010
|
||||||
* XXX 0011 ssss tttt 0010
|
* XXX 0011 ssss tttt 0010
|
||||||
|
@@ -128,9 +134,11 @@
|
||||||
|
|
||||||
#define OP0_L32I_N 0x8 /* load immediate narrow */
|
#define OP0_L32I_N 0x8 /* load immediate narrow */
|
||||||
#define OP0_S32I_N 0x9 /* store immediate narrow */
|
#define OP0_S32I_N 0x9 /* store immediate narrow */
|
||||||
|
#define OP0_LSAI 0x2 /* load/store */
|
||||||
#define OP1_SI_MASK 0x4 /* OP1 bit set for stores */
|
#define OP1_SI_MASK 0x4 /* OP1 bit set for stores */
|
||||||
#define OP1_SI_BIT 2 /* OP1 bit number for stores */
|
#define OP1_SI_BIT 2 /* OP1 bit number for stores */
|
||||||
|
|
||||||
|
#define OP1_L8UI 0x0
|
||||||
#define OP1_L32I 0x2
|
#define OP1_L32I 0x2
|
||||||
#define OP1_L16UI 0x1
|
#define OP1_L16UI 0x1
|
||||||
#define OP1_L16SI 0x9
|
#define OP1_L16SI 0x9
|
||||||
|
@@ -155,8 +163,73 @@
|
||||||
*/
|
*/
|
||||||
|
|
||||||
.literal_position
|
.literal_position
|
||||||
|
#ifdef CONFIG_XTENSA_LOAD_STORE
|
||||||
|
ENTRY(fast_load_store)
|
||||||
|
|
||||||
|
call0 .Lsave_and_load_instruction
|
||||||
|
|
||||||
|
/* Analyze the instruction (load or store?). */
|
||||||
|
|
||||||
|
extui a0, a4, INSN_OP0, 4 # get insn.op0 nibble
|
||||||
|
|
||||||
|
#if XCHAL_HAVE_DENSITY
|
||||||
|
_beqi a0, OP0_L32I_N, 1f # L32I.N, jump
|
||||||
|
#endif
|
||||||
|
bnei a0, OP0_LSAI, .Linvalid_instruction
|
||||||
|
/* 'store indicator bit' set, jump */
|
||||||
|
bbsi.l a4, OP1_SI_BIT + INSN_OP1, .Linvalid_instruction
|
||||||
|
|
||||||
|
1:
|
||||||
|
movi a3, ~3
|
||||||
|
and a3, a3, a8 # align memory address
|
||||||
|
|
||||||
|
__ssa8 a8
|
||||||
|
|
||||||
|
#ifdef CONFIG_MMU
|
||||||
|
/* l32e can't be used here even when it's available. */
|
||||||
|
/* TODO access_ok(a3) could be used here */
|
||||||
|
j .Linvalid_instruction
|
||||||
|
#endif
|
||||||
|
l32i a5, a3, 0
|
||||||
|
l32i a6, a3, 4
|
||||||
|
__src_b a3, a5, a6 # a3 has the data word
|
||||||
|
|
||||||
|
#if XCHAL_HAVE_DENSITY
|
||||||
|
addi a7, a7, 2 # increment PC (assume 16-bit insn)
|
||||||
|
_beqi a0, OP0_L32I_N, .Lload_w# l32i.n: jump
|
||||||
|
addi a7, a7, 1
|
||||||
|
#else
|
||||||
|
addi a7, a7, 3
|
||||||
|
#endif
|
||||||
|
|
||||||
|
extui a5, a4, INSN_OP1, 4
|
||||||
|
_beqi a5, OP1_L32I, .Lload_w
|
||||||
|
bnei a5, OP1_L8UI, .Lload16
|
||||||
|
extui a3, a3, 0, 8
|
||||||
|
j .Lload_w
|
||||||
|
|
||||||
|
ENDPROC(fast_load_store)
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Entry condition:
|
||||||
|
*
|
||||||
|
* a0: trashed, original value saved on stack (PT_AREG0)
|
||||||
|
* a1: a1
|
||||||
|
* a2: new stack pointer, original in DEPC
|
||||||
|
* a3: a3
|
||||||
|
* depc: a2, original value saved on stack (PT_DEPC)
|
||||||
|
* excsave_1: dispatch table
|
||||||
|
*
|
||||||
|
* PT_DEPC >= VALID_DOUBLE_EXCEPTION_ADDRESS: double exception, DEPC
|
||||||
|
* < VALID_DOUBLE_EXCEPTION_ADDRESS: regular exception
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifdef ANY_EXCEPTION_HANDLER
|
||||||
ENTRY(fast_unaligned)
|
ENTRY(fast_unaligned)
|
||||||
|
|
||||||
|
#if XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION
|
||||||
|
|
||||||
call0 .Lsave_and_load_instruction
|
call0 .Lsave_and_load_instruction
|
||||||
|
|
||||||
/* Analyze the instruction (load or store?). */
|
/* Analyze the instruction (load or store?). */
|
||||||
|
@@ -171,12 +244,17 @@ ENTRY(fast_unaligned)
|
||||||
/* 'store indicator bit' not set, jump */
|
/* 'store indicator bit' not set, jump */
|
||||||
_bbci.l a4, OP1_SI_BIT + INSN_OP1, .Lload
|
_bbci.l a4, OP1_SI_BIT + INSN_OP1, .Lload
|
||||||
|
|
||||||
|
#endif
|
||||||
|
#if XCHAL_UNALIGNED_STORE_EXCEPTION
|
||||||
|
|
||||||
/* Store: Jump to table entry to get the value in the source register.*/
|
/* Store: Jump to table entry to get the value in the source register.*/
|
||||||
|
|
||||||
.Lstore:movi a5, .Lstore_table # table
|
.Lstore:movi a5, .Lstore_table # table
|
||||||
extui a6, a4, INSN_T, 4 # get source register
|
extui a6, a4, INSN_T, 4 # get source register
|
||||||
addx8 a5, a6, a5
|
addx8 a5, a6, a5
|
||||||
jx a5 # jump into table
|
jx a5 # jump into table
|
||||||
|
#endif
|
||||||
|
#if XCHAL_UNALIGNED_LOAD_EXCEPTION
|
||||||
|
|
||||||
/* Load: Load memory address. */
|
/* Load: Load memory address. */
|
||||||
|
|
||||||
|
@@ -207,7 +285,9 @@ ENTRY(fast_unaligned)
|
||||||
|
|
||||||
extui a5, a4, INSN_OP1, 4
|
extui a5, a4, INSN_OP1, 4
|
||||||
_beqi a5, OP1_L32I, .Lload_w # l32i: jump
|
_beqi a5, OP1_L32I, .Lload_w # l32i: jump
|
||||||
|
#endif
|
||||||
|
#ifdef LOAD_EXCEPTION_HANDLER
|
||||||
|
.Lload16:
|
||||||
extui a3, a3, 0, 16 # extract lower 16 bits
|
extui a3, a3, 0, 16 # extract lower 16 bits
|
||||||
_beqi a5, OP1_L16UI, .Lload_w
|
_beqi a5, OP1_L16UI, .Lload_w
|
||||||
addi a5, a5, -OP1_L16SI
|
addi a5, a5, -OP1_L16SI
|
||||||
|
@@ -247,7 +327,8 @@ ENTRY(fast_unaligned)
|
||||||
mov a13, a3 ; _j .Lexit; .align 8
|
mov a13, a3 ; _j .Lexit; .align 8
|
||||||
mov a14, a3 ; _j .Lexit; .align 8
|
mov a14, a3 ; _j .Lexit; .align 8
|
||||||
mov a15, a3 ; _j .Lexit; .align 8
|
mov a15, a3 ; _j .Lexit; .align 8
|
||||||
|
#endif
|
||||||
|
#if XCHAL_UNALIGNED_STORE_EXCEPTION
|
||||||
.Lstore_table:
|
.Lstore_table:
|
||||||
l32i a3, a2, PT_AREG0; _j .Lstore_w; .align 8
|
l32i a3, a2, PT_AREG0; _j .Lstore_w; .align 8
|
||||||
mov a3, a1; _j .Lstore_w; .align 8 # fishy??
|
mov a3, a1; _j .Lstore_w; .align 8 # fishy??
|
||||||
|
@@ -265,7 +346,9 @@ ENTRY(fast_unaligned)
|
||||||
mov a3, a13 ; _j .Lstore_w; .align 8
|
mov a3, a13 ; _j .Lstore_w; .align 8
|
||||||
mov a3, a14 ; _j .Lstore_w; .align 8
|
mov a3, a14 ; _j .Lstore_w; .align 8
|
||||||
mov a3, a15 ; _j .Lstore_w; .align 8
|
mov a3, a15 ; _j .Lstore_w; .align 8
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef ANY_EXCEPTION_HANDLER
|
||||||
/* We cannot handle this exception. */
|
/* We cannot handle this exception. */
|
||||||
|
|
||||||
.extern _kernel_exception
|
.extern _kernel_exception
|
||||||
|
@@ -294,6 +377,8 @@ ENTRY(fast_unaligned)
|
||||||
|
|
||||||
2: movi a0, _user_exception
|
2: movi a0, _user_exception
|
||||||
jx a0
|
jx a0
|
||||||
|
#endif
|
||||||
|
#if XCHAL_UNALIGNED_STORE_EXCEPTION
|
||||||
|
|
||||||
# a7: instruction pointer, a4: instruction, a3: value
|
# a7: instruction pointer, a4: instruction, a3: value
|
||||||
.Lstore_w:
|
.Lstore_w:
|
||||||
|
@@ -358,7 +443,8 @@ ENTRY(fast_unaligned)
|
||||||
#else
|
#else
|
||||||
s32i a6, a4, 4
|
s32i a6, a4, 4
|
||||||
#endif
|
#endif
|
||||||
|
#endif
|
||||||
|
#ifdef ANY_EXCEPTION_HANDLER
|
||||||
.Lexit:
|
.Lexit:
|
||||||
#if XCHAL_HAVE_LOOPS
|
#if XCHAL_HAVE_LOOPS
|
||||||
rsr a4, lend # check if we reached LEND
|
rsr a4, lend # check if we reached LEND
|
||||||
|
@@ -453,7 +539,7 @@ ENTRY(fast_unaligned)
|
||||||
__src_b a4, a4, a5 # a4 has the instruction
|
__src_b a4, a4, a5 # a4 has the instruction
|
||||||
|
|
||||||
ret
|
ret
|
||||||
|
#endif
|
||||||
ENDPROC(fast_unaligned)
|
ENDPROC(fast_unaligned)
|
||||||
|
|
||||||
ENTRY(fast_unaligned_fixup)
|
ENTRY(fast_unaligned_fixup)
|
||||||
|
@@ -490,5 +576,4 @@ ENTRY(fast_unaligned_fixup)
|
||||||
jx a0
|
jx a0
|
||||||
|
|
||||||
ENDPROC(fast_unaligned_fixup)
|
ENDPROC(fast_unaligned_fixup)
|
||||||
|
#endif
|
||||||
#endif /* XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION */
|
|
||||||
|
|
|
@@ -245,7 +245,8 @@ void __init init_arch(bp_tag_t *bp_start)
|
||||||
{
|
{
|
||||||
/* Initialize basic exception handling if configuration may need it */
|
/* Initialize basic exception handling if configuration may need it */
|
||||||
|
|
||||||
if (IS_ENABLED(CONFIG_KASAN))
|
if (IS_ENABLED(CONFIG_KASAN) ||
|
||||||
|
IS_ENABLED(CONFIG_XTENSA_LOAD_STORE))
|
||||||
early_trap_init();
|
early_trap_init();
|
||||||
|
|
||||||
/* Initialize MMU. */
|
/* Initialize MMU. */
|
||||||
|
|
|
@@ -54,6 +54,9 @@ static void do_interrupt(struct pt_regs *regs);
|
||||||
#if XTENSA_FAKE_NMI
|
#if XTENSA_FAKE_NMI
|
||||||
static void do_nmi(struct pt_regs *regs);
|
static void do_nmi(struct pt_regs *regs);
|
||||||
#endif
|
#endif
|
||||||
|
#ifdef CONFIG_XTENSA_LOAD_STORE
|
||||||
|
static void do_load_store(struct pt_regs *regs);
|
||||||
|
#endif
|
||||||
static void do_unaligned_user(struct pt_regs *regs);
|
static void do_unaligned_user(struct pt_regs *regs);
|
||||||
static void do_multihit(struct pt_regs *regs);
|
static void do_multihit(struct pt_regs *regs);
|
||||||
#if XTENSA_HAVE_COPROCESSORS
|
#if XTENSA_HAVE_COPROCESSORS
|
||||||
|
@@ -89,7 +92,10 @@ static dispatch_init_table_t __initdata dispatch_init_table[] = {
|
||||||
{ EXCCAUSE_SYSTEM_CALL, USER, fast_syscall_user },
|
{ EXCCAUSE_SYSTEM_CALL, USER, fast_syscall_user },
|
||||||
{ EXCCAUSE_SYSTEM_CALL, 0, system_call },
|
{ EXCCAUSE_SYSTEM_CALL, 0, system_call },
|
||||||
/* EXCCAUSE_INSTRUCTION_FETCH unhandled */
|
/* EXCCAUSE_INSTRUCTION_FETCH unhandled */
|
||||||
/* EXCCAUSE_LOAD_STORE_ERROR unhandled*/
|
#ifdef CONFIG_XTENSA_LOAD_STORE
|
||||||
|
{ EXCCAUSE_LOAD_STORE_ERROR, USER|KRNL, fast_load_store },
|
||||||
|
{ EXCCAUSE_LOAD_STORE_ERROR, 0, do_load_store },
|
||||||
|
#endif
|
||||||
{ EXCCAUSE_LEVEL1_INTERRUPT, 0, do_interrupt },
|
{ EXCCAUSE_LEVEL1_INTERRUPT, 0, do_interrupt },
|
||||||
#ifdef SUPPORT_WINDOWED
|
#ifdef SUPPORT_WINDOWED
|
||||||
{ EXCCAUSE_ALLOCA, USER|KRNL, fast_alloca },
|
{ EXCCAUSE_ALLOCA, USER|KRNL, fast_alloca },
|
||||||
|
@@ -347,6 +353,19 @@ static void do_div0(struct pt_regs *regs)
|
||||||
force_sig_fault(SIGFPE, FPE_INTDIV, (void __user *)regs->pc);
|
force_sig_fault(SIGFPE, FPE_INTDIV, (void __user *)regs->pc);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#ifdef CONFIG_XTENSA_LOAD_STORE
|
||||||
|
static void do_load_store(struct pt_regs *regs)
|
||||||
|
{
|
||||||
|
__die_if_kernel("Unhandled load/store exception in kernel",
|
||||||
|
regs, SIGKILL);
|
||||||
|
|
||||||
|
pr_info_ratelimited("Load/store error to %08lx in '%s' (pid = %d, pc = %#010lx)\n",
|
||||||
|
regs->excvaddr, current->comm,
|
||||||
|
task_pid_nr(current), regs->pc);
|
||||||
|
force_sig_fault(SIGBUS, BUS_ADRERR, (void *)regs->excvaddr);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Handle unaligned memory accesses from user space. Kill task.
|
* Handle unaligned memory accesses from user space. Kill task.
|
||||||
*
|
*
|
||||||
|
|
Loading…
Reference in a new issue