Orange Pi 5 kernel

Deprecated Linux kernel 5.10.110 for OrangePi 5/5B/5+ boards

3 Commits   0 Branches   0 Tags
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   1) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   2)  * arch/xtensa/kernel/align.S
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   3)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   4)  * Handle unalignment exceptions in kernel space.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   5)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   6)  * This file is subject to the terms and conditions of the GNU General
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   7)  * Public License.  See the file "COPYING" in the main directory of
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   8)  * this archive for more details.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300   9)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  10)  * Copyright (C) 2001 - 2005 Tensilica, Inc.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  11)  * Copyright (C) 2014 Cadence Design Systems Inc.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  12)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  13)  * Rewritten by Chris Zankel <chris@zankel.net>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  14)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  15)  * Based on work from Joe Taylor <joe@tensilica.com, joetylr@yahoo.com>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  16)  * and Marc Gauthier <marc@tensilica.com, marc@alimni.uwaterloo.ca>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  17)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  18) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  19) #include <linux/linkage.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  20) #include <asm/current.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  21) #include <asm/asm-offsets.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  22) #include <asm/asmmacro.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  23) #include <asm/processor.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  24) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  25) #if XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  26) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  27) /*  First-level exception handler for unaligned exceptions.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  28)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  29)  *  Note: This handler works only for kernel exceptions.  Unaligned user
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  30)  *        access should get a seg fault.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  31)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  32) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  33) /* Big and little endian 16-bit values are located in
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  34)  * different halves of a register.  HWORD_START helps to
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  35)  * abstract the notion of extracting a 16-bit value from a
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  36)  * register.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  37)  * We also have to define new shifting instructions because
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  38)  * lsb and msb are on 'opposite' ends in a register for
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  39)  * different endian machines.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  40)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  41)  * Assume a memory region in ascending address:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  42)  *   	0 1 2 3|4 5 6 7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  43)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  44)  * When loading one word into a register, the content of that register is:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  45)  *  LE	3 2 1 0, 7 6 5 4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  46)  *  BE  0 1 2 3, 4 5 6 7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  47)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  48)  * Masking the bits of the higher/lower address means:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  49)  *  LE  X X 0 0, 0 0 X X
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  50)  *  BE	0 0 X X, X X 0 0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  51)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  52)  * Shifting to higher/lower addresses, means:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  53)  *  LE  shift left / shift right
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  54)  *  BE  shift right / shift left
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  55)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  56)  * Extracting 16 bits from a 32 bit reg. value to higher/lower address means:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  57)  *  LE  mask 0 0 X X / shift left
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  58)  *  BE  shift left / mask 0 0 X X
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  59)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  60) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  61) #define UNALIGNED_USER_EXCEPTION	/* also emulate unaligned user accesses (selects the l32e/s32e paths below) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  62) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  63) #if XCHAL_HAVE_BE
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  64) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  65) #define HWORD_START	16	/* BE: a 16-bit value occupies the upper register half */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  66) #define	INSN_OP0	28	/* bit offsets of the insn fields within the fetched word */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  67) #define	INSN_T		24
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  68) #define	INSN_OP1	16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  69) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  70) .macro __ssa8r	r;		ssa8l	\r;		.endm	# set SAR for 'reverse' byte shift
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  71) .macro __sh	r, s;		srl	\r, \s;		.endm	# shift toward higher addresses
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  72) .macro __sl	r, s;		sll	\r, \s;		.endm	# shift toward lower addresses
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  73) .macro __exth	r, s;		extui	\r, \s, 0, 16;	.endm	# extract halfword at higher address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  74) .macro __extl	r, s;		slli	\r, \s, 16;	.endm	# extract halfword at lower address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  75) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  76) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  77) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  78) #define HWORD_START	0	/* LE: a 16-bit value occupies the lower register half */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  79) #define	INSN_OP0	0	/* bit offsets of the insn fields within the fetched word */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  80) #define	INSN_T		4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  81) #define	INSN_OP1	12
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  82) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  83) .macro __ssa8r	r;		ssa8b	\r;		.endm	# set SAR for 'reverse' byte shift
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  84) .macro __sh	r, s;		sll	\r, \s;		.endm	# shift toward higher addresses
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  85) .macro __sl	r, s;		srl	\r, \s;		.endm	# shift toward lower addresses
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  86) .macro __exth	r, s;		slli	\r, \s, 16;	.endm	# extract halfword at higher address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  87) .macro __extl	r, s;		extui	\r, \s, 0, 16;	.endm	# extract halfword at lower address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  88) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  89) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  90) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  91) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  92)  *	xxxx xxxx = imm8 field
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  93)  *	     yyyy = imm4 field
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  94)  *	     ssss = s field
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  95)  *	     tttt = t field
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  96)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  97)  *	       		 16		    0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  98)  *		          -------------------
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300  99)  *	L32I.N		  yyyy ssss tttt 1000
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 100)  *	S32I.N	          yyyy ssss tttt 1001
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 101)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 102)  *	       23			    0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 103)  *		-----------------------------
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 104)  *	res	          0000           0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 105)  *	L16UI	xxxx xxxx 0001 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 106)  *	L32I	xxxx xxxx 0010 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 107)  *	XXX	          0011 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 108)  *	XXX	          0100 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 109)  *	S16I	xxxx xxxx 0101 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 110)  *	S32I	xxxx xxxx 0110 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 111)  *	XXX	          0111 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 112)  *	XXX	          1000 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 113)  *	L16SI	xxxx xxxx 1001 ssss tttt 0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 114)  *	XXX	          1010           0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 115)  *      **L32AI	xxxx xxxx 1011 ssss tttt 0010 unsupported
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 116)  *	XXX	          1100           0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 117)  *	XXX	          1101           0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 118)  *	XXX	          1110           0010
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 119)  *	**S32RI	xxxx xxxx 1111 ssss tttt 0010 unsupported
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 120)  *		-----------------------------
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 121)  *                           ^         ^    ^
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 122)  *    sub-opcode (NIBBLE_R) -+         |    |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 123)  *       t field (NIBBLE_T) -----------+    |
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 124)  *  major opcode (NIBBLE_OP0) --------------+
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 125)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 126) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 127) #define OP0_L32I_N	0x8		/* load immediate narrow */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 128) #define OP0_S32I_N	0x9		/* store immediate narrow */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 129) #define OP1_SI_MASK	0x4		/* OP1 bit set for stores */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 130) #define OP1_SI_BIT	2		/* OP1 bit number for stores */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 131) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 132) #define OP1_L32I	0x2		/* 32-bit load */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 133) #define OP1_L16UI	0x1		/* 16-bit load, zero-extended */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 134) #define OP1_L16SI	0x9		/* 16-bit load, sign-extended */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 135) #define OP1_L32AI	0xb		/* 32-bit acquire load (unsupported, see table above) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 136) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 137) #define OP1_S32I	0x6		/* 32-bit store */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 138) #define OP1_S16I	0x5		/* 16-bit store */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 139) #define OP1_S32RI	0xf		/* 32-bit release store (unsupported, see table above) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 140) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 141) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 142)  * Entry condition:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 143)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 144)  *   a0:	trashed, original value saved on stack (PT_AREG0)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 145)  *   a1:	a1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 146)  *   a2:	new stack pointer, original in DEPC
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 147)  *   a3:	a3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 148)  *   depc:	a2, original value saved on stack (PT_DEPC)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 149)  *   excsave_1:	dispatch table
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 150)  *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 151)  *   PT_DEPC >= VALID_DOUBLE_EXCEPTION_ADDRESS: double exception, DEPC
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 152)  *	     <  VALID_DOUBLE_EXCEPTION_ADDRESS: regular exception
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 153)  */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 154) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 155) 	.literal_position
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 156) ENTRY(fast_unaligned)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 157) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 158) 	/* Note: We don't expect the address to be aligned on a word
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 159) 	 *       boundary. After all, the processor generated that exception
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 160) 	 *       and it would be a hardware fault.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 161) 	 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 162) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 163) 	/* Save some working registers */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 164) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 165) 	s32i	a4, a2, PT_AREG4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 166) 	s32i	a5, a2, PT_AREG5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 167) 	s32i	a6, a2, PT_AREG6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 168) 	s32i	a7, a2, PT_AREG7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 169) 	s32i	a8, a2, PT_AREG8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 170) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 171) 	rsr	a0, depc		# a0 = original a2 (see entry conditions)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 172) 	s32i	a0, a2, PT_AREG2	# save original a2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 173) 	s32i	a3, a2, PT_AREG3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 174) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 175) 	rsr	a3, excsave1		# a3 = exception/dispatch table
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 176) 	movi	a4, fast_unaligned_fixup
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 177) 	s32i	a4, a3, EXC_TABLE_FIXUP	# catch double exceptions below
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 178) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 179) 	/* Keep value of SAR in a0 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 180) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 181) 	rsr	a0, sar
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 182) 	rsr	a8, excvaddr		# load unaligned memory address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 183) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 184) 	/* Now, identify one of the following load/store instructions.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 185) 	 *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 186) 	 * The only possible danger of a double exception on the
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 187) 	 * following l32i instructions is kernel code in vmalloc
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 188) 	 * memory. The processor was just executing at the EPC_1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 189) 	 * address, and indeed, already fetched the instruction.  That
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 190) 	 * guarantees a TLB mapping, which hasn't been replaced by
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 191) 	 * this unaligned exception handler that uses only static TLB
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 192) 	 * mappings. However, high-level interrupt handlers might
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 193) 	 * modify TLB entries, so for the generic case, we register a
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 194) 	 * TABLE_FIXUP handler here, too.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 195) 	 */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 196) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 197) 	/* a0, a2...a8 saved on stack (a0 by the dispatcher, a2 via DEPC), a2 = SP */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 198) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 199) 	/* Extract the instruction that caused the unaligned access. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 200) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 201) 	rsr	a7, epc1	# load exception address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 202) 	movi	a3, ~3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 203) 	and	a3, a3, a7	# mask lower bits
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 204) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 205) 	l32i	a4, a3, 0	# load 2 words
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 206) 	l32i	a5, a3, 4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 207) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 208) 	__ssa8	a7		# set shift amount from insn address byte offset
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 209) 	__src_b	a4, a4, a5	# a4 has the instruction
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 210) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 211) 	/* Analyze the instruction (load or store?). */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 212) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 213) 	extui	a5, a4, INSN_OP0, 4	# get insn.op0 nibble
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 214) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 215) #if XCHAL_HAVE_DENSITY
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 216) 	_beqi	a5, OP0_L32I_N, .Lload	# L32I.N, jump
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 217) 	addi	a6, a5, -OP0_S32I_N
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 218) 	_beqz	a6, .Lstore		# S32I.N, do a store
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 219) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 220) 	/* 'store indicator bit' not set, jump */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 221) 	_bbci.l	a4, OP1_SI_BIT + INSN_OP1, .Lload
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 222) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 223) 	/* Store: Jump to table entry to get the value in the source register.*/
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 224) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 225) .Lstore:movi	a5, .Lstore_table	# table
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 226) 	extui	a6, a4, INSN_T, 4	# get source register
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 227) 	addx8	a5, a6, a5		# entry = table + reg * 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 228) 	jx	a5			# jump into table
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 229) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 230) 	/* Load: Load memory address. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 231) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 232) .Lload: movi	a3, ~3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 233) 	and	a3, a3, a8		# align memory address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 234) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 235) 	__ssa8	a8			# set shift amount from data address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 236) #ifdef UNALIGNED_USER_EXCEPTION
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 237) 	addi	a3, a3, 8		# l32e takes negative offsets; bias base by 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 238) 	l32e	a5, a3, -8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 239) 	l32e	a6, a3, -4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 240) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 241) 	l32i	a5, a3, 0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 242) 	l32i	a6, a3, 4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 243) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 244) 	__src_b	a3, a5, a6		# a3 has the data word
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 245) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 246) #if XCHAL_HAVE_DENSITY
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 247) 	addi	a7, a7, 2		# increment PC (assume 16-bit insn)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 248) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 249) 	extui	a5, a4, INSN_OP0, 4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 250) 	_beqi	a5, OP0_L32I_N, 1f	# l32i.n: jump
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 251) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 252) 	addi	a7, a7, 1		# increment PC, 32-bit instruction
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 253) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 254) 	addi	a7, a7, 3		# increment PC, 32-bit instruction
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 255) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 256) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 257) 	extui	a5, a4, INSN_OP1, 4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 258) 	_beqi	a5, OP1_L32I, 1f	# l32i: jump
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 259) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 260) 	extui	a3, a3, 0, 16		# extract lower 16 bits
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 261) 	_beqi	a5, OP1_L16UI, 1f
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 262) 	addi	a5, a5, -OP1_L16SI
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 263) 	_bnez	a5, .Linvalid_instruction_load
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 264) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 265) 	/* sign extend value (l16si) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 266) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 267) 	slli	a3, a3, 16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 268) 	srai	a3, a3, 16
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 269) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 270) 	/* Set target register. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 271) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 272) 1:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 273) 	extui	a4, a4, INSN_T, 4	# extract target register
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 274) 	movi	a5, .Lload_table
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 275) 	addx8	a4, a4, a5		# entry = table + reg * 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 276) 	jx	a4			# jump to entry for target register
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 277) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 278) 	.align	8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 279) .Lload_table:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 280) 	s32i	a3, a2, PT_AREG0;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 281) 	mov	a1, a3;			_j .Lexit;	.align 8 # fishy?? (a1 is the stack pointer)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 282) 	s32i	a3, a2, PT_AREG2;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 283) 	s32i	a3, a2, PT_AREG3;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 284) 	s32i	a3, a2, PT_AREG4;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 285) 	s32i	a3, a2, PT_AREG5;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 286) 	s32i	a3, a2, PT_AREG6;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 287) 	s32i	a3, a2, PT_AREG7;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 288) 	s32i	a3, a2, PT_AREG8;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 289) 	mov	a9, a3		;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 290) 	mov	a10, a3		;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 291) 	mov	a11, a3		;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 292) 	mov	a12, a3		;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 293) 	mov	a13, a3		;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 294) 	mov	a14, a3		;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 295) 	mov	a15, a3		;	_j .Lexit;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 296) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 297) .Lstore_table:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 298) 	l32i	a3, a2, PT_AREG0;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 299) 	mov	a3, a1;			_j 1f;	.align 8	# fishy?? (a1 is the stack pointer)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 300) 	l32i	a3, a2, PT_AREG2;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 301) 	l32i	a3, a2, PT_AREG3;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 302) 	l32i	a3, a2, PT_AREG4;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 303) 	l32i	a3, a2, PT_AREG5;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 304) 	l32i	a3, a2, PT_AREG6;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 305) 	l32i	a3, a2, PT_AREG7;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 306) 	l32i	a3, a2, PT_AREG8;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 307) 	mov	a3, a9		;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 308) 	mov	a3, a10		;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 309) 	mov	a3, a11		;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 310) 	mov	a3, a12		;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 311) 	mov	a3, a13		;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 312) 	mov	a3, a14		;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 313) 	mov	a3, a15		;	_j 1f;	.align 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 314) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 315) 	/* We cannot handle this exception. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 316) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 317) 	.extern _kernel_exception
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 318) .Linvalid_instruction_load:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 319) .Linvalid_instruction_store:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 320) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 321) 	movi	a4, 0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 322) 	rsr	a3, excsave1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 323) 	s32i	a4, a3, EXC_TABLE_FIXUP	# unregister the fixup handler
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 324) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 325) 	/* Restore a4...a8 and SAR, set SP, and jump to default exception. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 326) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 327) 	l32i	a8, a2, PT_AREG8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 328) 	l32i	a7, a2, PT_AREG7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 329) 	l32i	a6, a2, PT_AREG6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 330) 	l32i	a5, a2, PT_AREG5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 331) 	l32i	a4, a2, PT_AREG4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 332) 	wsr	a0, sar
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 333) 	mov	a1, a2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 334) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 335) 	rsr	a0, ps
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 336) 	bbsi.l  a0, PS_UM_BIT, 2f     # jump if user mode
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 337) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 338) 	movi	a0, _kernel_exception
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 339) 	jx	a0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 340) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 341) 2:	movi	a0, _user_exception
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 342) 	jx	a0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 343) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 344) 1: 	# a7: instruction pointer, a4: instruction, a3: value
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 345) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 346) 	movi	a6, 0			# mask: ffffffff:00000000
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 347) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 348) #if XCHAL_HAVE_DENSITY
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 349) 	addi	a7, a7, 2		# incr. PC,assume 16-bit instruction
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 350) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 351) 	extui	a5, a4, INSN_OP0, 4	# extract OP0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 352) 	addi	a5, a5, -OP0_S32I_N
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 353) 	_beqz	a5, 1f			# s32i.n: jump
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 354) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 355) 	addi	a7, a7, 1		# increment PC, 32-bit instruction
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 356) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 357) 	addi	a7, a7, 3		# increment PC, 32-bit instruction
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 358) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 359) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 360) 	extui	a5, a4, INSN_OP1, 4	# extract OP1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 361) 	_beqi	a5, OP1_S32I, 1f	# jump if 32 bit store
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 362) 	_bnei	a5, OP1_S16I, .Linvalid_instruction_store
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 363) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 364) 	movi	a5, -1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 365) 	__extl	a3, a3			# get 16-bit value
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 366) 	__exth	a6, a5			# get 16-bit mask ffffffff:ffff0000
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 367) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 368) 	/* Get memory address */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 369) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 370) 1:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 371) 	movi	a4, ~3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 372) 	and	a4, a4, a8		# align memory address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 373) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 374) 	/* Insert value into memory */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 375) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 376) 	movi	a5, -1			# mask: ffffffff:XXXX0000
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 377) #ifdef UNALIGNED_USER_EXCEPTION
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 378) 	addi	a4, a4, 8		# l32e/s32e take negative offsets; bias base by 8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 379) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 380) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 381) 	__ssa8r a8			# set reverse shift amount from address
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 382) 	__src_b	a8, a5, a6		# lo-mask  F..F0..0 (BE) 0..0F..F (LE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 383) 	__src_b	a6, a6, a5		# hi-mask  0..0F..F (BE) F..F0..0 (LE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 384) #ifdef UNALIGNED_USER_EXCEPTION
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 385) 	l32e	a5, a4, -8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 386) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 387) 	l32i	a5, a4, 0		# load lower address word
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 388) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 389) 	and	a5, a5, a8		# mask
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 390) 	__sh	a8, a3 			# shift value
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 391) 	or	a5, a5, a8		# or with original value
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 392) #ifdef UNALIGNED_USER_EXCEPTION
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 393) 	s32e	a5, a4, -8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 394) 	l32e	a8, a4, -4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 395) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 396) 	s32i	a5, a4, 0		# store
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 397) 	l32i	a8, a4, 4		# same for upper address word
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 398) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 399) 	__sl	a5, a3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 400) 	and	a6, a8, a6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 401) 	or	a6, a6, a5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 402) #ifdef UNALIGNED_USER_EXCEPTION
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 403) 	s32e	a6, a4, -4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 404) #else
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 405) 	s32i	a6, a4, 4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 406) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 407) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 408) .Lexit:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 409) #if XCHAL_HAVE_LOOPS
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 410) 	rsr	a4, lend		# check if we reached LEND
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 411) 	bne	a7, a4, 1f
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 412) 	rsr	a4, lcount		# and LCOUNT != 0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 413) 	beqz	a4, 1f
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 414) 	addi	a4, a4, -1		# decrement LCOUNT and set
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 415) 	rsr	a7, lbeg		# set PC to LBEGIN
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 416) 	wsr	a4, lcount
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 417) #endif
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 418) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 419) 1:	wsr	a7, epc1		# skip emulated instruction
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 420) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 421) 	/* Update icount if we're single-stepping in userspace. */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 422) 	rsr	a4, icountlevel
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 423) 	beqz	a4, 1f
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 424) 	bgeui	a4, LOCKLEVEL + 1, 1f
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 425) 	rsr	a4, icount
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 426) 	addi	a4, a4, 1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 427) 	wsr	a4, icount
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 428) 1:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 429) 	movi	a4, 0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 430) 	rsr	a3, excsave1
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 431) 	s32i	a4, a3, EXC_TABLE_FIXUP	# unregister the fixup handler
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 432) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 433) 	/* Restore working registers */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 434) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 435) 	l32i	a8, a2, PT_AREG8
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 436) 	l32i	a7, a2, PT_AREG7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 437) 	l32i	a6, a2, PT_AREG6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 438) 	l32i	a5, a2, PT_AREG5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 439) 	l32i	a4, a2, PT_AREG4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 440) 	l32i	a3, a2, PT_AREG3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 441) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 442) 	/* restore SAR and return */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 443) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 444) 	wsr	a0, sar
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 445) 	l32i	a0, a2, PT_AREG0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 446) 	l32i	a2, a2, PT_AREG2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 447) 	rfe
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 448) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 449) ENDPROC(fast_unaligned)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 450) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 451) ENTRY(fast_unaligned_fixup)	# entered on a double exception inside fast_unaligned
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 452) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 453) 	l32i	a2, a3, EXC_TABLE_DOUBLE_SAVE	# a2 saved at double-exception entry
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 454) 	wsr	a3, excsave1		# put dispatch-table pointer back
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 455) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 456) 	l32i	a8, a2, PT_AREG8	# undo the register saves done by fast_unaligned
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 457) 	l32i	a7, a2, PT_AREG7
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 458) 	l32i	a6, a2, PT_AREG6
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 459) 	l32i	a5, a2, PT_AREG5
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 460) 	l32i	a4, a2, PT_AREG4
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 461) 	l32i	a0, a2, PT_AREG2
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 462) 	xsr	a0, depc			# restore depc and a0
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 463) 	wsr	a0, sar
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 464) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 465) 	rsr	a0, exccause
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 466) 	s32i	a0, a2, PT_DEPC			# mark as a regular exception
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 467) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 468) 	rsr	a0, ps
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 469) 	bbsi.l  a0, PS_UM_BIT, 1f		# jump if user mode
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 470) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 471) 	rsr	a0, exccause
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 472) 	addx4	a0, a0, a3              	# find entry in table
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 473) 	l32i	a0, a0, EXC_TABLE_FAST_KERNEL   # load handler
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 474) 	l32i	a3, a2, PT_AREG3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 475) 	jx	a0				# redispatch to the fast kernel handler
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 476) 1:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 477) 	rsr	a0, exccause
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 478) 	addx4	a0, a0, a3              	# find entry in table
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 479) 	l32i	a0, a0, EXC_TABLE_FAST_USER     # load handler
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 480) 	l32i	a3, a2, PT_AREG3
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 481) 	jx	a0				# redispatch to the fast user handler
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 482) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 483) ENDPROC(fast_unaligned_fixup)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 484) 
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 485) #endif /* XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION */