/* SPDX-License-Identifier: GPL-2.0 */
/*
 *
 * linux/arch/h8300/kernel/entry.S
 *
 * Yoshinori Sato <ysato@users.sourceforge.jp>
 * David McCullough <davidm@snapgear.com>
 *
 */

/*
 * entry.S
 * Exception/interrupt gateway and system call entry.
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

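/*
 * The two CPU variants below (H8/300H and H8S) provide the same helper
 * macros -- SHLL2/SHLR2, SAVEREGS/RESTOREREGS, SAVEEXR/RESTOREEXR -- so
 * the common entry code further down stays CPU-independent.  The H8S has
 * an EXR register and multi-bit shift instructions; the H8/300H has
 * neither, so its EXR macros are empty and the shifts are done one bit
 * at a time.
 */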
#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
	.h8300h
	.macro SHLL2 reg
	shll.l	\reg
	shll.l	\reg
	.endm
	.macro SHLR2 reg
	shlr.l	\reg
	shlr.l	\reg
	.endm
	.macro SAVEREGS
	mov.l	er0,@-sp
	mov.l	er1,@-sp
	mov.l	er2,@-sp
	mov.l	er3,@-sp
	.endm
	.macro RESTOREREGS
	mov.l	@sp+,er3
	mov.l	@sp+,er2
	.endm
	.macro SAVEEXR
	.endm
	.macro RESTOREEXR
	.endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
	.h8300s
	.macro SHLL2 reg
	shll.l	#2,\reg
	.endm
	.macro SHLR2 reg
	shlr.l	#2,\reg
	.endm
	.macro SAVEREGS
	stm.l	er0-er3,@-sp
	.endm
	.macro RESTOREREGS
	ldm.l	@sp+,er2-er3
	.endm
	.macro SAVEEXR
	mov.w	@(USEREXR:16,er0),r1
	mov.w	r1,@(LEXR-LER3:16,sp)		/* copy EXR */
	.endm
	.macro RESTOREEXR
	mov.w	@(LEXR-LER1:16,sp),r1		/* restore EXR */
	mov.b	r1l,r1h
	mov.w	r1,@(USEREXR:16,er0)
	.endm
#endif


/* CPU context save/restore macros. */

	.macro SAVE_ALL
	mov.l	er0,@-sp
	stc	ccr,r0l				/* check kernel mode */
	btst	#4,r0l
	bne	5f

	/* user mode */
	mov.l	sp,@_sw_usp
	mov.l	@sp,er0				/* restore saved er0 */
	orc	#0x10,ccr			/* switch kernel stack */
	mov.l	@_sw_ksp,sp
	sub.l	#(LRET-LORIG),sp		/* allocate LORIG - LRET */
	SAVEREGS
	mov.l	@_sw_usp,er0
	mov.l	@(USERRET:16,er0),er1		/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	SAVEEXR

	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)		/* copy ER0 */
	mov.w	e1,r1				/* e1 highbyte = ccr */
	and	#0xef,r1h			/* clear the kernel-mode flag (bit 4) */
	bra	6f
5:
	/* kernel mode */
	mov.l	@sp,er0				/* restore saved er0 */
	subs	#2,sp				/* set dummy ccr */
	subs	#4,sp				/* set dummy sp */
	SAVEREGS
	mov.w	@(LRET-LER3:16,sp),r1		/* copy old ccr */
6:
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)		/* set ccr */
	mov.l	@_sw_usp,er2
	mov.l	er2,@(LSP-LER3:16,sp)		/* set usp */
	mov.l	er6,@-sp			/* syscall arg #6 */
	mov.l	er5,@-sp			/* syscall arg #5 */
	mov.l	er4,@-sp			/* syscall arg #4 */
	.endm					/* r1 = ccr */
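
/*
 * SAVE_ALL builds the kernel-side register frame used as struct pt_regs:
 * er0-er6, the original er0 (LORIG), CCR, the saved user SP, the return
 * PC and, on H8S, EXR.  The slot offsets (LER0..LVEC) are generated by
 * asm-offsets.  On exit r1 still holds the saved CCR, which the entry
 * points below use to tell a user-mode entry from a kernel-mode one.
 */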

	.macro RESTORE_ALL
	mov.l	@sp+,er4
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	RESTOREREGS
	mov.w	@(LCCR-LER1:16,sp),r0		/* check kernel mode */
	btst	#4,r0l
	bne	7f

	orc	#0xc0,ccr
	mov.l	@(LSP-LER1:16,sp),er0
	mov.l	@(LER0-LER1:16,sp),er1		/* restore ER0 */
	mov.l	er1,@er0
	RESTOREEXR
	mov.w	@(LCCR-LER1:16,sp),r1		/* restore the CCR + RET addr */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(USERRET:16,er0)

	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp			/* remove LORIG - LRET */
	mov.l	sp,@_sw_ksp
	andc	#0xef,ccr			/* switch to user mode */
	mov.l	er0,sp
	bra	8f
7:
	mov.l	@sp+,er1
	add.l	#10,sp
8:
	mov.l	@sp+,er0
	adds	#4,sp				/* remove the sw created LVEC */
	rte
	.endm
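
/*
 * RESTORE_ALL is the inverse: it pops the saved registers and tests the
 * saved CCR to see where it is returning to.  For a return to user space
 * it writes the saved er0, EXR, CCR and return PC back into the user-side
 * exception frame, records the kernel SP in _sw_ksp, clears the
 * kernel-mode flag and switches to the user stack; for a kernel-mode
 * return it simply discards the extra frame words.  Both paths finish by
 * dropping the software-created LVEC slot and executing rte.
 */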

	.globl	_system_call
	.globl	ret_from_exception
	.globl	ret_from_fork
	.globl	ret_from_kernel_thread
	.globl	ret_from_interrupt
	.globl	_interrupt_redirect_table
	.globl	_sw_ksp,_sw_usp
	.globl	_resume
	.globl	_interrupt_entry
	.globl	_trace_break
	.globl	_nmi

#if defined(CONFIG_ROMKERNEL)
	.section .int_redirect,"ax"
_interrupt_redirect_table:
#if defined(CONFIG_CPU_H8300H)
	.rept	7
	.long	0
	.endr
#endif
#if defined(CONFIG_CPU_H8S)
	.rept	5
	.long	0
	.endr
	jmp	@_trace_break
	.long	0
#endif

	jsr	@_interrupt_entry		/* NMI */
	jmp	@_system_call			/* TRAPA #0 (System call) */
	.long	0
#if defined(CONFIG_KGDB)
	jmp	@_kgdb_trap
#else
	.long	0
#endif
	jmp	@_trace_break			/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@_interrupt_entry
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
	.globl	_interrupt_redirect_table
	.section .bss
_interrupt_redirect_table:
	.space	4
#endif

	.section .text
	.align	2
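/*
 * Common interrupt entry.  The jsr in the redirect table pushed its own
 * return address as LVEC; subtracting the table base and dividing by the
 * 4-byte entry size (then subtracting one, since the return address
 * points past the entry) recovers the vector number, which is passed to
 * do_IRQ in er0.
 */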
_interrupt_entry:
	SAVE_ALL
	/* r1l is saved ccr */
	mov.l	sp,er0
	add.l	#LVEC,er0
	btst	#4,r1l
	bne	1f
	/* user LVEC */
	mov.l	@_sw_usp,er0
	adds	#4,er0
1:
	mov.l	@er0,er0			/* LVEC address */
#if defined(CONFIG_ROMKERNEL)
	sub.l	#_interrupt_redirect_table,er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@_interrupt_redirect_table,er1
	sub.l	er1,er0
#endif
	SHLR2	er0
	dec.l	#1,er0
	mov.l	sp,er1
	subs	#4,er1				/* adjust ret_pc */
#if defined(CONFIG_CPU_H8S)
	orc	#7,exr
#endif
	jsr	@do_IRQ
	jmp	@ret_from_interrupt

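/*
 * TRAPA #0 system call entry.  The syscall number arrives in er0 and is
 * kept in er4 across the trace hooks; arguments 1-3 are reloaded from the
 * saved frame (LER1-LER3) before the handler is called, while arguments
 * 4-6 were already pushed by SAVE_ALL from er4-er6.  On the way out the
 * return value is written back to the frame's er0 slot and control falls
 * through to resume_userspace.
 */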
_system_call:
	subs	#4,sp				/* dummy LVEC */
	SAVE_ALL
	/* er0: syscall nr */
	andc	#0xbf,ccr
	mov.l	er0,er4

	/* save top of frame */
	mov.l	sp,er0
	jsr	@set_esp0
	andc	#0x3f,ccr
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.l	@(TI_FLAGS:16,er2),er2
	and.w	#_TIF_WORK_SYSCALL_MASK,r2
	beq	1f
	mov.l	sp,er0
	jsr	@do_syscall_trace_enter
1:
	cmp.l	#__NR_syscalls,er4
	bcc	badsys
	SHLL2	er4
	mov.l	#_sys_call_table,er0
	add.l	er4,er0
	mov.l	@er0,er4
	beq	ret_from_exception:16
	mov.l	@(LER1:16,sp),er0
	mov.l	@(LER2:16,sp),er1
	mov.l	@(LER3:16,sp),er2
	jsr	@er4
	mov.l	er0,@(LER0:16,sp)		/* save the return value */
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.l	@(TI_FLAGS:16,er2),er2
	and.w	#_TIF_WORK_SYSCALL_MASK,r2
	beq	2f
	mov.l	sp,er0
	jsr	@do_syscall_trace_leave
2:
	orc	#0xc0,ccr
	bra	resume_userspace

badsys:
	mov.l	#-ENOSYS,er0
	mov.l	er0,@(LER0:16,sp)
	bra	resume_userspace

#if !defined(CONFIG_PREEMPTION)
#define resume_kernel restore_all
#endif


ret_from_exception:
#if defined(CONFIG_PREEMPTION)
	orc	#0xc0,ccr
#endif
ret_from_interrupt:
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l
	bne	resume_kernel:16		/* return from kernel */
resume_userspace:
	andc	#0xbf,ccr
	mov.l	sp,er4
	and.w	#0xe000,r4			/* er4 <- current thread info */
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1
	beq	restore_all:8
work_pending:
	btst	#TIF_NEED_RESCHED,r1l
	bne	work_resched:8
	/* work notifysig */
	mov.l	sp,er0
	subs	#4,er0				/* er0: pt_regs */
	jsr	@do_notify_resume
	bra	resume_userspace:8
work_resched:
	mov.l	sp,er0
	jsr	@set_esp0
	jsr	@schedule
	bra	resume_userspace:8
restore_all:
	RESTORE_ALL				/* Does RTE */

#if defined(CONFIG_PREEMPTION)
resume_kernel:
	mov.l	@(TI_PRE_COUNT:16,er4),er0
	bne	restore_all:8
need_resched:
	mov.l	@(TI_FLAGS:16,er4),er0
	btst	#TIF_NEED_RESCHED,r0l
	beq	restore_all:8
	mov.b	@(LCCR+1:16,sp),r0l		/* Interrupt Enabled? */
	bmi	restore_all:8
	mov.l	sp,er0
	jsr	@set_esp0
	jsr	@preempt_schedule_irq
	bra	need_resched:8
#endif

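/*
 * Return paths for newly created tasks.  Both call schedule_tail() with
 * er2 (presumably the previous task, as set up by the switch_to /
 * copy_thread code) moved into er0.  A kernel thread additionally picks
 * up its function pointer and argument from the LER5/LER4 slots of the
 * saved frame and calls the function before falling into the normal
 * exception return path.
 */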
ret_from_fork:
	mov.l	er2,er0
	jsr	@schedule_tail
	jmp	@ret_from_exception

ret_from_kernel_thread:
	mov.l	er2,er0
	jsr	@schedule_tail
	mov.l	@(LER4:16,sp),er0
	mov.l	@(LER5:16,sp),er1
	jsr	@er1
	jmp	@ret_from_exception

_resume:
	/*
	 * Beware - when entering _resume, er0 points at the previous
	 * task's thread struct and er1 at the next task's (er2 is
	 * assumed to carry the "prev" pointer back to the caller and
	 * is left untouched here), so don't change these registers
	 * until their contents are no longer needed.
	 */

	/* save ccr */
	sub.w	r3,r3
	stc	ccr,r3l
	mov.w	r3,@(THREAD_CCR+2:16,er0)

	/* disable interrupts */
	orc	#0xc0,ccr
	mov.l	@_sw_usp,er3
	mov.l	er3,@(THREAD_USP:16,er0)
	mov.l	sp,@(THREAD_KSP:16,er0)

	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */

	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@_sw_usp
	mov.l	@(THREAD_KSP:16,er1),sp

	/* restore status register */
	mov.w	@(THREAD_CCR+2:16,er1),r3

	ldc	r3l,ccr
	rts

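/*
 * TRAPA #3 / trace exception entry.  The LORIG slot of the frame is set
 * to -1, and if the 16-bit word just before the saved user PC is not
 * 0x5730 (the trapa #3 opcode) the PC is wound back by two bytes before
 * the masked address is passed to trace_trap().
 */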
_trace_break:
	subs	#4,sp
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)
	mov.l	sp,er0
	jsr	@set_esp0
	mov.l	@_sw_usp,er0
	mov.l	@er0,er1
	mov.w	@(-2:16,er1),r2
	cmp.w	#0x5730,r2
	beq	1f
	subs	#2,er1
	mov.l	er1,@er0
1:
	and.w	#0xff,e1
	mov.l	er1,er0
	jsr	@trace_trap
	jmp	@ret_from_exception

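/*
 * NMI entry.  A software LVEC value derived from the interrupt redirect
 * table (offset 8*4 from the table base, i.e. just past the NMI slot) is
 * stored where the vector would normally go, and control joins the
 * common _interrupt_entry path.
 */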
_nmi:
	subs	#4, sp
	mov.l	er0, @-sp
	mov.l	@_interrupt_redirect_table, er0
	add.l	#8*4, er0
	mov.l	er0, @(4,sp)
	mov.l	@sp+, er0
	jmp	@_interrupt_entry

#if defined(CONFIG_KGDB)
_kgdb_trap:
	subs	#4,sp
	SAVE_ALL
	mov.l	sp,er0
	add.l	#LRET,er0
	mov.l	er0,@(LSP,sp)
	jsr	@set_esp0
	mov.l	sp,er0
	subs	#4,er0
	jsr	@h8300_kgdb_trap
	jmp	@ret_from_exception
#endif

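/*
 * _sw_ksp and _sw_usp hold the kernel and user stack pointers across the
 * mode switches above: SAVE_ALL stashes the user SP in _sw_usp on a
 * user-mode entry, and RESTORE_ALL records the kernel SP in _sw_ksp just
 * before dropping back to user mode.
 */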
	.section .bss
_sw_ksp:
	.space	4
_sw_usp:
	.space	4

	.end