^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /* SPDX-License-Identifier: GPL-2.0-only */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) * CPU reset routines
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) * Copyright (C) 2001 Deep Blue Solutions Ltd.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6) * Copyright (C) 2012 ARM Ltd.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) * Copyright (C) 2015 Huawei Futurewei Technologies.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10) #include <linux/linkage.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11) #include <asm/assembler.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) #include <asm/sysreg.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) #include <asm/virt.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 15) .text
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 16) .pushsection .idmap.text, "awx"
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 17)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 18) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 19) * __cpu_soft_restart(el2_switch, entry, arg0, arg1, arg2) - Helper for
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 20) * cpu_soft_restart.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 21) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 22) * @el2_switch: Flag to indicate a switch to EL2 is needed.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 23) * @entry: Location to jump to for soft reset.
 * @arg0: First argument passed to @entry. (relocation list)
 * @arg1: Second argument passed to @entry. (physical kernel entry)
 * @arg2: Third argument passed to @entry. (physical dtb address)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 27) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 28) * Put the CPU into the same state as it would be if it had been reset, and
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 29) * branch to what would be the reset vector. It must be executed with the
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 30) * flat identity mapping.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 31) */
SYM_CODE_START(__cpu_soft_restart)
	/* Prepare a reset-like SCTLR_EL1 value: MMU and caches off. */
	mov_q	x12, INIT_SCTLR_EL1_MMU_OFF
	pre_disable_mmu_workaround
	/*
	 * either disable EL1&0 translation regime or disable EL2&0 translation
	 * regime if HCR_EL2.E2H == 1
	 *
	 * Safe only because we are executing from the flat identity map, so
	 * the PC stays valid across the VA->PA switch. The isb makes the
	 * SCTLR write take effect before any further instruction fetch.
	 */
	msr	sctlr_el1, x12
	isb

	/*
	 * If el2_switch (x0) is non-zero, ask the hyp stub to perform the
	 * reset at EL2: the hvc transfers control with x0 = HVC_SOFT_RESTART
	 * and x1..x4 still holding entry/arg0/arg1/arg2, and does not return.
	 */
	cbz	x0, 1f				// el2_switch?
	mov	x0, #HVC_SOFT_RESTART
	hvc	#0				// no return

	/*
	 * EL1 path: shuffle the caller's arguments into the AAPCS argument
	 * registers expected by @entry, keeping the target in x8 so the
	 * moves cannot clobber it, then branch — no return.
	 */
1:	mov	x8, x1				// entry
	mov	x0, x2				// arg0
	mov	x1, x3				// arg1
	mov	x2, x4				// arg2
	br	x8
SYM_CODE_END(__cpu_soft_restart)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 52)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 53) .popsection