^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1) /* SPDX-License-Identifier: GPL-2.0-only */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) * kexec for arm64
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) * Copyright (C) Linaro.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6) * Copyright (C) Huawei Futurewei Technologies.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 7) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 8)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9) #include <linux/kexec.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 10) #include <linux/linkage.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 11)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 12) #include <asm/assembler.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 13) #include <asm/kexec.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 14) #include <asm/page.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 15) #include <asm/sysreg.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 16)
/*
 * arm64_relocate_new_kernel - Put a 2nd stage image in place and boot it.
 *
 * The memory that the old kernel occupies may be overwritten when copying the
 * new image to its final location. To ensure that the
 * arm64_relocate_new_kernel routine which does that copy is not overwritten,
 * all code and data needed by arm64_relocate_new_kernel must be between the
 * symbols arm64_relocate_new_kernel and .Lcopy_end. The
 * machine_kexec() routine will copy arm64_relocate_new_kernel to the kexec
 * control_code_page, a special page which has been set up to be preserved
 * during the copy operation.
 */
/*
 * In:  x0 = kimage_head, the head of the kimage entry list; each 64-bit
 *           entry is a page-aligned address ORed with IND_* flag bits.
 *      x1 = kimage_start, physical entry point of the new image.
 *      x2 = dtb address to hand to the new image in x0.
 *
 * Register roles while walking the list (must survive copy_page, which
 * clobbers x0-x7 and the scratch regs passed to it):
 *      x18 = dtb address        x17 = kimage_start   x16 = current entry
 *      x15 = dcache line size   x14 = entry ptr      x13 = copy dest
 *      x12 = page address decoded from the current entry
 *      x20, x21 = scratch (loop bound / copy_page cursors)
 *
 * Never returns: ends with a br to the new image's entry point.
 */
SYM_CODE_START(arm64_relocate_new_kernel)

	/* Setup the list loop variables. */
	mov	x18, x2				/* x18 = dtb address */
	mov	x17, x1				/* x17 = kimage_start */
	mov	x16, x0				/* x16 = kimage_head */
	raw_dcache_line_size x15, x0		/* x15 = dcache line size */
	mov	x14, xzr			/* x14 = entry ptr */
	mov	x13, xzr			/* x13 = copy dest */

	/* Check if the new image needs relocation. */
	tbnz	x16, IND_DONE_BIT, .Ldone

.Lloop:
	and	x12, x16, PAGE_MASK		/* x12 = addr (strip IND_* flags) */

	/* Test the entry flags. */
.Ltest_source:
	tbz	x16, IND_SOURCE_BIT, .Ltest_indirection

	/*
	 * Invalidate dest page to PoC, one dcache line at a time, so no
	 * stale cached lines for this page survive the copy below.
	 */
	mov	x0, x13
	add	x20, x0, #PAGE_SIZE		/* x20 = end of dest page */
	sub	x1, x15, #1			/* x1 = line-size mask */
	bic	x0, x0, x1			/* align cursor down to a line */
2:	dc	ivac, x0			/* invalidate line to PoC */
	add	x0, x0, x15
	cmp	x0, x20
	b.lo	2b
	dsb	sy				/* complete invalidation first */

	/* Copy one source page to dest (copy_page advances its cursors). */
	mov	x20, x13
	mov	x21, x12
	copy_page x20, x21, x0, x1, x2, x3, x4, x5, x6, x7

	/* dest += PAGE_SIZE */
	add	x13, x13, PAGE_SIZE
	b	.Lnext

.Ltest_indirection:
	tbz	x16, IND_INDIRECTION_BIT, .Ltest_destination

	/* ptr = addr: continue walking at the new indirection page. */
	mov	x14, x12
	b	.Lnext

.Ltest_destination:
	tbz	x16, IND_DESTINATION_BIT, .Lnext

	/* dest = addr */
	mov	x13, x12

.Lnext:
	/* entry = *ptr++ */
	ldr	x16, [x14], #8

	/* while (!(entry & DONE)) */
	tbz	x16, IND_DONE_BIT, .Lloop

.Ldone:
	/* wait for writes from copy_page to finish */
	dsb	nsh
	ic	iallu				/* drop stale instruction cache */
	dsb	nsh				/* complete ic iallu ... */
	isb					/* ... and synchronize context */

	/* Start new image: x0 = dtb, x1-x3 = 0 per the arm64 boot protocol. */
	mov	x0, x18
	mov	x1, xzr
	mov	x2, xzr
	mov	x3, xzr
	br	x17

SYM_CODE_END(arm64_relocate_new_kernel)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 103)
.align 3	/* To keep the 64-bit values below naturally aligned. */

.Lcopy_end:	/* End of the code machine_kexec() must copy. */

	/*
	 * Advance the location counter to KEXEC_CONTROL_PAGE_SIZE. Since
	 * .org cannot move backwards, this doubles as a build-time check
	 * that the relocation code above fits in the control page.
	 */
	.org	KEXEC_CONTROL_PAGE_SIZE

/*
 * arm64_relocate_new_kernel_size - Number of bytes to copy to the
 * control_code_page.
 */
	.globl arm64_relocate_new_kernel_size
arm64_relocate_new_kernel_size:
	.quad	.Lcopy_end - arm64_relocate_new_kernel