/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/kernel/head-nommu.S
 *
 *  Copyright (C) 1994-2002 Russell King
 *  Copyright (C) 2003-2006 Hyok S. Choi
 *
 *  Common kernel startup code (non-paged MM)
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <linux/errno.h>

#include <asm/assembler.h>
#include <asm/ptrace.h>
#include <asm/asm-offsets.h>
#include <asm/memory.h>
#include <asm/cp15.h>
#include <asm/thread_info.h>
#include <asm/v7m.h>
#include <asm/mpu.h>
#include <asm/page.h>

/*
 * Kernel startup entry point.
 * ---------------------------
 *
 * This is normally called from the decompressor code.  The requirements
 * are: MMU = off, D-cache = off, I-cache = don't care, r0 = 0,
 * r1 = machine nr.
 *
 * See linux/arch/arm/tools/mach-types for the complete list of machine
 * numbers for r1.
 *
 */

	__HEAD

#ifdef CONFIG_CPU_THUMBONLY
	.thumb
ENTRY(stext)
#else
	.arm
ENTRY(stext)

 THUMB(	badr	r9, 1f		)	@ Kernel is always entered in ARM.
 THUMB(	bx	r9		)	@ If this is a Thumb-2 kernel,
 THUMB(	.thumb			)	@ switch to Thumb now.
 THUMB(1:			)
#endif

#ifdef CONFIG_ARM_VIRT_EXT
	bl	__hyp_stub_install
#endif
	@ ensure svc mode and all interrupts masked
	safe_svcmode_maskall r9		@ and irqs disabled
#if defined(CONFIG_CPU_CP15)
	mrc	p15, 0, r9, c0, c0		@ get processor id
#elif defined(CONFIG_CPU_V7M)
	ldr	r9, =BASEADDR_V7M_SCB
	ldr	r9, [r9, V7M_SCB_CPUID]
#else
	ldr	r9, =CONFIG_PROCESSOR_ID
#endif
	bl	__lookup_processor_type		@ r5=procinfo r9=cpuid
	movs	r10, r5				@ invalid processor (r5=0)?
	beq	__error_p			@ yes, error 'p'

#ifdef CONFIG_ARM_MPU
	bl	__setup_mpu
#endif

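	/*
	 * Call the CPU-specific setup routine from the procinfo entry found
	 * above (r10).  PROCINFO_INITFUNC is stored as an offset relative to
	 * the procinfo entry, hence the add of r10 before branching.  The
	 * routine returns to 1: below, which loads lr with __mmap_switched
	 * and branches to __after_proc_init, whose final "ret lr" then
	 * enters __mmap_switched.
	 */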
	badr	lr, 1f				@ return (PIC) address
	ldr	r12, [r10, #PROCINFO_INITFUNC]
	add	r12, r12, r10
	ret	r12
1:	ldr	lr, =__mmap_switched
	b	__after_proc_init
ENDPROC(stext)

#ifdef CONFIG_SMP
	.text
ENTRY(secondary_startup)
	/*
	 * Common entry point for secondary CPUs.
	 *
	 * Ensure that we're in SVC mode, and IRQs are disabled.  Look up
	 * the processor type - there is no need to check the machine type
	 * as it has already been validated by the primary processor.
	 */
#ifdef CONFIG_ARM_VIRT_EXT
	bl	__hyp_stub_install_secondary
#endif
	safe_svcmode_maskall r9

#ifndef CONFIG_CPU_CP15
	ldr	r9, =CONFIG_PROCESSOR_ID
#else
	mrc	p15, 0, r9, c0, c0		@ get processor id
#endif
	bl	__lookup_processor_type		@ r5=procinfo r9=cpuid
	movs	r10, r5				@ invalid processor?
	beq	__error_p			@ yes, error 'p'

	ldr	r7, __secondary_data

#ifdef CONFIG_ARM_MPU
	bl	__secondary_setup_mpu		@ Initialize the MPU
#endif

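	/* As on the boot CPU: call the CPU-specific setup routine from the
	 * procinfo entry, returning to 1: below. */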
	badr	lr, 1f				@ return (PIC) address
	ldr	r12, [r10, #PROCINFO_INITFUNC]
	add	r12, r12, r10
	ret	r12
1:	bl	__after_proc_init
	ldr	sp, [r7, #12]			@ set up the stack pointer
	mov	fp, #0
	b	secondary_start_kernel
ENDPROC(secondary_startup)

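/*
 * __secondary_data holds the address of the secondary_data structure
 * filled in by __cpu_up: offset 0 is read as the mpu_rgn_info pointer in
 * __secondary_setup_mpu and offset 12 as the initial stack pointer above.
 */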
	.type	__secondary_data, %object
__secondary_data:
	.long	secondary_data
#endif /* CONFIG_SMP */

/*
 * Set the Control Register and read the processor ID.
 */
	.text
__after_proc_init:
M_CLASS(movw	r12, #:lower16:BASEADDR_V7M_SCB)
M_CLASS(movt	r12, #:upper16:BASEADDR_V7M_SCB)
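	/* On M-class, r12 holds the SCB base so the ID, CCR and MPU
	 * registers below can be accessed as memory-mapped registers;
	 * A/R-class uses CP15 accesses instead. */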
#ifdef CONFIG_ARM_MPU
M_CLASS(ldr	r3, [r12, 0x50])
AR_CLASS(mrc	p15, 0, r3, c0, c1, 4)		@ Read ID_MMFR0
	and	r3, r3, #(MMFR0_PMSA)		@ PMSA field
	teq	r3, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	1f
	teq	r3, #(MMFR0_PMSAv8)		@ PMSA v8
	/*
	 * Memory region attributes for PMSAv8:
	 *
	 *   n = AttrIndx[2:0]
	 *                      n       MAIR
	 *   DEVICE_nGnRnE      000     00000000
	 *   NORMAL             001     11111111
	 */
	ldreq	r3, =PMSAv8_MAIR(0x00, PMSAv8_RGN_DEVICE_nGnRnE) | \
		     PMSAv8_MAIR(0xff, PMSAv8_RGN_NORMAL)
AR_CLASS(mcreq	p15, 0, r3, c10, c2, 0)		@ MAIR 0
M_CLASS(streq	r3, [r12, #PMSAv8_MAIR0])
	moveq	r3, #0
AR_CLASS(mcreq	p15, 0, r3, c10, c2, 1)		@ MAIR 1
M_CLASS(streq	r3, [r12, #PMSAv8_MAIR1])

1:
#endif
#ifdef CONFIG_CPU_CP15
	/*
	 * CP15 system control register value returned in r0 from
	 * the CPU init function.
	 */

#ifdef CONFIG_ARM_MPU
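	/*
	 * The 'eq' conditions below still reflect the PMSA probe at the top
	 * of this function: they hold only if a PMSAv7 or PMSAv8 MPU was
	 * detected, so SCTLR.M/BR are left untouched otherwise.
	 */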
	biceq	r0, r0, #CR_BR			@ Disable the 'default mem-map'
	orreq	r0, r0, #CR_M			@ Set SCTLR.M (MPU on)
#endif
#if defined(CONFIG_ALIGNMENT_TRAP) && __LINUX_ARM_ARCH__ < 6
	orr	r0, r0, #CR_A
#else
	bic	r0, r0, #CR_A
#endif
#ifdef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #CR_C
#endif
#ifdef CONFIG_CPU_BPREDICT_DISABLE
	bic	r0, r0, #CR_Z
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	bic	r0, r0, #CR_I
#endif
	mcr	p15, 0, r0, c1, c0, 0		@ write control reg
	instr_sync
#elif defined (CONFIG_CPU_V7M)
#ifdef CONFIG_ARM_MPU
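	/*
	 * On M-class the MPU is enabled through the memory-mapped MPU_CTRL
	 * register instead: clear PRIVDEFENA so privileged code does not
	 * fall back to the default memory map (the counterpart of clearing
	 * CR_BR in the CP15 path) and set ENABLE.  As above, 'eq' means an
	 * MPU was actually detected.
	 */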
	ldreq	r3, [r12, MPU_CTRL]
	biceq	r3, #MPU_CTRL_PRIVDEFENA
	orreq	r3, #MPU_CTRL_ENABLE
	streq	r3, [r12, MPU_CTRL]
	isb
#endif
	/* For V7M systems we want to modify the CCR similarly to the SCTLR */
#ifdef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_DC
#endif
#ifdef CONFIG_CPU_BPREDICT_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_BP
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_IC
#endif
	str	r0, [r12, V7M_SCB_CCR]
	/* Pass exc_ret to __mmap_switched */
	mov	r0, r10
#endif /* CONFIG_CPU_CP15 elif CONFIG_CPU_V7M */
	ret	lr
ENDPROC(__after_proc_init)
	.ltorg

#ifdef CONFIG_ARM_MPU

#ifndef CONFIG_CPU_V7M
/* Set which MPU region should be programmed */
.macro set_region_nr tmp, rgnr, unused
	mov	\tmp, \rgnr			@ Use static region numbers
	mcr	p15, 0, \tmp, c6, c2, 0		@ Write RGNR
.endm

/* Setup a single MPU region, either D or I side (D-side for unified) */
.macro setup_region bar, acr, sr, side = PMSAv7_DATA_SIDE, unused
	mcr	p15, 0, \bar, c6, c1, (0 + \side)	@ I/DRBAR
	mcr	p15, 0, \acr, c6, c1, (4 + \side)	@ I/DRACR
	mcr	p15, 0, \sr, c6, c1, (2 + \side)	@ I/DRSR
.endm
#else
.macro set_region_nr tmp, rgnr, base
	mov	\tmp, \rgnr
	str	\tmp, [\base, #PMSAv7_RNR]
.endm

.macro setup_region bar, acr, sr, unused, base
	lsl	\acr, \acr, #16
	orr	\acr, \acr, \sr
	str	\bar, [\base, #PMSAv7_RBAR]
	str	\acr, [\base, #PMSAv7_RASR]
.endm

#endif
/*
 * Set up the MPU and the initial MPU regions. We create the following regions:
 * Region 0: Use this for probing the MPU details, so leave disabled.
 * Region 1: Background region - covers the whole of RAM as strongly ordered
 * Region 2: Normal, Shared, cacheable for RAM. From PHYS_OFFSET, size from r6
 * Region 3: Normal, shared, inaccessible from PL0 to protect the vectors page
 *
 * r6: Value to be written to DRSR (and IRSR if required) for PMSAv7_RAM_REGION
 */
	__HEAD

ENTRY(__setup_mpu)

	/* Probe for v7 PMSA compliance */
M_CLASS(movw	r12, #:lower16:BASEADDR_V7M_SCB)
M_CLASS(movt	r12, #:upper16:BASEADDR_V7M_SCB)

AR_CLASS(mrc	p15, 0, r0, c0, c1, 4)		@ Read ID_MMFR0
M_CLASS(ldr	r0, [r12, 0x50])
	and	r0, r0, #(MMFR0_PMSA)		@ PMSA field
	teq	r0, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	__setup_pmsa_v7
	teq	r0, #(MMFR0_PMSAv8)		@ PMSA v8
	beq	__setup_pmsa_v8

	ret	lr
ENDPROC(__setup_mpu)

ENTRY(__setup_pmsa_v7)
	/* Calculate the size of a region covering just the kernel */
	ldr	r5, =PLAT_PHYS_OFFSET		@ Region start: PHYS_OFFSET
	ldr	r6, =(_end)			@ Cover whole kernel
	sub	r6, r6, r5			@ Minimum size of region to map
	clz	r6, r6				@ Region size must be 2^N...
	rsb	r6, r6, #31			@ ...so round up region size
	lsl	r6, r6, #PMSAv7_RSR_SZ		@ Put size in right field
	orr	r6, r6, #(1 << PMSAv7_RSR_EN)	@ Set region enabled bit
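	/*
	 * The clz/rsb pair above computes 31 - clz(size), i.e. log2 of the
	 * kernel size rounded down; since the DRSR size field encodes a
	 * region of 2^(SZ + 1) bytes, this selects a power-of-two region
	 * large enough to cover the whole image.
	 */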

	/* Determine whether the D/I-side memory map is unified. We set the
	 * flags here and continue to use them for the rest of this function */
AR_CLASS(mrc	p15, 0, r0, c0, c0, 4)		@ MPUIR
M_CLASS(ldr	r0, [r12, #MPU_TYPE])
	ands	r5, r0, #MPUIR_DREGION_SZMASK	@ 0 size d region => No MPU
	bxeq	lr
	tst	r0, #MPUIR_nU			@ MPUIR_nU = 0 for unified

	/* Setup second region first to free up r6 */
	set_region_nr r0, #PMSAv7_RAM_REGION, r12
	isb
	/* Full access from PL0, PL1, shared for CONFIG_SMP, cacheable */
	ldr	r0, =PLAT_PHYS_OFFSET		@ RAM starts at PHYS_OFFSET
	ldr	r5, =(PMSAv7_AP_PL1RW_PL0RW | PMSAv7_RGN_NORMAL)

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ PHYS_OFFSET, shared, enabled
	beq	1f					@ Memory-map not unified
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ PHYS_OFFSET, shared, enabled
1:	isb

	/* First/background region */
	set_region_nr r0, #PMSAv7_BG_REGION, r12
	isb
	/* Execute Never, strongly ordered, inaccessible to PL0, rw PL1 */
	mov	r0, #0				@ BG region starts at 0x0
	ldr	r5, =(PMSAv7_ACR_XN | PMSAv7_RGN_STRONGLY_ORDERED | PMSAv7_AP_PL1RW_PL0NA)
	mov	r6, #PMSAv7_RSR_ALL_MEM		@ 4GB region, enabled

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ 0x0, BG region, enabled
	beq	2f					@ Memory-map not unified
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ 0x0, BG region, enabled
2:	isb

#ifdef CONFIG_XIP_KERNEL
	set_region_nr r0, #PMSAv7_ROM_REGION, r12
	isb

	ldr	r5, =(PMSAv7_AP_PL1RO_PL0NA | PMSAv7_RGN_NORMAL)

	ldr	r0, =CONFIG_XIP_PHYS_ADDR	@ ROM start
	ldr	r6, =(_exiprom)			@ ROM end
	sub	r6, r6, r0			@ Minimum size of region to map
	clz	r6, r6				@ Region size must be 2^N...
	rsb	r6, r6, #31			@ ...so round up region size
	lsl	r6, r6, #PMSAv7_RSR_SZ		@ Put size in right field
	orr	r6, r6, #(1 << PMSAv7_RSR_EN)	@ Set region enabled bit

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ XIP_PHYS_ADDR, shared, enabled
	beq	3f					@ Memory-map not unified
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ XIP_PHYS_ADDR, shared, enabled
3:	isb
#endif
	ret	lr
ENDPROC(__setup_pmsa_v7)

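/*
 * Initial PMSAv8 region layout programmed below:
 *   Region 0: the XIP ROM (CONFIG_XIP_KERNEL only) - Normal, shared, PL1 RW
 *   Region 1: the kernel image, KERNEL_START..KERNEL_END - Normal, shared, PL1 RW
 *   Region 2: 0x0 up to the lower of KERNEL_START/XIP_PHYS_ADDR (skipped if
 *             that start is 0) - Device-nGnRnE, XN, PL1 RW
 *   Region 3: the higher of KERNEL_END/_exiprom up to 4GB - Device-nGnRnE, XN, PL1 RW
 *   Region 4: the gap between the ROM and the kernel image (CONFIG_XIP_KERNEL
 *             only) - Device-nGnRnE, XN, PL1 RW
 */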
ENTRY(__setup_pmsa_v8)
	mov	r0, #0
AR_CLASS(mcr	p15, 0, r0, c6, c2, 1)		@ PRSEL
M_CLASS(str	r0, [r12, #PMSAv8_RNR])
	isb

#ifdef CONFIG_XIP_KERNEL
	ldr	r5, =CONFIG_XIP_PHYS_ADDR	@ ROM start
	ldr	r6, =(_exiprom)			@ ROM end
	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_NORMAL) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c8, 0)		@ PRBAR0
AR_CLASS(mcr	p15, 0, r6, c6, c8, 1)		@ PRLAR0
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(0)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(0)])
#endif

	ldr	r5, =KERNEL_START
	ldr	r6, =KERNEL_END
	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_NORMAL) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c8, 4)		@ PRBAR1
AR_CLASS(mcr	p15, 0, r6, c6, c8, 5)		@ PRLAR1
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(1)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(1)])

	/* Setup Background: 0x0 - min(KERNEL_START, XIP_PHYS_ADDR) */
#ifdef CONFIG_XIP_KERNEL
	ldr	r6, =KERNEL_START
	ldr	r5, =CONFIG_XIP_PHYS_ADDR
	cmp	r6, r5
	movcs	r6, r5
#else
	ldr	r6, =KERNEL_START
#endif
	cmp	r6, #0
	beq	1f

	mov	r5, #0
	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c9, 0)		@ PRBAR2
AR_CLASS(mcr	p15, 0, r6, c6, c9, 1)		@ PRLAR2
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(2)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(2)])

1:
	/* Setup Background: max(KERNEL_END, _exiprom) - 0xffffffff */
#ifdef CONFIG_XIP_KERNEL
	ldr	r5, =KERNEL_END
	ldr	r6, =(_exiprom)
	cmp	r5, r6
	movcc	r5, r6
#else
	ldr	r5, =KERNEL_END
#endif
	mov	r6, #0xffffffff
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c9, 4)		@ PRBAR3
AR_CLASS(mcr	p15, 0, r6, c6, c9, 5)		@ PRLAR3
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(3)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(3)])

#ifdef CONFIG_XIP_KERNEL
	/* Setup Background: min(_exiprom, KERNEL_END) - max(KERNEL_START, XIP_PHYS_ADDR) */
	ldr	r5, =(_exiprom)
	ldr	r6, =KERNEL_END
	cmp	r5, r6
	movcs	r5, r6

	ldr	r6, =KERNEL_START
	ldr	r0, =CONFIG_XIP_PHYS_ADDR
	cmp	r6, r0
	movcc	r6, r0

	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

#ifdef CONFIG_CPU_V7M
	/* There is no alias for n == 4 */
	mov	r0, #4
	str	r0, [r12, #PMSAv8_RNR]		@ PRSEL
	isb

	str	r5, [r12, #PMSAv8_RBAR_A(0)]
	str	r6, [r12, #PMSAv8_RLAR_A(0)]
#else
	mcr	p15, 0, r5, c6, c10, 0		@ PRBAR4
	mcr	p15, 0, r6, c6, c10, 1		@ PRLAR4
#endif
#endif
	ret	lr
ENDPROC(__setup_pmsa_v8)

#ifdef CONFIG_SMP
/*
 * r6: pointer to mpu_rgn_info
 */

	.text
ENTRY(__secondary_setup_mpu)
	/* Use MPU region info supplied by __cpu_up */
	ldr	r6, [r7]			@ get secondary_data.mpu_rgn_info

	/* Probe for v7 PMSA compliance */
	mrc	p15, 0, r0, c0, c1, 4		@ Read ID_MMFR0
	and	r0, r0, #(MMFR0_PMSA)		@ PMSA field
	teq	r0, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	__secondary_setup_pmsa_v7
	teq	r0, #(MMFR0_PMSAv8)		@ PMSA v8
	beq	__secondary_setup_pmsa_v8
	b	__error_p
ENDPROC(__secondary_setup_mpu)

/*
 * r6: pointer to mpu_rgn_info
 */
ENTRY(__secondary_setup_pmsa_v7)
	/* Determine whether the D/I-side memory map is unified. We set the
	 * flags here and continue to use them for the rest of this function */
	mrc	p15, 0, r0, c0, c0, 4		@ MPUIR
	ands	r5, r0, #MPUIR_DREGION_SZMASK	@ 0 size d region => No MPU
	beq	__error_p

	ldr	r4, [r6, #MPU_RNG_INFO_USED]
	mov	r5, #MPU_RNG_SIZE
	add	r3, r6, #MPU_RNG_INFO_RNGS
	mla	r3, r4, r5, r3

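	/* r3 now points just past the last used entry in mpu_rgn_info.rgns;
	 * walk the saved regions backwards, from used - 1 down to 0,
	 * programming each one through RGNR/DRBAR/DRACR/DRSR (and the I-side
	 * registers when the memory map is not unified). */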
1:
	tst	r0, #MPUIR_nU			@ MPUIR_nU = 0 for unified
	sub	r3, r3, #MPU_RNG_SIZE
	sub	r4, r4, #1

	set_region_nr r0, r4
	isb

	ldr	r0, [r3, #MPU_RGN_DRBAR]
	ldr	r6, [r3, #MPU_RGN_DRSR]
	ldr	r5, [r3, #MPU_RGN_DRACR]

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE
	beq	2f
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE
2:	isb

	mrc	p15, 0, r0, c0, c0, 4		@ Reevaluate the MPUIR
	cmp	r4, #0
	bgt	1b

	ret	lr
ENDPROC(__secondary_setup_pmsa_v7)

ENTRY(__secondary_setup_pmsa_v8)
	ldr	r4, [r6, #MPU_RNG_INFO_USED]
#ifndef CONFIG_XIP_KERNEL
	add	r4, r4, #1
#endif
	mov	r5, #MPU_RNG_SIZE
	add	r3, r6, #MPU_RNG_INFO_RNGS
	mla	r3, r4, r5, r3

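	/* Restore the regions saved in mpu_rgn_info, walking the table
	 * backwards and selecting each region through PRSEL before
	 * programming its PRBAR/PRLAR pair. */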
1:
	sub	r3, r3, #MPU_RNG_SIZE
	sub	r4, r4, #1

	mcr	p15, 0, r4, c6, c2, 1		@ PRSEL
	isb

	ldr	r5, [r3, #MPU_RGN_PRBAR]
	ldr	r6, [r3, #MPU_RGN_PRLAR]

	mcr	p15, 0, r5, c6, c3, 0		@ PRBAR
	mcr	p15, 0, r6, c6, c3, 1		@ PRLAR

	cmp	r4, #0
	bgt	1b

	ret	lr
ENDPROC(__secondary_setup_pmsa_v8)
#endif /* CONFIG_SMP */
#endif /* CONFIG_ARM_MPU */
#include "head-common.S"