/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * vDSO provided cache flush routines
 *
 * Copyright (C) 2004 Benjamin Herrenschmidt (benh@kernel.crashing.org),
 *                    IBM Corp.
 */
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/vdso.h>
#include <asm/vdso_datapage.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>

	.text

/*
 * Default "generic" version of __kernel_sync_dicache.
 *
 * void __kernel_sync_dicache(unsigned long start, unsigned long end)
 *
 * Flushes the data cache & invalidates the instruction cache for the
 * provided range [start, end[
 */
V_FUNCTION_BEGIN(__kernel_sync_dicache)
  .cfi_startproc
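/*
 * Under a 64-bit kernel the cache block sizes are CPU dependent, so
 * they are read below from the vDSO data page; get_datapage clobbers
 * LR (bcl/mflr), hence the save/restore through r12.  A 32-bit only
 * kernel uses the compile-time L1_CACHE_BYTES constant instead.
 */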
#ifdef CONFIG_PPC64
	mflr	r12
  .cfi_register lr,r12
	get_datapage	r10, r0
	mtlr	r12
#endif

#ifdef CONFIG_PPC64
	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
#else
	li	r5, L1_CACHE_BYTES - 1
#endif
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5		/* ensure we get enough */
#ifdef CONFIG_PPC64
	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
	srw.	r8,r8,r9		/* compute line count */
#else
	srwi.	r8, r8, L1_CACHE_SHIFT
	mr	r7, r6
#endif
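/*
 * r6 = start rounded down to a cache line, r8 = number of lines to
 * flush.  Under CONFIG_PPC64, r7 holds the line stride read from the
 * data page; on a 32-bit only kernel, r7 keeps a copy of the aligned
 * start for the icbi loop below.  CR0[SO] is cleared because vDSO
 * calls follow the syscall return convention, where a set SO bit
 * signals an error to the caller.
 */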
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
	mtctr	r8
1:	dcbst	0,r6
#ifdef CONFIG_PPC64
	add	r6,r6,r7
#else
	addi	r6, r6, L1_CACHE_BYTES
#endif
	bdnz	1b
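/*
 * Order the dcbst flushes above ahead of the icbi invalidations below:
 * this is the architected dcbst/sync/icbi/isync sequence for making
 * newly written instructions visible to instruction fetch.
 */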
	sync

/* Now invalidate the instruction cache */

#ifdef CONFIG_PPC64
	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5
	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
	srw.	r8,r8,r9		/* compute line count */
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
#endif
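/*
 * Under CONFIG_PPC64, r6/r7/r8 were recomputed above from the icache
 * geometry in the data page.  On a 32-bit only kernel, the icbi loop
 * reuses r8 (line count) from the dcache pass and walks r7, which
 * still holds the line-aligned start address.
 */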
	mtctr	r8
#ifdef CONFIG_PPC64
2:	icbi	0,r6
	add	r6,r6,r7
#else
2:	icbi	0, r7
	addi	r7, r7, L1_CACHE_BYTES
#endif
	bdnz	2b
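/*
 * isync discards any already-fetched instructions so execution
 * continues with up-to-date code, then return 0 for success.
 */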
	isync
	li	r3,0
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache)


/*
 * POWER5 version of __kernel_sync_dicache
 */
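/*
 * The kernel's vDSO setup substitutes this routine for the generic one
 * on CPUs advertising CPU_FTR_COHERENT_ICACHE: hardware keeps the
 * instruction cache coherent, so no per-line dcbst/icbi loop is needed
 * and sync/isync ordering is sufficient.
 */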
V_FUNCTION_BEGIN(__kernel_sync_dicache_p5)
  .cfi_startproc
	crclr	cr0*4+so
	sync
	isync
	li	r3,0
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache_p5)
