/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * vlock.S - simple voting lock implementation for ARM
 *
 * Created by:	Dave Martin, 2012-08-16
 * Copyright:	(C) 2012-2013 Linaro Limited
 *
 * This algorithm is described in more detail in
 * Documentation/arm/vlocks.rst.
 */

#include <linux/linkage.h>
#include "vlock.h"

/* Select different code if voting flags can fit in a single word. */
#if VLOCK_VOTING_SIZE > 4
#define FEW(x...)
#define MANY(x...) x
#else
#define FEW(x...) x
#define MANY(x...)
#endif
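
/*
 * Illustration (assuming the voting area is sized at one byte per CPU, as
 * used by voting_begin below): with at most four CPUs per cluster the whole
 * voting area fits in a single 32-bit word, so the FEW() lines are assembled
 * and the MANY() lines vanish; larger clusters take the MANY() path and scan
 * the voting area one word at a time.
 */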

@ voting lock for first-man coordination

.macro voting_begin rbase:req, rcpu:req, rscratch:req
	mov	\rscratch, #1
	strb	\rscratch, [\rbase, \rcpu]
	dmb
.endm

.macro voting_end rbase:req, rcpu:req, rscratch:req
	dmb
	mov	\rscratch, #0
	strb	\rscratch, [\rbase, \rcpu]
	dsb	st
	sev
.endm
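
@ voting_begin raises this CPU's "currently voting" byte; the DMB orders that
@ store before the subsequent reads of the lock state.  voting_end clears the
@ byte again: its leading DMB makes the vote itself visible before the flag is
@ dropped, and the DSB + SEV pair wakes any CPU sleeping in WFE while polling
@ the voting flags.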

/*
 * The vlock structure must reside in Strongly-Ordered or Device memory.
 * This implementation deliberately eliminates most of the barriers which
 * would be required for other memory types, and assumes that independent
 * writes to neighbouring locations within a cacheline do not interfere
 * with one another.
 */
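
/*
 * For orientation, a rough C sketch of the trylock sequence implemented
 * below.  The names "struct vlock", "owner", "voting[]" and
 * "any_voting_flag_set()" are illustrative only; the real layout, value
 * encodings and barriers are those in the assembly and in
 * Documentation/arm/vlocks.rst.
 *
 *	bool vlock_trylock(struct vlock *v, unsigned int cpu)
 *	{
 *		v->voting[cpu] = 1;			// voting_begin
 *		if (v->owner != VLOCK_OWNER_NONE) {	// lock already held?
 *			v->voting[cpu] = 0;		// voting_end
 *			return false;
 *		}
 *		v->owner = cpu;				// submit my vote
 *		v->voting[cpu] = 0;			// voting_end
 *
 *		while (any_voting_flag_set(v))		// hypothetical helper:
 *			;				// scan the voting bytes
 *
 *		return v->owner == cpu;			// did I win?
 *	}
 *
 * Note that the assembly actually stores cpu + VLOCK_VOTING_OFFSET in the
 * owner field, so the vote and the final comparison use the same encoding.
 */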

@ r0: lock structure base
@ r1: CPU ID (0-based index within cluster)
ENTRY(vlock_trylock)
	add	r1, r1, #VLOCK_VOTING_OFFSET

	voting_begin	r0, r1, r2

	ldrb	r2, [r0, #VLOCK_OWNER_OFFSET]	@ check whether lock is held
	cmp	r2, #VLOCK_OWNER_NONE
	bne	trylock_fail			@ fail if so

	@ Control dependency: the conditional branch above ensures that the
	@ strb below cannot be observed before the previous ldrb, so no
	@ explicit barrier is needed here.

	strb	r1, [r0, #VLOCK_OWNER_OFFSET]	@ submit my vote

	voting_end	r0, r1, r2		@ implies DMB

	@ Wait for the current round of voting to finish:
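	@ i.e. poll every CPU's voting byte (word by word in the MANY case),
	@ sleeping in WFE while any byte is still nonzero; the SEV in
	@ voting_end wakes us up to re-check.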

MANY(	mov	r3, #VLOCK_VOTING_OFFSET			)
0:
MANY(	ldr	r2, [r0, r3]					)
FEW(	ldr	r2, [r0, #VLOCK_VOTING_OFFSET]			)
	cmp	r2, #0
	wfene
	bne	0b
MANY(	add	r3, r3, #4					)
MANY(	cmp	r3, #VLOCK_VOTING_OFFSET + VLOCK_VOTING_SIZE	)
MANY(	bne	0b						)

	@ Check who won:

	dmb
	ldrb	r2, [r0, #VLOCK_OWNER_OFFSET]
	eor	r0, r1, r2	@ zero if I won, else nonzero
	bx	lr

trylock_fail:
	voting_end	r0, r1, r2
	mov	r0, #1		@ nonzero indicates that I lost
	bx	lr
ENDPROC(vlock_trylock)

@ r0: lock structure base
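@
@ Releasing the lock simply publishes VLOCK_OWNER_NONE in the owner field;
@ the DSB ensures that store has completed before the SEV wakes any CPU
@ waiting in WFE.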
ENTRY(vlock_unlock)
	dmb
	mov	r1, #VLOCK_OWNER_NONE
	strb	r1, [r0, #VLOCK_OWNER_OFFSET]
	dsb	st
	sev
	bx	lr
ENDPROC(vlock_unlock)