/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_CMPXCHG_GRB_H
#define __ASM_SH_CMPXCHG_GRB_H

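/*
 * Each helper below uses the SH gUSA-style rollback convention for its
 * atomic region (backed by register banks, which is presumably where the
 * "grb" in the file name comes from): r0 is loaded with the address of the
 * end of the critical section (the "1:" label) and r15 with the negated
 * byte length of the instructions between LOGIN and LOGOUT (#-4, #-6 or
 * #-8 below).  If the region is interrupted before the final store, the
 * exception path sees the negative r15 and restarts execution at the start
 * of the region, so the load/store pair behaves atomically with respect to
 * interrupts.  The real stack pointer is kept in r1 and restored at LOGOUT.
 */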
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov     #-4,  r15     \n\t" /* LOGIN */
		"   mov.l   @%1,  %0      \n\t" /* load old value */
		"   mov.l   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

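/*
 * The 16- and 8-bit variants below follow the same pattern, but zero-extend
 * the old value (extu.w/extu.b) inside the atomic region so that the
 * returned unsigned long carries only the low 16 or 8 bits.
 */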
static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov     #-6,  r15     \n\t" /* LOGIN */
		"   mov.w   @%1,  %0      \n\t" /* load old value */
		"   extu.w  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.w   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov     #-6,  r15     \n\t" /* LOGIN */
		"   mov.b   @%1,  %0      \n\t" /* load old value */
		"   extu.b  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.b   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m),
		  "+r"  (val)		/* inhibit r15 overloading */
		:
		: "memory", "r0", "r1");

	return retval;
}

static inline unsigned long __cmpxchg_u32(volatile int *m, unsigned long old,
					  unsigned long new)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov     #-8,  r15     \n\t" /* LOGIN */
		"   mov.l   @%3,  %0      \n\t" /* load old value */
		"   cmp/eq  %0,   %1      \n\t"
		"   bf      1f            \n\t" /* if not equal */
		"   mov.l   %2,   @%3     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r" (old), "+r" (new)	/* old or new can be r15 */
		: "r" (m)
		: "memory", "r0", "r1", "t");

	return retval;
}
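
/*
 * These helpers are not normally called directly; the size-dispatching
 * wrappers in <asm/cmpxchg.h> (xchg()/cmpxchg() and friends) select them by
 * operand width.  A minimal sketch of the expected semantics, using plain
 * calls on a local u32 purely for illustration:
 *
 *	u32 v = 0;
 *	unsigned long old;
 *
 *	old = xchg_u32(&v, 1);				 // old == 0, v == 1
 *	old = __cmpxchg_u32((volatile int *)&v, 1, 2);	 // old == 1, v == 2
 *	old = __cmpxchg_u32((volatile int *)&v, 1, 3);	 // old == 2, v stays 2
 */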

#endif /* __ASM_SH_CMPXCHG_GRB_H */