| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| #ifndef __ARCH_SPARC_CMPXCHG__ |
| #define __ARCH_SPARC_CMPXCHG__ |
| |
/* Atomically exchange *m with new; implemented out of line (arch code). */
unsigned long __xchg_u32(volatile u32 *m, u32 new);
/* Deliberately undefined: referencing it for an unsupported operand size
 * turns a bad xchg() into a link-time error instead of silent breakage. */
void __xchg_called_with_bad_pointer(void);
| |
| static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, int size) |
| { |
| <------>switch (size) { |
| <------>case 4: |
| <------><------>return __xchg_u32(ptr, x); |
| <------>} |
| <------>__xchg_called_with_bad_pointer(); |
| <------>return x; |
| } |
| |
| #define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr)))) |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
/* Deliberately undefined: a cmpxchg() on an unsupported operand size
 * references this symbol and fails at link time. */
void __cmpxchg_called_with_bad_pointer(void);

/* Atomically: if *m == old, store new_; returns the prior value of *m.
 * Implemented out of line (arch code). */
unsigned long __cmpxchg_u32(volatile u32 *m, u32 old, u32 new_);
| |
| |
| static inline unsigned long |
| __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size) |
| { |
| <------>switch (size) { |
| <------>case 4: |
| <------><------>return __cmpxchg_u32((u32 *)ptr, (u32)old, (u32)new_); |
| <------>default: |
| <------><------>__cmpxchg_called_with_bad_pointer(); |
| <------><------>break; |
| <------>} |
| <------>return old; |
| } |
| |
/* Type-generic atomic compare-and-exchange: if *ptr == o, store n.
 * Evaluates o and n exactly once (via the _o_/_n_ temporaries) and
 * returns the prior value of *ptr, cast back to the pointee type. */
#define cmpxchg(ptr, o, n) \
({ \
	__typeof__(*(ptr)) _o_ = (o); \
	__typeof__(*(ptr)) _n_ = (n); \
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_, \
			(unsigned long)_n_, sizeof(*(ptr))); \
})
| |
| u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new); |
| #define cmpxchg64(ptr, old, new) __cmpxchg_u64(ptr, old, new) |
| |
| #include <asm-generic/cmpxchg-local.h> |
| |
| |
| |
| |
| |
/* cmpxchg_local/cmpxchg64_local: compare-and-exchange variants that are
 * only required to be atomic with respect to the current CPU; both map
 * onto the asm-generic fallbacks pulled in above. */
#define cmpxchg_local(ptr, o, n)				  	       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
| |
| #endif |
| |