author     Michael S. Tsirkin <mst@redhat.com>  2016-01-08 09:23:58 +0200
committer  Michael S. Tsirkin <mst@redhat.com>  2016-01-12 20:47:02 +0200
commit     9e3f84ce416663c84a191cb3ead300fc1a4adadc
tree       8c4537b92a1498af2599787cfc8622cd24a0ab20 /arch/sh/include/asm/cmpxchg-llsc.h
parent     sh: support 1 and 2 byte xchg
sh: move xchg_cmpxchg to a header by itself
Looks like future sh variants will support a 4-byte cas which will be
used to implement 1 and 2 byte xchg. This is exactly what we do for
llsc now, so move the portable part of the code into a separate header
to make it easy to reuse.

Suggested-by: Rich Felker <dalias@libc.org>
Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
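For reference, the trick being factored out operates on the aligned
32-bit word that contains the 1- or 2-byte target. Below is a
userspace sketch of the same logic (little-endian case only): GCC's
__sync_val_compare_and_swap stands in for the kernel's __cmpxchg_u32,
and a plain volatile read stands in for READ_ONCE(). It is an
illustration of the technique, not kernel code.

#include <stdint.h>
#include <stdio.h>

#define BITS_PER_BYTE 8

static uint32_t xchg_small(volatile void *ptr, uint32_t x, int size)
{
	/* Locate the aligned 32-bit word containing *ptr. */
	int off = (uintptr_t)ptr % sizeof(uint32_t);
	volatile uint32_t *p = (volatile uint32_t *)((uintptr_t)ptr - off);
	/* Bit position of the operand within that word (little-endian). */
	int bitoff = off * BITS_PER_BYTE;
	uint32_t bitmask = ((1u << (size * BITS_PER_BYTE)) - 1) << bitoff;
	uint32_t oldv, newv, ret;

	do {
		oldv = *p;                                /* READ_ONCE(*p) */
		ret = (oldv & bitmask) >> bitoff;         /* old small value */
		newv = (oldv & ~bitmask) | (x << bitoff); /* splice in new */
	} while (__sync_val_compare_and_swap(p, oldv, newv) != oldv);

	return ret;
}

int main(void)
{
	_Alignas(uint32_t) uint8_t buf[4] = { 0x11, 0x22, 0x33, 0x44 };
	uint32_t old = xchg_small(&buf[1], 0xAA, sizeof(uint8_t));
	printf("old=0x%02x new=0x%02x\n", old, buf[1]); /* old=0x22 new=0xaa */
	return 0;
}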
Diffstat (limited to 'arch/sh/include/asm/cmpxchg-llsc.h')
-rw-r--r--  arch/sh/include/asm/cmpxchg-llsc.h | 35
1 file changed, 1 insertion(+), 34 deletions(-)
diff --git a/arch/sh/include/asm/cmpxchg-llsc.h b/arch/sh/include/asm/cmpxchg-llsc.h
index e754794e282f..fcfd32271bff 100644
--- a/arch/sh/include/asm/cmpxchg-llsc.h
+++ b/arch/sh/include/asm/cmpxchg-llsc.h
@@ -1,9 +1,6 @@
 #ifndef __ASM_SH_CMPXCHG_LLSC_H
 #define __ASM_SH_CMPXCHG_LLSC_H
 
-#include <linux/bitops.h>
-#include <asm/byteorder.h>
-
 static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
 {
 	unsigned long retval;
@@ -50,36 +47,6 @@ __cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
 	return retval;
 }
 
-static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
-{
-	int off = (unsigned long)ptr % sizeof(u32);
-	volatile u32 *p = ptr - off;
-#ifdef __BIG_ENDIAN
-	int bitoff = (sizeof(u32) - 1 - off) * BITS_PER_BYTE;
-#else
-	int bitoff = off * BITS_PER_BYTE;
-#endif
-	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
-	u32 oldv, newv;
-	u32 ret;
-
-	do {
-		oldv = READ_ONCE(*p);
-		ret = (oldv & bitmask) >> bitoff;
-		newv = (oldv & ~bitmask) | (x << bitoff);
-	} while (__cmpxchg_u32(p, oldv, newv) != oldv);
-
-	return ret;
-}
-
-static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
-{
-	return __xchg_cmpxchg(m, val, sizeof *m);
-}
-
-static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
-{
-	return __xchg_cmpxchg(m, val, sizeof *m);
-}
+#include <asm/cmpxchg-xchg.h>
 
 #endif /* __ASM_SH_CMPXCHG_LLSC_H */
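With the generic helpers in their own header, a cas-based sh variant
would only need to supply __cmpxchg_u32 (plus a cmpxchg-based
xchg_u32) and then pull the 1- and 2-byte helpers back in. A sketch of
what such a header could look like, assuming a J2-style cas.l
instruction; the header name and the inline asm are illustrative and
not part of this patch:

/* Hypothetical <asm/cmpxchg-cas.h> sketch -- not part of this patch. */
#ifndef __ASM_SH_CMPXCHG_CAS_H
#define __ASM_SH_CMPXCHG_CAS_H

static inline unsigned long
__cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
{
	/* cas.l: if (*m == old) store new; new ends up holding old *m. */
	__asm__ __volatile__("cas.l %1, %0, @r0"
			     : "+r" (new)
			     : "r" (old), "z" (m)
			     : "t", "memory");
	return new;
}

static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long old;

	do
		old = *m;
	while (__cmpxchg_u32(m, old, val) != old);
	return old;
}

/* 1- and 2-byte xchg now come from the shared header. */
#include <asm/cmpxchg-xchg.h>

#endif /* __ASM_SH_CMPXCHG_CAS_H */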