author    Will Deacon <will.deacon@arm.com>  2015-05-14 18:05:50 +0100
committer Will Deacon <will.deacon@arm.com>  2015-07-27 15:28:52 +0100
commit    e9a4b795652f654a7870727e5333c1b709b8736c (patch)
tree      ade6676d032ea31c5f24c210d778a857b16a37a5 /arch/arm64/include/asm/atomic_ll_sc.h
parent    arm64: cmpxchg: patch in lse instructions when supported by the CPU (diff)
arm64: cmpxchg_dbl: patch in lse instructions when supported by the CPU
On CPUs which support the LSE atomic instructions introduced in ARMv8.1,
it makes sense to use them in preference to ll/sc sequences.

This patch introduces runtime patching of our cmpxchg_double primitives
so that the LSE casp instruction is used instead.

Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
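For context, a hedged sketch of the primitive being accelerated: the kernel-facing cmpxchg_double() macro compares and swaps two adjacent machine words as one atomic unit, built on the __cmpxchg_double*() helpers this patch touches, and returns nonzero on success. The 'lifo' type and lifo_replace() helper below are illustrative inventions, not part of this patch; the 16-byte alignment requirement, however, is real for both the ldxp/stxp pair and casp.

/*
 * Hypothetical caller (illustration only).  Pairing a pointer with a
 * generation counter is the classic ABA defence for a lock-free list
 * head, and is exactly the double-word shape cmpxchg_double() serves.
 */
#include <linux/types.h>
#include <asm/cmpxchg.h>

struct lifo {
	void		*head;	/* first word: list head */
	unsigned long	gen;	/* second word: ABA generation counter */
} __attribute__((aligned(16)));	/* both words must sit in one aligned pair */

static bool lifo_replace(struct lifo *l, void *old_head,
			 unsigned long old_gen, void *new_head)
{
	/* Nonzero iff both words still held the expected values. */
	return cmpxchg_double(&l->head, &l->gen,
			      old_head, old_gen,
			      new_head, old_gen + 1);
}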
Diffstat (limited to 'arch/arm64/include/asm/atomic_ll_sc.h')
-rw-r--r--  arch/arm64/include/asm/atomic_ll_sc.h  34
1 file changed, 34 insertions(+), 0 deletions(-)
diff --git a/arch/arm64/include/asm/atomic_ll_sc.h b/arch/arm64/include/asm/atomic_ll_sc.h
index 4864158d486e..f89f1e4ba577 100644
--- a/arch/arm64/include/asm/atomic_ll_sc.h
+++ b/arch/arm64/include/asm/atomic_ll_sc.h
@@ -253,4 +253,38 @@ __CMPXCHG_CASE( , , mb_8, dmb ish, "memory")

#undef __CMPXCHG_CASE

+#define __CMPXCHG_DBL(name, mb, cl) \
+__LL_SC_INLINE int \
+__LL_SC_PREFIX(__cmpxchg_double##name(unsigned long old1, \
+ unsigned long old2, \
+ unsigned long new1, \
+ unsigned long new2, \
+ volatile void *ptr)) \
+{ \
+ unsigned long tmp, ret; \
+ \
+ asm volatile("// __cmpxchg_double" #name "\n" \
+ " " #mb "\n" \
+ "1: ldxp %0, %1, %2\n" \
+ " eor %0, %0, %3\n" \
+ " eor %1, %1, %4\n" \
+ " orr %1, %0, %1\n" \
+ " cbnz %1, 2f\n" \
+ " stxp %w0, %5, %6, %2\n" \
+ " cbnz %w0, 1b\n" \
+ " " #mb "\n" \
+ "2:" \
+ : "=&r" (tmp), "=&r" (ret), "+Q" (*(unsigned long *)ptr) \
+ : "r" (old1), "r" (old2), "r" (new1), "r" (new2) \
+ : cl); \
+ \
+ return ret; \
+} \
+__LL_SC_EXPORT(__cmpxchg_double##name);
+
+__CMPXCHG_DBL( , , )
+__CMPXCHG_DBL(_mb, dmb ish, "memory")
+
+#undef __CMPXCHG_DBL
+
#endif /* __ASM_ATOMIC_LL_SC_H */
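The ll/sc loop added above is the fallback path; per the commit message, the single casp instruction is patched in at runtime on LSE-capable CPUs. Below is a hedged sketch of that fast path (the function name is invented here, and the real code in atomic_lse.h differs in its instruction-patching framing): casp takes its compare and swap operands in even/odd consecutive register pairs, and caspal is the full-barrier form corresponding to the dmb-ish-bracketed _mb variant above.

/*
 * Hedged sketch, not the patch's atomic_lse.h text.  Success is still
 * signalled by returning 0, matching the ll/sc version, so callers can
 * keep negating the result unchanged.
 */
static inline int __cmpxchg_double_lse_sketch(unsigned long old1,
					      unsigned long old2,
					      unsigned long new1,
					      unsigned long new2,
					      volatile void *ptr)
{
	/* Bind operands to x0..x3 so they form valid casp register pairs. */
	register unsigned long x0 asm ("x0") = old1;
	register unsigned long x1 asm ("x1") = old2;
	register unsigned long x2 asm ("x2") = new1;
	register unsigned long x3 asm ("x3") = new2;

	asm volatile(
	"	caspal	x0, x1, x2, x3, %[v]\n"	/* x0,x1 <- current memory pair */
	"	eor	x0, x0, %[o1]\n"	/* zero iff first word matched  */
	"	eor	x1, x1, %[o2]\n"	/* zero iff second word matched */
	"	orr	x0, x0, x1\n"		/* 0 on success, like ll/sc ret */
	: "+r" (x0), "+r" (x1), [v] "+Q" (*(unsigned long *)ptr)
	: "r" (x2), "r" (x3), [o1] "r" (old1), [o2] "r" (old2)
	: "memory");

	return x0;
}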