author		Peter Zijlstra <peterz@infradead.org>	2014-03-13 19:00:35 +0100
committer	Ingo Molnar <mingo@kernel.org>	2014-04-18 14:20:42 +0200
commit		0e530747c69f1e191f101a925bb4051894e5c7b0
tree		dba0d885403109b8a5857bf59890dfe6b6cd077c /arch/s390/include/asm/atomic.h
parent		arch,powerpc: Convert smp_mb__*()
arch,s390: Convert smp_mb__*()
As per the existing implementation, implement the new smp_mb__{before,after}_atomic() using smp_mb().

AFAICT the s390 compare-and-swap does imply a barrier; however, there are some immediate ops that appear to be single-copy atomic and do not imply a barrier. One such op is "ni" (AND immediate), which is used for the constant clear_bit() implementation. Therefore s390 needs full barriers for the {before,after} atomic ops.

Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Link: http://lkml.kernel.org/n/tip-kme5dz5hcobpnufnnkh1ech2@git.kernel.org
Cc: Chen Gang <gang.chen@asianux.com>
Cc: Heiko Carstens <heiko.carstens@de.ibm.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Martin Schwidefsky <schwidefsky@de.ibm.com>
Cc: Will Deacon <will.deacon@arm.com>
Cc: linux390@de.ibm.com
Cc: linux-kernel@vger.kernel.org
Cc: linux-s390@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
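For context, a minimal sketch of the caller pattern the converted macros support. The flag name, flag word, and function here are hypothetical, not from this patch; the point is that on s390 a constant-bit clear_bit() may compile to "ni", which is atomic but implies no ordering, so the new macro must expand to a full barrier:

	/*
	 * Hypothetical caller (names FLAG_PENDING, flags, finish_work are
	 * illustrative only). clear_bit() with a constant bit may compile
	 * to "ni" (AND immediate) on s390: atomic, but no barrier implied,
	 * hence the explicit smp_mb__after_atomic().
	 */
	#include <linux/bitops.h>
	#include <linux/atomic.h>

	#define FLAG_PENDING	0		/* hypothetical flag bit */

	static unsigned long flags;		/* hypothetical flag word */

	static void finish_work(void)
	{
		clear_bit(FLAG_PENDING, &flags);	/* atomic RMW, no barrier */
		smp_mb__after_atomic();			/* full barrier on s390 */
		/* later loads/stores are now ordered after the bit clear */
	}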
Diffstat (limited to 'arch/s390/include/asm/atomic.h')
-rw-r--r--	arch/s390/include/asm/atomic.h	5
1 file changed, 0 insertions(+), 5 deletions(-)
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index 1d4706114a45..fa934fe080c1 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -412,9 +412,4 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
 #define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
 #define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
 
-#define smp_mb__before_atomic_dec()	smp_mb()
-#define smp_mb__after_atomic_dec()	smp_mb()
-#define smp_mb__before_atomic_inc()	smp_mb()
-#define smp_mb__after_atomic_inc()	smp_mb()
-
 #endif /* __ARCH_S390_ATOMIC__ */
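The diffstat above is limited to atomic.h, so the replacement definitions are not shown here. Per the commit message ("implement the new one using smp_mb()"), they would look like the following sketch; the file location (arch/s390/include/asm/barrier.h) is an assumption, not confirmed by this diff:

	/*
	 * Sketch of the new generic hooks as described in the commit
	 * message (assumed to live in arch/s390/include/asm/barrier.h,
	 * which is outside this diffstat). Both expand to a full barrier
	 * because s390 immediate atomics such as "ni" imply no ordering.
	 */
	#define smp_mb__before_atomic()	smp_mb()
	#define smp_mb__after_atomic()	smp_mb()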