path: root/arch/x86/include/asm/atomic64_64.h
author    H. Peter Anvin <hpa@zytor.com>    2016-06-08 12:38:38 -0700
committer H. Peter Anvin <hpa@linux.intel.com>    2016-06-08 12:41:20 -0700
commit    117780eef7740729e803bdcc0d5f2f48137ea8e3 (patch)
tree      63bd519e1d8b115d332fcb6e63ee4f94ee235161 /arch/x86/include/asm/atomic64_64.h
parent    x86, bitops: remove use of "sbb" to return CF (diff)
x86, asm: use bool for bitops and other assembly outputs
The gcc people have confirmed that using "bool" when combined with
inline assembly is always treated as a byte-sized operand that can be
assumed to be 0 or 1, which is exactly what the SET instruction emits.
Change the output types and intermediate variables of as many
operations as practical to "bool".

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Link: http://lkml.kernel.org/r/1465414726-197858-3-git-send-email-hpa@linux.intel.com
Reviewed-by: Andy Lutomirski <luto@kernel.org>
Reviewed-by: Borislav Petkov <bp@suse.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
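As a minimal sketch of the property the message relies on (illustrative kernel-style C, not the actual macro changed by this patch): with a bool output operand, gcc allocates a byte-sized location, so the 0/1 byte written by SETcc is already a well-formed bool and no "!= 0" normalization is needed.

static inline bool dec_and_test_sketch(long *p)
{
	bool zero;

	/* DECQ sets ZF from the result; SETE stores ZF as a 0/1 byte.
	 * With an int output this needed a trailing "return c != 0;". */
	asm volatile("decq %0; sete %1"
		     : "+m" (*p), "=qm" (zero)
		     : : "memory");
	return zero;
}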
Diffstat (limited to 'arch/x86/include/asm/atomic64_64.h')
-rw-r--r--  arch/x86/include/asm/atomic64_64.h | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index 037351022f54..4f881d7f0c39 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -70,7 +70,7 @@ static inline void atomic64_sub(long i, atomic64_t *v)
* true if the result is zero, or false for all
* other cases.
*/
-static inline int atomic64_sub_and_test(long i, atomic64_t *v)
+static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", "e");
}
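For orientation, the GEN_BINARY_RMWcc() call above hides an asm statement roughly like the following; this is a simplified sketch, not the exact definition from asm/rmwcc.h:

static inline bool atomic64_sub_and_test_sketch(long i, atomic64_t *v)
{
	bool c;

	/* "er" lets @i be an immediate or a register; condition "e" picks
	 * SETE, so @c is true exactly when the subtraction hits zero. */
	asm volatile(LOCK_PREFIX "subq %2, %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "er" (i)
		     : "memory");
	return c;
}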
@@ -109,7 +109,7 @@ static __always_inline void atomic64_dec(atomic64_t *v)
* returns true if the result is 0, or false for all other
* cases.
*/
-static inline int atomic64_dec_and_test(atomic64_t *v)
+static inline bool atomic64_dec_and_test(atomic64_t *v)
{
GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", "e");
}
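A typical consumer of this result is a reference-count drop, where the bool now feeds the branch directly; struct obj and obj_free() below are hypothetical, shown only to illustrate the pattern:

struct obj {
	atomic64_t refcnt;
};

static void obj_free(struct obj *o);	/* hypothetical destructor */

static void obj_put(struct obj *o)
{
	/* Locked DECQ sets ZF; SETE materializes it as the bool tested here. */
	if (atomic64_dec_and_test(&o->refcnt))
		obj_free(o);		/* last reference is gone */
}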
@@ -122,7 +122,7 @@ static inline int atomic64_dec_and_test(atomic64_t *v)
* and returns true if the result is zero, or false for all
* other cases.
*/
-static inline int atomic64_inc_and_test(atomic64_t *v)
+static inline bool atomic64_inc_and_test(atomic64_t *v)
{
GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", "e");
}
@@ -136,7 +136,7 @@ static inline int atomic64_inc_and_test(atomic64_t *v)
* if the result is negative, or false when
* result is greater than or equal to zero.
*/
-static inline int atomic64_add_negative(long i, atomic64_t *v)
+static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", "s");
}
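One plausible use of the sign-flag variant (all names here are hypothetical): optimistically subtract from a budget and roll back if the balance would go negative:

static bool reserve(atomic64_t *avail, long nr)
{
	/* LOCK ADDQ sets SF from the summed value; condition "s" maps to SETS. */
	if (atomic64_add_negative(-nr, avail)) {
		atomic64_add(nr, avail);	/* overdrawn: undo the charge */
		return false;
	}
	return true;				/* @nr units reserved */
}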
@@ -180,7 +180,7 @@ static inline long atomic64_xchg(atomic64_t *v, long new)
* Atomically adds @a to @v, so long as @v was not @u.
* Returns true if the addition was done, false otherwise.
*/
-static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
+static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
long c, old;
c = atomic64_read(v);
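The hunk is cut off at this point. For orientation, an add-unless primitive is conventionally a cmpxchg loop along these lines; an illustrative sketch, not the verbatim remainder of the function:

static inline bool add_unless_sketch(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;			/* forbidden value: skip the add */
		old = atomic64_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;			/* swap landed */
		c = old;			/* lost a race: retry with the new value */
	}
	return c != u;				/* true iff the addition was done */
}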