From c47d6a04e6ed22ccc5d89aaf2a136bf4971de310 Mon Sep 17 00:00:00 2001 From: Mark Rutland Date: Tue, 30 Apr 2013 11:11:15 +0100 Subject: arm64: klib: bitops: fix unpredictable stxr usage We're currently relying on unpredictable behaviour in our testops (test_and_*_bit), as stxr is unpredictable when the status register and the source register are the same. This patch reallocates the status register so as to bring us back into the realm of predictable behaviour. Boot tested on an AEMv8 model. Signed-off-by: Mark Rutland Signed-off-by: Catalin Marinas --- arch/arm64/lib/bitops.S | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'arch') diff --git a/arch/arm64/lib/bitops.S b/arch/arm64/lib/bitops.S index fd1e801b53e7..eaed8bbd78fc 100644 --- a/arch/arm64/lib/bitops.S +++ b/arch/arm64/lib/bitops.S @@ -50,8 +50,8 @@ ENTRY( \name ) 1: ldxr x2, [x1] lsr x0, x2, x3 // Save old value of bit \instr x2, x2, x4 // toggle bit - stxr w2, x2, [x1] - cbnz w2, 1b + stxr w5, x2, [x1] + cbnz w5, 1b smp_dmb ish and x0, x0, #1 3: ret -- cgit v1.2.3-59-g8ed1b