author		Ard Biesheuvel <ardb@kernel.org>	2021-12-24 10:31:00 +0100
committer	Russell King (Oracle) <rmk+kernel@armlinux.org.uk>	2022-01-06 12:58:58 +0000
commit		5fe41793bc78d9bb47fea37d1a16984ad6cf294b (patch)
tree		697f185af991330149195a761e6721c081f5beef
parent		ARM: 9177/1: disable vmap'ed stacks on suspend-capable SMP configs (diff)
ARM: 9176/1: avoid literal references in inline assembly
Nathan reports that the new get_current() and per-CPU offset accessors may cause problems at build time due to the use of a literal to hold the address of the respective variables. This is due to the fact that LLD before v14 does not support the PC-relative group relocations that are normally used for this, and the fallback relies on literals but does not emit the literal pools explicitly using the .ltorg directive.

  ./arch/arm/include/asm/current.h:53:6: error: out of range pc-relative fixup value
          asm(LOAD_SYM_ARMV6(%0, __current) : "=r"(cur));
              ^
  ./arch/arm/include/asm/insn.h:25:2: note: expanded from macro 'LOAD_SYM_ARMV6'
          " ldr " #reg ", =" #sym "              \n\t"
          ^
  <inline asm>:1:3: note: instantiated into assembly here
          ldr r0, =__current
            ^

Since emitting a literal pool in this particular case is not possible, let's avoid LOAD_SYM_ARMV6() entirely, and use an ordinary C assignment instead.

As it turns out, there are other such cases, and here, using .ltorg to emit the literal pool within range of the LDR instruction would be possible due to the presence of an unconditional branch right after it. Unfortunately, putting .ltorg directives in subsections appears to confuse the Clang inline assembler, resulting in similar errors even though the .ltorg is most definitely within range.

So let's fix this by emitting the literal explicitly, and not rely on the assembler to figure this out. This means we have to move the fallback out of the LOAD_SYM_ARMV6() macro and into the callers.

Link: https://github.com/ClangBuiltLinux/linux/issues/1551
Fixes: 9c46929e7989 ("ARM: implement THREAD_INFO_IN_TASK for uniprocessor systems")
Reported-by: Nathan Chancellor <natechancellor@gmail.com>
Tested-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
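For illustration, here is a minimal standalone sketch of the explicit-literal pattern the patch below switches to, written as C with GNU inline assembly. The symbol my_global and the function load_via_explicit_literal() are hypothetical names chosen only for this example; it assumes ARM (not Thumb) mode and is a sketch of the technique, not the kernel code itself:

  /* Load the value of a global through an explicitly emitted literal word,
   * so the assembler never has to place a literal pool (.ltorg) of its own. */
  unsigned long my_global = 42;          /* hypothetical variable, for illustration only */

  unsigned long load_via_explicit_literal(void)
  {
          unsigned long val;

          asm("   ldr     %0, 1f          \n\t"   /* %0 = address of my_global, read from the literal at 1: */
              "   ldr     %0, [%0]        \n\t"   /* %0 = my_global */
              "   b       2f              \n\t"   /* skip over the literal word */
              "   .align  2               \n\t"
              "1: .long   my_global       \n\t"   /* the address, emitted explicitly as data */
              "2:                         \n\t"
              : "=r" (val));

          return val;
  }

The patch does the same thing, except that the explicit literal and the loads live in a subsection and the code branches back to the main section instead of falling through.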
-rw-r--r--	arch/arm/include/asm/current.h	13
-rw-r--r--	arch/arm/include/asm/insn.h	7
-rw-r--r--	arch/arm/include/asm/percpu.h	8
3 files changed, 19 insertions(+), 9 deletions(-)
diff --git a/arch/arm/include/asm/current.h b/arch/arm/include/asm/current.h
index 69ecf4c6c725..2f9d79214b25 100644
--- a/arch/arm/include/asm/current.h
+++ b/arch/arm/include/asm/current.h
@@ -37,8 +37,16 @@ static inline __attribute_const__ struct task_struct *get_current(void)
#ifdef CONFIG_CPU_V6
"1: \n\t"
" .subsection 1 \n\t"
+#if !(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) && \
+ !(defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
"2: " LOAD_SYM_ARMV6(%0, __current) " \n\t"
" b 1b \n\t"
+#else
+ "2: ldr %0, 3f \n\t"
+ " ldr %0, [%0] \n\t"
+ " b 1b \n\t"
+ "3: .long __current \n\t"
+#endif
" .previous \n\t"
" .pushsection \".alt.smp.init\", \"a\" \n\t"
" .long 0b - . \n\t"
@@ -46,8 +54,9 @@ static inline __attribute_const__ struct task_struct *get_current(void)
" .popsection \n\t"
#endif
: "=r"(cur));
-#elif __LINUX_ARM_ARCH__>=7 || \
- (defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS))
+#elif __LINUX_ARM_ARCH__>= 7 || \
+ (defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) || \
+ (defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
cur = __current;
#else
asm(LOAD_SYM_ARMV6(%0, __current) : "=r"(cur));
diff --git a/arch/arm/include/asm/insn.h b/arch/arm/include/asm/insn.h
index a160ed3ea427..faf3d1c28368 100644
--- a/arch/arm/include/asm/insn.h
+++ b/arch/arm/include/asm/insn.h
@@ -10,8 +10,6 @@
* which should be sufficient for the core kernel as well as modules loaded
* into the module region. (Not supported by LLD before release 14)
*/
-#if !(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) && \
- !(defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
#define LOAD_SYM_ARMV6(reg, sym) \
" .globl " #sym " \n\t" \
" .reloc 10f, R_ARM_ALU_PC_G0_NC, " #sym " \n\t" \
@@ -20,11 +18,6 @@
"10: sub " #reg ", pc, #8 \n\t" \
"11: sub " #reg ", " #reg ", #4 \n\t" \
"12: ldr " #reg ", [" #reg ", #0] \n\t"
-#else
-#define LOAD_SYM_ARMV6(reg, sym) \
- " ldr " #reg ", =" #sym " \n\t" \
- " ldr " #reg ", [" #reg "] \n\t"
-#endif
static inline unsigned long
arm_gen_nop(void)
diff --git a/arch/arm/include/asm/percpu.h b/arch/arm/include/asm/percpu.h
index a4a0d38d016a..28961d60877d 100644
--- a/arch/arm/include/asm/percpu.h
+++ b/arch/arm/include/asm/percpu.h
@@ -38,8 +38,16 @@ static inline unsigned long __my_cpu_offset(void)
#ifdef CONFIG_CPU_V6
"1: \n\t"
" .subsection 1 \n\t"
+#if !(defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) && \
+ !(defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
"2: " LOAD_SYM_ARMV6(%0, __per_cpu_offset) " \n\t"
" b 1b \n\t"
+#else
+ "2: ldr %0, 3f \n\t"
+ " ldr %0, [%0] \n\t"
+ " b 1b \n\t"
+ "3: .long __per_cpu_offset \n\t"
+#endif
" .previous \n\t"
" .pushsection \".alt.smp.init\", \"a\" \n\t"
" .long 0b - . \n\t"