author     Russell King <rmk+kernel@armlinux.org.uk>  2020-12-21 11:19:26 +0000
committer  Russell King <rmk+kernel@armlinux.org.uk>  2020-12-21 11:19:26 +0000
commit     ecbbb88727aee7880527d4b320b4d06dde75d46d (patch)
tree       67216fc12f1b8220039eed892403bfbde884a1a7 /arch/arm/include/asm/memory.h
parent     Merge branches 'fixes' and 'misc' into for-next (diff)
parent     Merge tag 'arm-adrl-replacement-for-v5.11' of git://git.kernel.org/pub/scm/linux/kernel/git/ardb/linux into devel-stable (diff)
Merge branch 'devel-stable' into for-next
Diffstat (limited to 'arch/arm/include/asm/memory.h')
-rw-r--r--  arch/arm/include/asm/memory.h | 57 ++++++++++++++++++++++++++++++++++++++++-----------------
1 file changed, 40 insertions(+), 17 deletions(-)
diff --git a/arch/arm/include/asm/memory.h b/arch/arm/include/asm/memory.h
index 38a163f50130..2f841cb65c30 100644
--- a/arch/arm/include/asm/memory.h
+++ b/arch/arm/include/asm/memory.h
@@ -183,6 +183,7 @@ extern unsigned long vectors_base;
* so that all we need to do is modify the 8-bit constant field.
*/
#define __PV_BITS_31_24 0x81000000
+#define __PV_BITS_23_16 0x810000
#define __PV_BITS_7_0 0x81
extern unsigned long __pv_phys_pfn_offset;
@@ -193,43 +194,65 @@ extern const void *__pv_table_begin, *__pv_table_end;
#define PHYS_OFFSET ((phys_addr_t)__pv_phys_pfn_offset << PAGE_SHIFT)
#define PHYS_PFN_OFFSET (__pv_phys_pfn_offset)
-#define __pv_stub(from,to,instr,type) \
+#ifndef CONFIG_THUMB2_KERNEL
+#define __pv_stub(from,to,instr) \
__asm__("@ __pv_stub\n" \
"1: " instr " %0, %1, %2\n" \
+ "2: " instr " %0, %0, %3\n" \
" .pushsection .pv_table,\"a\"\n" \
- " .long 1b\n" \
+ " .long 1b - ., 2b - .\n" \
" .popsection\n" \
: "=r" (to) \
- : "r" (from), "I" (type))
+ : "r" (from), "I" (__PV_BITS_31_24), \
+ "I"(__PV_BITS_23_16))
+
+#define __pv_add_carry_stub(x, y) \
+ __asm__("@ __pv_add_carry_stub\n" \
+ "0: movw %R0, #0\n" \
+ " adds %Q0, %1, %R0, lsl #20\n" \
+ "1: mov %R0, %2\n" \
+ " adc %R0, %R0, #0\n" \
+ " .pushsection .pv_table,\"a\"\n" \
+ " .long 0b - ., 1b - .\n" \
+ " .popsection\n" \
+ : "=&r" (y) \
+ : "r" (x), "I" (__PV_BITS_7_0) \
+ : "cc")
-#define __pv_stub_mov_hi(t) \
- __asm__ volatile("@ __pv_stub_mov\n" \
- "1: mov %R0, %1\n" \
+#else
+#define __pv_stub(from,to,instr) \
+ __asm__("@ __pv_stub\n" \
+ "0: movw %0, #0\n" \
+ " lsl %0, #21\n" \
+ " " instr " %0, %1, %0\n" \
" .pushsection .pv_table,\"a\"\n" \
- " .long 1b\n" \
+ " .long 0b - .\n" \
" .popsection\n" \
- : "=r" (t) \
- : "I" (__PV_BITS_7_0))
+ : "=&r" (to) \
+ : "r" (from))
#define __pv_add_carry_stub(x, y) \
- __asm__ volatile("@ __pv_add_carry_stub\n" \
- "1: adds %Q0, %1, %2\n" \
+ __asm__("@ __pv_add_carry_stub\n" \
+ "0: movw %R0, #0\n" \
+ " lsls %R0, #21\n" \
+ " adds %Q0, %1, %R0\n" \
+ "1: mvn %R0, #0\n" \
" adc %R0, %R0, #0\n" \
" .pushsection .pv_table,\"a\"\n" \
- " .long 1b\n" \
+ " .long 0b - ., 1b - .\n" \
" .popsection\n" \
- : "+r" (y) \
- : "r" (x), "I" (__PV_BITS_31_24) \
+ : "=&r" (y) \
+ : "r" (x) \
: "cc")
+#endif
static inline phys_addr_t __virt_to_phys_nodebug(unsigned long x)
{
phys_addr_t t;
if (sizeof(phys_addr_t) == 4) {
- __pv_stub(x, t, "add", __PV_BITS_31_24);
+ __pv_stub(x, t, "add");
} else {
- __pv_stub_mov_hi(t);
__pv_add_carry_stub(x, t);
}
return t;
@@ -245,7 +268,7 @@ static inline unsigned long __phys_to_virt(phys_addr_t x)
* assembler expression receives 32 bit argument
* in place where 'r' 32 bit operand is expected.
*/
- __pv_stub((unsigned long) x, t, "sub", __PV_BITS_31_24);
+ __pv_stub((unsigned long) x, t, "sub");
return t;
}
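
A word on the constants in the first hunk: __PV_BITS_31_24 (0x81000000), __PV_BITS_23_16 (0x810000) and __PV_BITS_7_0 (0x81) are dummy add/sub immediates whose 8-bit constant fields the boot-time fixup rewrites once the real physical RAM offset is known, and each .pv_table entry records its patch site as a place-relative offset (1b - ., 2b - .) so the table stays valid wherever the kernel is loaded. With the second __PV_BITS_23_16 instruction added here, the virt-to-phys offset only needs 2 MiB alignment (note the lsl #21 in the Thumb2 stubs) instead of 16 MiB. A minimal user-space sketch in C of how a 2 MiB-aligned offset splits across the two patched immediates; the offset value is hypothetical, and the real patching is done by the kernel's p2v fixup code, not by anything like this:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* hypothetical example offset (PHYS_OFFSET - PAGE_OFFSET), 2 MiB aligned */
	uint32_t pv_offset = 0x3fe00000;

	assert((pv_offset & 0x001fffff) == 0);	/* 2 MiB alignment required */

	/* each half fits the 8-bit rotated-immediate field of one add/sub */
	uint32_t imm_hi = pv_offset & 0xff000000;	/* replaces __PV_BITS_31_24 */
	uint32_t imm_lo = pv_offset & 0x00ff0000;	/* replaces __PV_BITS_23_16 */

	/* the back-to-back instructions in the ARM __pv_stub apply both halves */
	assert((imm_hi | imm_lo) == pv_offset);

	printf("offset %#010x -> immediates %#010x, %#010x\n",
	       pv_offset, imm_hi, imm_lo);
	return 0;
}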
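
As for what the stubs compute: once patched, __virt_to_phys_nodebug() and __phys_to_virt() simply add or subtract a constant delta (with an additional carry into the high word in the LPAE case). A runnable sketch of the 32-bit case, using made-up example values for the virtual and physical bases (the real values come from the boot-time fixup):

#include <stdint.h>
#include <stdio.h>

/* assumed example layout, for illustration only:
 * kernel virtual base 0xc0000000, RAM starting at 0x80000000 */
#define EXAMPLE_PAGE_OFFSET	0xc0000000u
#define EXAMPLE_PHYS_OFFSET	0x80000000u

/* the fixup effectively bakes this delta into the patched add/sub
 * instructions; modular 32-bit wraparound is intentional */
static const uint32_t pv_offset = EXAMPLE_PHYS_OFFSET - EXAMPLE_PAGE_OFFSET;

static uint32_t virt_to_phys(uint32_t va) { return va + pv_offset; }	/* the "add" stub */
static uint32_t phys_to_virt(uint32_t pa) { return pa - pv_offset; }	/* the "sub" stub */

int main(void)
{
	uint32_t va = EXAMPLE_PAGE_OFFSET + 0x1000;
	uint32_t pa = virt_to_phys(va);

	printf("va %#x -> pa %#x -> va %#x\n", va, pa, phys_to_virt(pa));
	return 0;
}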