author		John Paul Adrian Glaubitz <glaubitz@physik.fu-berlin.de>	2020-06-11 09:58:11 +0200
committer	Rich Felker <dalias@libc.org>	2020-08-14 22:05:04 -0400
commit		2d2b308a8b7d0aeeadeabd8d7f329bb8200f2e2b (patch)
tree		eb3d9096b05a637ff220788dfa8c6541a171b4db	/arch/sh/include/asm/uaccess_32.h
parent		sh: remove call to memset after dma_alloc_coherent (diff)
sh: Implement __get_user_u64() required for 64-bit get_user()
Trying to build the kernel with CONFIG_INFINIBAND_USER_ACCESS enabled fails with

	ERROR: "__get_user_unknown" [drivers/infiniband/core/ib_uverbs.ko] undefined!

on SH since the kernel lacks a 64-bit implementation of get_user(). Implement the missing 64-bit get_user() as __get_user_u64(), matching the already existing __put_user_u64() which implements the 64-bit put_user().

Signed-off-by: John Paul Adrian Glaubitz <glaubitz@physik.fu-berlin.de>
Acked-by: Yoshinori Sato <ysato@users.sourceforge.jp>
Signed-off-by: Rich Felker <dalias@libc.org>
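For orientation, here is a minimal sketch of the kind of caller this enables: a driver copying a 64-bit value from userspace with get_user(). The function and variable names are illustrative, not taken from the ib_uverbs code; the point is that a u64 argument now selects the new case 8 branch of __get_user_size() instead of falling through to the __get_user_unknown() link error.

	/* Illustrative only -- hypothetical driver code, not part of this patch. */
	static int example_read_user_handle(u64 __user *uptr, u64 *out)
	{
		u64 val;

		/* With this patch, the 8-byte access dispatches to __get_user_u64(). */
		if (get_user(val, uptr))
			return -EFAULT;	/* on a faulting access the fixup zeroes val */

		*out = val;
		return 0;
	}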
Diffstat (limited to '')
-rw-r--r--	arch/sh/include/asm/uaccess_32.h	53
1 file changed, 53 insertions, 0 deletions
diff --git a/arch/sh/include/asm/uaccess_32.h b/arch/sh/include/asm/uaccess_32.h
index 624cf55acc27..5d7ddc092afd 100644
--- a/arch/sh/include/asm/uaccess_32.h
+++ b/arch/sh/include/asm/uaccess_32.h
@@ -26,6 +26,9 @@ do { \
 	case 4: \
 		__get_user_asm(x, ptr, retval, "l"); \
 		break; \
+	case 8: \
+		__get_user_u64(x, ptr, retval); \
+		break; \
 	default: \
 		__get_user_unknown(); \
 		break; \
@@ -66,6 +69,56 @@ do { \
 
 extern void __get_user_unknown(void);
 
+#if defined(CONFIG_CPU_LITTLE_ENDIAN)
+#define __get_user_u64(x, addr, err) \
+({ \
+__asm__ __volatile__( \
+	"1:\n\t" \
+	"mov.l %2,%R1\n\t" \
+	"mov.l %T2,%S1\n\t" \
+	"2:\n" \
+	".section .fixup,\"ax\"\n" \
+	"3:\n\t" \
+	"mov #0,%R1\n\t" \
+	"mov #0,%S1\n\t" \
+	"mov.l 4f, %0\n\t" \
+	"jmp @%0\n\t" \
+	" mov %3, %0\n\t" \
+	".balign 4\n" \
+	"4: .long 2b\n\t" \
+	".previous\n" \
+	".section __ex_table,\"a\"\n\t" \
+	".long 1b, 3b\n\t" \
+	".long 1b + 2, 3b\n\t" \
+	".previous" \
+	:"=&r" (err), "=&r" (x) \
+	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
+#else
+#define __get_user_u64(x, addr, err) \
+({ \
+__asm__ __volatile__( \
+	"1:\n\t" \
+	"mov.l %2,%S1\n\t" \
+	"mov.l %T2,%R1\n\t" \
+	"2:\n" \
+	".section .fixup,\"ax\"\n" \
+	"3:\n\t" \
+	"mov #0,%S1\n\t" \
+	"mov #0,%R1\n\t" \
+	"mov.l 4f, %0\n\t" \
+	"jmp @%0\n\t" \
+	" mov %3, %0\n\t" \
+	".balign 4\n" \
+	"4: .long 2b\n\t" \
+	".previous\n" \
+	".section __ex_table,\"a\"\n\t" \
+	".long 1b, 3b\n\t" \
+	".long 1b + 2, 3b\n\t" \
+	".previous" \
+	:"=&r" (err), "=&r" (x) \
+	:"m" (__m(addr)), "i" (-EFAULT), "0" (err)); })
+#endif
+
 #define __put_user_size(x,ptr,size,retval) \
 do { \
 	retval = 0; \