author	Christoph Hellwig <hch@lst.de>	2020-09-07 07:58:19 +0200
committer	Palmer Dabbelt <palmerdabbelt@google.com>	2020-10-04 10:26:43 -0700
commit	931de11f5a374748f1d620ee17810dedf13c9f69 (patch)
tree	ac81211b04fe24c53009ded77ee3b3ec8f4639db
parent	uaccess: provide a generic TASK_SIZE_MAX definition (diff)
asm-generic: improve the nommu {get,put}_user handling
Instead of reusing raw_copy_{to,from}_user, implement separate handlers
using {get,put}_unaligned.  This ensures unaligned accesses are handled
correctly and avoids the need for the small constant-size optimization
in raw_copy_{to,from}_user.

Signed-off-by: Christoph Hellwig <hch@lst.de>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Signed-off-by: Palmer Dabbelt <palmerdabbelt@google.com>
-rw-r--r--	include/asm-generic/uaccess.h	91
1 file changed, 51 insertions(+), 40 deletions(-)
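For reference, the generic {get,put}_unaligned helpers this patch switches
to behave roughly like a memcpy of the access size: the compiler lowers them
to whatever loads and stores the target can perform safely at any alignment,
instead of a plain dereference that can trap on strict-alignment machines.
A minimal userspace sketch of that behaviour (the sketch_* names are
illustrative, not the kernel's implementation):

#include <stdint.h>
#include <string.h>

/* Read a 32-bit value from a possibly misaligned address. */
static inline uint32_t sketch_get_unaligned_u32(const void *p)
{
	uint32_t val;

	memcpy(&val, p, sizeof(val));	/* safe at any alignment */
	return val;
}

/* Write a 32-bit value to a possibly misaligned address. */
static inline void sketch_put_unaligned_u32(uint32_t val, void *p)
{
	memcpy(p, &val, sizeof(val));	/* safe at any alignment */
}

This is what lets the new __get_user_fn()/__put_user_fn() below handle a
misaligned user pointer that the old *(u32 __force *)from style dereference
could fault on.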
diff --git a/include/asm-generic/uaccess.h b/include/asm-generic/uaccess.h
index ba68ee4dabfa..6de5f524e9e6 100644
--- a/include/asm-generic/uaccess.h
+++ b/include/asm-generic/uaccess.h
@@ -10,28 +10,60 @@
 #include <linux/string.h>
 
 #ifdef CONFIG_UACCESS_MEMCPY
-static inline __must_check unsigned long
-raw_copy_from_user(void *to, const void __user * from, unsigned long n)
+#include <asm/unaligned.h>
+
+static inline int __get_user_fn(size_t size, const void __user *from, void *to)
 {
-	if (__builtin_constant_p(n)) {
-		switch(n) {
-		case 1:
-			*(u8 *)to = *(u8 __force *)from;
-			return 0;
-		case 2:
-			*(u16 *)to = *(u16 __force *)from;
-			return 0;
-		case 4:
-			*(u32 *)to = *(u32 __force *)from;
-			return 0;
-#ifdef CONFIG_64BIT
-		case 8:
-			*(u64 *)to = *(u64 __force *)from;
-			return 0;
-#endif
-		}
+	BUILD_BUG_ON(!__builtin_constant_p(size));
+
+	switch (size) {
+	case 1:
+		*(u8 *)to = get_unaligned((u8 __force *)from);
+		return 0;
+	case 2:
+		*(u16 *)to = get_unaligned((u16 __force *)from);
+		return 0;
+	case 4:
+		*(u32 *)to = get_unaligned((u32 __force *)from);
+		return 0;
+	case 8:
+		*(u64 *)to = get_unaligned((u64 __force *)from);
+		return 0;
+	default:
+		BUILD_BUG();
+		return 0;
+	}
+
+}
+#define __get_user_fn(sz, u, k)	__get_user_fn(sz, u, k)
+
+static inline int __put_user_fn(size_t size, void __user *to, void *from)
+{
+	BUILD_BUG_ON(!__builtin_constant_p(size));
+
+	switch (size) {
+	case 1:
+		put_unaligned(*(u8 *)from, (u8 __force *)to);
+		return 0;
+	case 2:
+		put_unaligned(*(u16 *)from, (u16 __force *)to);
+		return 0;
+	case 4:
+		put_unaligned(*(u32 *)from, (u32 __force *)to);
+		return 0;
+	case 8:
+		put_unaligned(*(u64 *)from, (u64 __force *)to);
+		return 0;
+	default:
+		BUILD_BUG();
+		return 0;
 	}
+}
+#define __put_user_fn(sz, u, k)	__put_user_fn(sz, u, k)
 
+static inline __must_check unsigned long
+raw_copy_from_user(void *to, const void __user * from, unsigned long n)
+{
 	memcpy(to, (const void __force *)from, n);
 	return 0;
 }
@@ -39,27 +71,6 @@ raw_copy_from_user(void *to, const void __user * from, unsigned long n)
 static inline __must_check unsigned long
 raw_copy_to_user(void __user *to, const void *from, unsigned long n)
 {
-	if (__builtin_constant_p(n)) {
-		switch(n) {
-		case 1:
-			*(u8 __force *)to = *(u8 *)from;
-			return 0;
-		case 2:
-			*(u16 __force *)to = *(u16 *)from;
-			return 0;
-		case 4:
-			*(u32 __force *)to = *(u32 *)from;
-			return 0;
-#ifdef CONFIG_64BIT
-		case 8:
-			*(u64 __force *)to = *(u64 *)from;
-			return 0;
-#endif
-		default:
-			break;
-		}
-	}
-
 	memcpy((void __force *)to, from, n);
 	return 0;
 }
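For context, the __get_user()/__put_user() wrappers defined elsewhere in
this header invoke these handlers with sizeof(*(ptr)) as the size argument,
which is why the new BUILD_BUG_ON(!__builtin_constant_p(size)) check can
insist on a compile-time constant. A rough sketch of that dispatch
(simplified and hypothetical; the real macros also do type checking and
handle the __user address space):

/*
 * Simplified caller-side sketch, not the verbatim macro from this
 * header: sizeof(*(ptr)) is a compile-time constant, so the switch in
 * __get_user_fn() collapses to a single get_unaligned() of the right
 * width, and BUILD_BUG() fires only for an unsupported access size.
 */
#define sketch_get_user(x, ptr) \
	__get_user_fn(sizeof(*(ptr)), (ptr), &(x))

/* usage: u32 v; int err = sketch_get_user(v, uptr); */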