author	Al Viro <viro@zeniv.linux.org.uk>	2020-07-13 15:12:48 -0400
committer	Al Viro <viro@zeniv.linux.org.uk>	2020-08-20 15:45:19 -0400
commit	ab5e8b33124468fe9d7d6042de5a9b35414c784e (patch)
tree	7f6ec7f2004819c68b0a18a25f2348e0ca35720e /arch/sparc/include/asm/checksum_32.h
parent	i386: propagate the calling conventions change down to csum_partial_copy_generic() (diff)
sparc32: propagate the calling conventions change down to __csum_partial_copy_sparc_generic()
... and get rid of zeroing the target, etc. on fault.

All exception handlers merge into one; moreover, since we are not calling lookup_fault() anymore, we don't need the magic with passing arguments for it from the page fault handler.

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
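In concrete terms, the convention being propagated here (as in the rest of the series) is: the copy-and-checksum primitives take no seed and no error pointer, start the sum at ~0U internally, and return 0 if the copy faults, leaving the destination contents unspecified instead of zeroed. A hypothetical caller under the new contract might look like this (a sketch only; the function name and error handling are illustrative, not from this patch):

/* New contract: 0 can only mean "the copy faulted" (or access_ok()
 * failed), because a sum seeded with ~0U never comes out as 0 on
 * success.  Under the old contract the same call site would have been
 *	csum = csum_and_copy_from_user(src, dst, len, ~0U, &err);
 * with the fault reported through *err and the destination zeroed.
 */
static int copy_and_checksum(const void __user *src, void *dst, int len,
			     __wsum *csump)
{
	__wsum csum = csum_and_copy_from_user(src, dst, len);

	if (!csum)
		return -EFAULT;
	*csump = csum;
	return 0;
}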
Diffstat (limited to 'arch/sparc/include/asm/checksum_32.h')
-rw-r--r--	arch/sparc/include/asm/checksum_32.h	49
1 file changed, 5 insertions, 44 deletions
diff --git a/arch/sparc/include/asm/checksum_32.h b/arch/sparc/include/asm/checksum_32.h
index b5873b7b7bf0..d55e480172a6 100644
--- a/arch/sparc/include/asm/checksum_32.h
+++ b/arch/sparc/include/asm/checksum_32.h
@@ -50,9 +50,9 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len)
__asm__ __volatile__ (
"call __csum_partial_copy_sparc_generic\n\t"
- " mov %6, %%g7\n"
+ " mov -1, %%g7\n"
: "=&r" (ret), "=&r" (d), "=&r" (l)
- : "0" (ret), "1" (d), "2" (l), "r" (0)
+ : "0" (ret), "1" (d), "2" (l)
: "o2", "o3", "o4", "o5", "o7",
"g2", "g3", "g4", "g5", "g7",
"memory", "cc");
@@ -61,29 +61,10 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len)
static inline __wsum
csum_and_copy_from_user(const void __user *src, void *dst, int len)
- {
- register unsigned long ret asm("o0") = (unsigned long)src;
- register char *d asm("o1") = dst;
- register int l asm("g1") = len;
- register __wsum s asm("g7") = ~0U;
- int err = 0;
-
+{
if (unlikely(!access_ok(src, len)))
return 0;
-
- __asm__ __volatile__ (
- ".section __ex_table,#alloc\n\t"
- ".align 4\n\t"
- ".word 1f,2\n\t"
- ".previous\n"
- "1:\n\t"
- "call __csum_partial_copy_sparc_generic\n\t"
- " st %8, [%%sp + 64]\n"
- : "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
- : "0" (ret), "1" (d), "2" (l), "3" (s), "r" (&err)
- : "o2", "o3", "o4", "o5", "o7", "g2", "g3", "g4", "g5",
- "cc", "memory");
- return err ? 0 : (__force __wsum)ret;
+ return csum_partial_copy_nocheck((__force void *)src, dst, len);
}
#define HAVE_CSUM_COPY_USER
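With faults now reported in-band by the asm's single exception handler, both user-copy wrappers reduce to an access_ok() check plus a call to csum_partial_copy_nocheck(). The HAVE_CSUM_COPY_USER define above tells generic code that the architecture supplies its own csum_and_copy_to_user(); without it, the generic fallback builds the operation from separate checksum and copy passes, roughly along these lines (a from-memory sketch of the shape of the include/net/checksum.h fallback, not part of this patch):

#ifndef HAVE_CSUM_COPY_USER
static inline
__wsum csum_and_copy_to_user(const void *src, void __user *dst, int len)
{
	__wsum sum = csum_partial(src, len, ~0U);	/* checksum pass */

	if (copy_to_user(dst, src, len) == 0)		/* copy pass */
		return sum;
	return 0;					/* faulted */
}
#endif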
@@ -91,29 +72,9 @@ csum_and_copy_from_user(const void __user *src, void *dst, int len)
static inline __wsum
csum_and_copy_to_user(const void *src, void __user *dst, int len)
{
- register unsigned long ret asm("o0") = (unsigned long)src;
- register char __user *d asm("o1") = dst;
- register int l asm("g1") = len;
- register __wsum s asm("g7") = ~0U;
- int err = 0;
-
if (!access_ok(dst, len))
return 0;
-
- __asm__ __volatile__ (
- ".section __ex_table,#alloc\n\t"
- ".align 4\n\t"
- ".word 1f,1\n\t"
- ".previous\n"
- "1:\n\t"
- "call __csum_partial_copy_sparc_generic\n\t"
- " st %8, [%%sp + 64]\n"
- : "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
- : "0" (ret), "1" (d), "2" (l), "3" (s), "r" (&err)
- : "o2", "o3", "o4", "o5", "o7",
- "g2", "g3", "g4", "g5",
- "cc", "memory");
- return err ? 0 : (__force __wsum)ret;
+ return csum_partial_copy_nocheck(src, (__force void *)dst, len);
}
/* ihl is always 5 or greater, almost always is 5, and iph is word aligned