author		Al Viro <viro@zeniv.linux.org.uk>	2020-07-13 22:12:48 +0300
committer	Al Viro <viro@zeniv.linux.org.uk>	2020-08-20 22:45:19 +0300
commit		ab5e8b33124468fe9d7d6042de5a9b35414c784e (patch)
tree		7f6ec7f2004819c68b0a18a25f2348e0ca35720e /arch/sparc/include
parent		e8b95089990ceac4e5197db3c03737bf569f5081 (diff)
download	linux-ab5e8b33124468fe9d7d6042de5a9b35414c784e.tar.xz
sparc32: propagate the calling conventions change down to __csum_partial_copy_sparc_generic()
... and get rid of zeroing the target, etc. on fault.
All exception handlers merge into one; moreover, since we are not
calling lookup_fault() anymore, we don't need the magic with passing
arguments for it from the page fault handler.
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
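The convention change shows up in the delay slot of the call below: the running sum in %g7 is now seeded with -1 (all ones) rather than a caller-supplied 0. Since the checksum is ones'-complement, a sum seeded with all ones never comes out as 0, so the single merged exception handler can report a fault simply by returning 0. A minimal caller-side sketch of the resulting contract (the surrounding code is hypothetical; only csum_partial_copy_nocheck() is from this patch):

	__wsum sum = csum_partial_copy_nocheck(src, dst, len);
	if (!sum)
		return -EFAULT;	/* the copy faulted; no checksum was produced */
	/* otherwise sum is the 32-bit partial checksum of the copied data */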
Diffstat (limited to 'arch/sparc/include')
-rw-r--r--	arch/sparc/include/asm/checksum_32.h	49
1 file changed, 5 insertions(+), 44 deletions(-)
diff --git a/arch/sparc/include/asm/checksum_32.h b/arch/sparc/include/asm/checksum_32.h
index b5873b7b7bf0..d55e480172a6 100644
--- a/arch/sparc/include/asm/checksum_32.h
+++ b/arch/sparc/include/asm/checksum_32.h
@@ -50,9 +50,9 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len)
 
 	__asm__ __volatile__ (
 		"call __csum_partial_copy_sparc_generic\n\t"
-		" mov %6, %%g7\n"
+		" mov -1, %%g7\n"
 	: "=&r" (ret), "=&r" (d), "=&r" (l)
-	: "0" (ret), "1" (d), "2" (l), "r" (0)
+	: "0" (ret), "1" (d), "2" (l)
 	: "o2", "o3", "o4", "o5", "o7",
 	  "g2", "g3", "g4", "g5", "g7",
 	  "memory", "cc");
@@ -61,29 +61,10 @@ csum_partial_copy_nocheck(const void *src, void *dst, int len)
 
 static inline __wsum
 csum_and_copy_from_user(const void __user *src, void *dst, int len)
-	{
-	register unsigned long ret asm("o0") = (unsigned long)src;
-	register char *d asm("o1") = dst;
-	register int l asm("g1") = len;
-	register __wsum s asm("g7") = ~0U;
-	int err = 0;
-
+{
 	if (unlikely(!access_ok(src, len)))
 		return 0;
-
-	__asm__ __volatile__ (
-	".section __ex_table,#alloc\n\t"
-	".align 4\n\t"
-	".word 1f,2\n\t"
-	".previous\n"
-	"1:\n\t"
-	"call __csum_partial_copy_sparc_generic\n\t"
-	" st %8, [%%sp + 64]\n"
-	: "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
-	: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (&err)
-	: "o2", "o3", "o4", "o5", "o7", "g2", "g3", "g4", "g5",
-	  "cc", "memory");
-	return err ? 0 : (__force __wsum)ret;
+	return csum_partial_copy_nocheck((__force void *)src, dst, len);
 }
 
 #define HAVE_CSUM_COPY_USER
@@ -91,29 +72,9 @@ csum_and_copy_from_user(const void __user *src, void *dst, int len)
 static inline __wsum
 csum_and_copy_to_user(const void *src, void __user *dst, int len)
 {
-	register unsigned long ret asm("o0") = (unsigned long)src;
-	register char __user *d asm("o1") = dst;
-	register int l asm("g1") = len;
-	register __wsum s asm("g7") = ~0U;
-	int err = 0;
-
 	if (!access_ok(dst, len))
 		return 0;
-
-	__asm__ __volatile__ (
-	".section __ex_table,#alloc\n\t"
-	".align 4\n\t"
-	".word 1f,1\n\t"
-	".previous\n"
-	"1:\n\t"
-	"call __csum_partial_copy_sparc_generic\n\t"
-	" st %8, [%%sp + 64]\n"
-	: "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
-	: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (&err)
-	: "o2", "o3", "o4", "o5", "o7",
-	  "g2", "g3", "g4", "g5",
-	  "cc", "memory");
-	return err ? 0 : (__force __wsum)ret;
+	return csum_partial_copy_nocheck(src, (__force void *)dst, len);
 }
 
 /* ihl is always 5 or greater, almost always is 5, and iph is word aligned
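With both user-space wrappers reduced to csum_partial_copy_nocheck(), a 0 return now covers every failure mode: a failed access_ok() check as well as a fault taken inside __csum_partial_copy_sparc_generic(). A short hypothetical usage sketch (user_buf, kernel_buf and len are made-up names; csum_fold() is the generic helper that folds a 32-bit partial sum to 16 bits):

	/* copy a datagram from user space, checksumming it on the fly */
	__wsum partial = csum_and_copy_from_user(user_buf, kernel_buf, len);
	if (!partial)
		return -EFAULT;	/* bad pointer or a fault mid-copy */
	__sum16 csum = csum_fold(partial);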