author | Linus Torvalds <torvalds@linux-foundation.org> | 2020-04-01 23:23:14 +0300
---|---|---
committer | Linus Torvalds <torvalds@linux-foundation.org> | 2020-04-01 23:23:14 +0300
commit | 890f0b0d27dc400679b9a91d04ca44f5ee4c19c0 (patch) |
tree | 898dde3e925c815c287b88fefb3d9320a583235a /arch/x86 |
parent | 7da63b3d54aa7f1ad4b33a50d88edd623f29fded (diff) |
download | linux-890f0b0d27dc400679b9a91d04ca44f5ee4c19c0.tar.xz |
x86: start using named parameters for low-level uaccess asms
This is partly for readability - using named arguments instead of
numbered ones makes it much more obvious just what is going on. Using
"%[efault]" instead of "%4" for the special -EFAULT constant just means
that you don't have to count the arguments to see what's up.
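For readers less familiar with GCC extended asm: operands can be referenced either by position ("%0", "%1", ...) or by a bracketed name declared alongside the constraint. A minimal, standalone user-space sketch (plain C for gcc/clang on x86, not the kernel macros) contrasting the two styles:

```c
/* Illustration of numbered vs. named asm operands (AT&T syntax). */
static inline int add_numbered(int a, int b)
{
	int out;

	/* Positional: %0 is "out", %1 is "a", %2 is "b" -- adding or
	 * removing an operand renumbers everything after it. */
	asm("movl %1, %0\n\t"
	    "addl %2, %0"
	    : "=&r" (out)
	    : "r" (a), "r" (b));
	return out;
}

static inline int add_named(int a, int b)
{
	int out;

	/* Named: the template refers to operands by their bracketed
	 * names, so operands can be added, removed or reordered
	 * without touching the template text. */
	asm("movl %[lhs], %[res]\n\t"
	    "addl %[rhs], %[res]"
	    : [res] "=&r" (out)
	    : [lhs] "r" (a), [rhs] "r" (b));
	return out;
}
```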
But the motivation for all this cleanup is that when we start to
conditionally use "asm goto" even for the __get_user_asm() case, the
argument numbers will depend on whether we have an error output, or an
error label we can just directly jump to.
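To make the operand-count point concrete, here is a hedged sketch of the shape such an "asm goto" variant can take. It is not code from this patch; the macro name, the way the exception table refers to the label, and the presence of an output operand (which needs compiler support for "asm goto" with outputs, hence "conditionally") are illustrative.

```c
/*
 * Sketch only: the fault is reported by jumping to a C label, so the
 * error output and the -EFAULT constant of the plain __get_user_asm()
 * (see the diff below) simply do not exist here.  With positional
 * numbering, everything after them would be renumbered.  %l2 refers
 * to the label, which is numbered after the one output and one input.
 */
#define __get_user_asm_goto(x, addr, itype, ltype, label)		\
	asm_volatile_goto("\n"						\
		     "1:	mov"itype" %[umem],%[output]\n"		\
		     _ASM_EXTABLE_UA(1b, %l2)				\
		     : [output] ltype(x)				\
		     : [umem] "m" (__m(addr))				\
		     : : label)
```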
So this moves us towards named arguments for the same reason that we
have to use named arguments for the asms that use SET_CC(): numbering
will eventually become similarly unreliable, depending on whether we
can use particular compiler features or not.
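For reference, the pattern referred to here (presumably the CC_SET()/CC_OUT() helpers in arch/x86/include/asm/asm.h) already forces named operands: the instruction that CC_SET() may or may not emit can only refer to its flag-output operand by name, since the macro cannot know which positional number that operand gets at each use site. A hedged, kernel-context sketch of such a user (illustrative, not from this patch):

```c
#include <linux/types.h>
#include <asm/asm.h>		/* CC_SET(), CC_OUT() */

/*
 * Illustrative only.  With compiler flag-output support, CC_OUT(z)
 * becomes an "=@ccz" constraint and CC_SET(z) adds nothing to the
 * template; without it, CC_SET(z) emits a "setz" that refers to the
 * CC_OUT(z) operand by name, because the macro cannot know what
 * number the operand would get at this use site.
 */
static inline bool dec_and_test(int *v)
{
	bool zero;

	asm volatile("decl %[val]"
		     CC_SET(z)
		     : [val] "+m" (*v), CC_OUT(z) (zero));
	return zero;
}
```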
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'arch/x86')
-rw-r--r-- | arch/x86/include/asm/uaccess.h | 26 |
1 file changed, 15 insertions, 11 deletions
diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index f26c84316ba5..d8f283b9a569 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -289,20 +289,22 @@ do {									\
 ({									\
 	__typeof__(ptr) __ptr = (ptr);					\
 	asm volatile("\n"						\
-		     "1:	movl %2,%%eax\n"			\
-		     "2:	movl %3,%%edx\n"			\
+		     "1:	movl %[lowbits],%%eax\n"		\
+		     "2:	movl %[highbits],%%edx\n"		\
 		     "3:\n"						\
 		     ".section .fixup,\"ax\"\n"				\
-		     "4:	mov %4,%0\n"				\
+		     "4:	mov %[efault],%[errout]\n"		\
 		     "	xorl %%eax,%%eax\n"				\
 		     "	xorl %%edx,%%edx\n"				\
 		     "	jmp 3b\n"					\
 		     ".previous\n"					\
 		     _ASM_EXTABLE_UA(1b, 4b)				\
 		     _ASM_EXTABLE_UA(2b, 4b)				\
-		     : "=r" (retval), "=&A"(x)				\
-		     : "m" (__m(__ptr)), "m" __m(((u32 __user *)(__ptr)) + 1),	\
-		       "i" (-EFAULT), "0" (retval));			\
+		     : [errout] "=r" (retval),				\
+		       [output] "=&A"(x)				\
+		     : [lowbits] "m" (__m(__ptr)),			\
+		       [highbits] "m" __m(((u32 __user *)(__ptr)) + 1),	\
+		       [efault] "i" (-EFAULT), "0" (retval));		\
 })
 
 #else
@@ -334,16 +336,18 @@ do {									\
 
 #define __get_user_asm(x, addr, err, itype, ltype)			\
 	asm volatile("\n"						\
-		     "1:	mov"itype" %2,%1\n"			\
+		     "1:	mov"itype" %[umem],%[output]\n"		\
 		     "2:\n"						\
 		     ".section .fixup,\"ax\"\n"				\
-		     "3:	mov %3,%0\n"				\
-		     "	xor"itype" %1,%1\n"				\
+		     "3:	mov %[efault],%[errout]\n"		\
+		     "	xor"itype" %[output],%[output]\n"		\
 		     "	jmp 2b\n"					\
 		     ".previous\n"					\
 		     _ASM_EXTABLE_UA(1b, 3b)				\
-		     : "=r" (err), ltype(x)				\
-		     : "m" (__m(addr)), "i" (-EFAULT), "0" (err))
+		     : [errout] "=r" (err),				\
+		       [output] ltype(x)				\
+		     : [umem] "m" (__m(addr)),				\
+		       [efault] "i" (-EFAULT), "0" (err))
 
 #define __put_user_nocheck(x, ptr, size)				\
 ({									\
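For context on how the reworked macro is invoked: the call sites are not part of this hunk, so the following is an illustrative sketch rather than a quote of the file; "user_ptr" is a placeholder for a __user pointer. A caller such as __get_user_size() supplies the instruction suffix and output constraint per access size, roughly:

```c
/*
 * Illustrative sketch of a 1-byte caller (not part of this hunk):
 * "b" selects movb/xorb and "=q" restricts the output to a
 * byte-capable register.  The named [umem]/[output]/[errout]/[efault]
 * operands expand the same way for every size case.
 */
unsigned char val;
int retval = 0;

__get_user_asm(val, user_ptr, retval, "b", "=q");
if (unlikely(retval))
	return retval;		/* the fixup path stored -EFAULT here */
```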