author     Mark Rutland <mark.rutland@arm.com>    2023-03-14 18:36:58 +0300
committer  Will Deacon <will@kernel.org>          2023-03-28 23:13:25 +0300
commit     39c8275de81cf7fb524e2ff067aa175447412284 (patch)
tree       b03d6dd145da3d2156036b70e3613df0e01bb3ba /arch/arm64/include/asm/barrier.h
parent     e5cacb540fd2509484d6849c0d5372bd67d174b9 (diff)
download   linux-39c8275de81cf7fb524e2ff067aa175447412284.tar.xz
arm64: uaccess: permit __smp_store_release() to use zero register
Currently the asm constraints for __smp_store_release() require that the
value is placed in a "real" GPR (i.e. one other than [XW]ZR or SP).
This means that for cases such as:

    __smp_store_release(ptr, 0)

... the compiler has to move '0' into a "real" GPR, e.g.

    mov	xN, #0
    stlr	xN, [<addr>]
This is unfortunate, as using the zero register would require fewer
instructions and save a "real" GPR for other usage, allowing the
compiler to generate:

    stlr	xzr, [<addr>]
Modify the asm constraints for __smp_store_release() to permit the use of
the zero register for the value.
There should be no functional change as a result of this patch.
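
To illustrate the constraint change, here is a minimal standalone sketch
(not the kernel macro itself; the function and variable names are
illustrative) showing how "rZ" lets the compiler substitute the zero
register for a constant-zero operand:

    #include <stdint.h>

    /*
     * With plain "r", the compiler must materialise 0 in a GPR before
     * the store; with "rZ" it may pass the integer constant zero, which
     * the "%x1" operand modifier renders as "xzr".
     */
    static inline void store_release_u64(uint64_t *p, uint64_t val)
    {
    	asm volatile("stlr %x1, %0"
    		     : "=Q" (*p)
    		     : "rZ" (val)
    		     : "memory");
    }

    void publish_zero(uint64_t *p)
    {
    	/* Expected codegen at -O2: a single "stlr xzr, [x0]". */
    	store_release_u64(p, 0);
    }

The emitted assembly can be inspected with, e.g., an AArch64 cross
compiler: aarch64-linux-gnu-gcc -O2 -S.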
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: Robin Murphy <robin.murphy@arm.com>
Cc: Will Deacon <will@kernel.org>
Link: https://lore.kernel.org/r/20230314153700.787701-3-mark.rutland@arm.com
Signed-off-by: Will Deacon <will@kernel.org>
Diffstat (limited to 'arch/arm64/include/asm/barrier.h')
-rw-r--r--  arch/arm64/include/asm/barrier.h  10
1 file changed, 5 insertions, 5 deletions
diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index 3dd8982a9ce3..cf2987464c18 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -131,25 +131,25 @@ do {							\
 	case 1:						\
 		asm volatile ("stlrb %w1, %0"		\
 				: "=Q" (*__p)		\
-				: "r" (*(__u8 *)__u.__c)	\
+				: "rZ" (*(__u8 *)__u.__c)	\
 				: "memory");		\
 		break;					\
 	case 2:						\
 		asm volatile ("stlrh %w1, %0"		\
 				: "=Q" (*__p)		\
-				: "r" (*(__u16 *)__u.__c)	\
+				: "rZ" (*(__u16 *)__u.__c)	\
 				: "memory");		\
 		break;					\
 	case 4:						\
 		asm volatile ("stlr %w1, %0"		\
 				: "=Q" (*__p)		\
-				: "r" (*(__u32 *)__u.__c)	\
+				: "rZ" (*(__u32 *)__u.__c)	\
 				: "memory");		\
 		break;					\
 	case 8:						\
-		asm volatile ("stlr %1, %0"		\
+		asm volatile ("stlr %x1, %0"		\
 				: "=Q" (*__p)		\
-				: "r" (*(__u64 *)__u.__c)	\
+				: "rZ" (*(__u64 *)__u.__c)	\
 				: "memory");		\
 		break;					\
 	}						\
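
A side note on the 8-byte case: the template also changes from "stlr %1,
%0" to "stlr %x1, %0". With "rZ", the operand may now be the integer
constant zero rather than a register, and the explicit 'x' operand
modifier appears to be what tells the compiler to render that constant
as the 64-bit zero register name "xzr"; the narrower cases already get
this via their existing "%w1" modifier, which renders it as "wzr". This
reading is inferred from the diff rather than stated in the commit
message.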