-rw-r--r--  arch/s390/include/asm/atomic_ops.h | 14 +++++++-------
-rw-r--r--  arch/s390/include/asm/cmpxchg.h    |  4 ++--
2 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/arch/s390/include/asm/atomic_ops.h b/arch/s390/include/asm/atomic_ops.h
index 2e818f2709bf..50510e08b893 100644
--- a/arch/s390/include/asm/atomic_ops.h
+++ b/arch/s390/include/asm/atomic_ops.h
@@ -31,7 +31,7 @@ static inline s64 __atomic64_read(const atomic64_t *v)
 
 	asm volatile(
 		" lg %0,%1\n"
-		: "=d" (c) : "T" (v->counter));
+		: "=d" (c) : "RT" (v->counter));
 	return c;
 }
 
@@ -39,7 +39,7 @@ static inline void __atomic64_set(atomic64_t *v, s64 i)
 {
 	asm volatile(
 		" stg %1,%0\n"
-		: "=T" (v->counter) : "d" (i));
+		: "=RT" (v->counter) : "d" (i));
 }
 
 #ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
@@ -52,7 +52,7 @@ static inline op_type op_name(op_type val, op_type *ptr) \
 	asm volatile( \
 		op_string " %[old],%[val],%[ptr]\n" \
 		op_barrier \
-		: [old] "=d" (old), [ptr] "+S" (*ptr) \
+		: [old] "=d" (old), [ptr] "+QS" (*ptr) \
 		: [val] "d" (val) : "cc", "memory"); \
 	return old; \
 } \
@@ -80,7 +80,7 @@ static __always_inline void op_name(op_type val, op_type *ptr) \
 	asm volatile( \
 		op_string " %[ptr],%[val]\n" \
 		op_barrier \
-		: [ptr] "+S" (*ptr) : [val] "i" (val) : "cc", "memory");\
+		: [ptr] "+QS" (*ptr) : [val] "i" (val) : "cc", "memory");\
 }
 
 #define __ATOMIC_CONST_OPS(op_name, op_type, op_string) \
@@ -131,7 +131,7 @@ static inline long op_name(long val, long *ptr) \
 		op_string " %[new],%[val]\n" \
 		" csg %[old],%[new],%[ptr]\n" \
 		" jl 0b" \
-		: [old] "=d" (old), [new] "=&d" (new), [ptr] "+S" (*ptr)\
+		: [old] "=d" (old), [new] "=&d" (new), [ptr] "+QS" (*ptr)\
 		: [val] "d" (val), "0" (*ptr) : "cc", "memory"); \
 	return old; \
 }
@@ -180,7 +180,7 @@ static inline long __atomic64_cmpxchg(long *ptr, long old, long new)
 {
 	asm volatile(
 		" csg %[old],%[new],%[ptr]"
-		: [old] "+d" (old), [ptr] "+S" (*ptr)
+		: [old] "+d" (old), [ptr] "+QS" (*ptr)
 		: [new] "d" (new)
 		: "cc", "memory");
 	return old;
@@ -192,7 +192,7 @@ static inline bool __atomic64_cmpxchg_bool(long *ptr, long old, long new)
 
 	asm volatile(
 		" csg %[old],%[new],%[ptr]"
-		: [old] "+d" (old), [ptr] "+S" (*ptr)
+		: [old] "+d" (old), [ptr] "+QS" (*ptr)
 		: [new] "d" (new)
 		: "cc", "memory");
 	return old == old_expected;
diff --git a/arch/s390/include/asm/cmpxchg.h b/arch/s390/include/asm/cmpxchg.h
index 6ae4e8a288a2..e1eb65fceef2 100644
--- a/arch/s390/include/asm/cmpxchg.h
+++ b/arch/s390/include/asm/cmpxchg.h
@@ -62,7 +62,7 @@ static inline unsigned long __xchg(unsigned long x, unsigned long address, int s
 			" lg %0,%1\n"
 			"0: csg %0,%2,%1\n"
 			" jl 0b\n"
-			: "=&d" (old), "+S" (*(long *) address)
+			: "=&d" (old), "+QS" (*(long *) address)
 			: "d" (x)
 			: "memory", "cc");
 		return old;
@@ -142,7 +142,7 @@ static inline unsigned long __cmpxchg(unsigned long address, unsigned long old,
 	case 8:
		asm volatile(
 			" csg %0,%3,%1\n"
-			: "=&d" (prev), "+S" (*(long *) address)
+			: "=&d" (prev), "+QS" (*(long *) address)
 			: "0" (old), "d" (new)
 			: "memory", "cc");
 		return prev;
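For context: on s390, the machine constraint "Q" describes a memory operand with a short displacement and no index register, "R" a short displacement with an index register, "S" a long displacement without an index register, and "T" a long displacement with an index register. Replacing the single-alternative "S"/"T" constraints with the combined "QS"/"RT" forms lets the compiler pick whichever of the allowed addressing forms it can generate for the operand. Below is a minimal sketch of the resulting inline-asm shape; it mirrors the compare-and-swap pattern in the patch but uses a made-up standalone helper name, and it builds only with an s390x compiler.

/*
 * Illustrative sketch only, not part of the patch: a freestanding copy of
 * the 64-bit compare-and-swap pattern above. The "+QS" constraint lets the
 * compiler form either a short-displacement ("Q") or long-displacement
 * ("S") memory operand for CSG. The helper name is hypothetical.
 */
static inline long csg_cmpxchg_sketch(long *ptr, long old, long new)
{
	asm volatile(
		"	csg	%[old],%[new],%[ptr]"	/* if *ptr == old, store new; else load *ptr into old */
		: [old] "+d" (old), [ptr] "+QS" (*ptr)
		: [new] "d" (new)
		: "cc", "memory");
	return old;	/* equals the caller's old value on success */
}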