Diffstat (limited to 'arch/mips/include/asm/atomic.h')
-rw-r--r-- | arch/mips/include/asm/atomic.h | 374
1 file changed, 192 insertions, 182 deletions
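Note: the patch below pulls in <asm/compiler.h> and replaces the bare "m"/"+m" memory operands in the LL/SC inline assembly with a "+"/"=" prefix plus the GCC_OFF12_ASM() wrapper. As a rough sketch of what that wrapper is assumed to select (the real definition lives in <asm/compiler.h> and may differ in detail), it would pick a memory constraint whose offset fits the short ll/sc offset field:

/*
 * Illustrative sketch only: the actual macro is provided by
 * <asm/compiler.h>; the constraint strings and the config guard
 * used here are assumptions, not the verified definition.
 */
#ifdef CONFIG_CPU_MICROMIPS
/* "ZC": memory operand whose offset fits the microMIPS ll/sc encoding */
# define GCC_OFF12_ASM()	"ZC"
#else
/* "R": memory operand reachable via a base register plus short offset */
# define GCC_OFF12_ASM()	"R"
#endif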
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 6dd6bfc607e9..857da84cfc92 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -17,6 +17,7 @@
 #include <linux/irqflags.h>
 #include <linux/types.h>
 #include <asm/barrier.h>
+#include <asm/compiler.h>
 #include <asm/cpu-features.h>
 #include <asm/cmpxchg.h>
 #include <asm/war.h>
@@ -40,95 +41,97 @@
  */
 #define atomic_set(v, i)	((v)->counter = (i))
 
-#define ATOMIC_OP(op, c_op, asm_op)	\
-static __inline__ void atomic_##op(int i, atomic_t * v)	\
-{	\
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {	\
-		int temp;	\
-	\
-		__asm__ __volatile__(	\
-		"	.set	arch=r4000	\n"	\
-		"1:	ll	%0, %1		# atomic_" #op "	\n"	\
-		"	" #asm_op " %0, %2	\n"	\
-		"	sc	%0, %1	\n"	\
-		"	beqzl	%0, 1b	\n"	\
-		"	.set	mips0	\n"	\
-		: "=&r" (temp), "+m" (v->counter)	\
-		: "Ir" (i));	\
-	} else if (kernel_uses_llsc) {	\
-		int temp;	\
-	\
-		do {	\
-			__asm__ __volatile__(	\
-			"	.set	arch=r4000	\n"	\
-			"	ll	%0, %1		# atomic_" #op "\n"	\
-			"	" #asm_op " %0, %2	\n"	\
-			"	sc	%0, %1	\n"	\
-			"	.set	mips0	\n"	\
-			: "=&r" (temp), "+m" (v->counter)	\
-			: "Ir" (i));	\
-		} while (unlikely(!temp));	\
-	} else {	\
-		unsigned long flags;	\
-	\
-		raw_local_irq_save(flags);	\
-		v->counter c_op i;	\
-		raw_local_irq_restore(flags);	\
-	}	\
-}	\
-
-#define ATOMIC_OP_RETURN(op, c_op, asm_op)	\
-static __inline__ int atomic_##op##_return(int i, atomic_t * v)	\
-{	\
-	int result;	\
-	\
-	smp_mb__before_llsc();	\
-	\
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {	\
-		int temp;	\
-	\
-		__asm__ __volatile__(	\
-		"	.set	arch=r4000	\n"	\
-		"1:	ll	%1, %2		# atomic_" #op "_return	\n"	\
-		"	" #asm_op " %0, %1, %3	\n"	\
-		"	sc	%0, %2	\n"	\
-		"	beqzl	%0, 1b	\n"	\
-		"	" #asm_op " %0, %1, %3	\n"	\
-		"	.set	mips0	\n"	\
-		: "=&r" (result), "=&r" (temp), "+m" (v->counter)	\
-		: "Ir" (i));	\
-	} else if (kernel_uses_llsc) {	\
-		int temp;	\
-	\
-		do {	\
-			__asm__ __volatile__(	\
-			"	.set	arch=r4000	\n"	\
-			"	ll	%1, %2		# atomic_" #op "_return	\n"	\
-			"	" #asm_op " %0, %1, %3	\n"	\
-			"	sc	%0, %2	\n"	\
-			"	.set	mips0	\n"	\
-			: "=&r" (result), "=&r" (temp), "+m" (v->counter)	\
-			: "Ir" (i));	\
-		} while (unlikely(!result));	\
-	\
-		result = temp; result c_op i;	\
-	} else {	\
-		unsigned long flags;	\
-	\
-		raw_local_irq_save(flags);	\
-		result = v->counter;	\
-		result c_op i;	\
-		v->counter = result;	\
-		raw_local_irq_restore(flags);	\
-	}	\
-	\
-	smp_llsc_mb();	\
-	\
-	return result;	\
+#define ATOMIC_OP(op, c_op, asm_op)	\
+static __inline__ void atomic_##op(int i, atomic_t * v)	\
+{	\
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {	\
+		int temp;	\
+	\
+		__asm__ __volatile__(	\
+		"	.set	arch=r4000	\n"	\
+		"1:	ll	%0, %1		# atomic_" #op "	\n"	\
+		"	" #asm_op " %0, %2	\n"	\
+		"	sc	%0, %1	\n"	\
+		"	beqzl	%0, 1b	\n"	\
+		"	.set	mips0	\n"	\
+		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	\
+		: "Ir" (i));	\
+	} else if (kernel_uses_llsc) {	\
+		int temp;	\
+	\
+		do {	\
+			__asm__ __volatile__(	\
+			"	.set	arch=r4000	\n"	\
+			"	ll	%0, %1		# atomic_" #op "\n"	\
+			"	" #asm_op " %0, %2	\n"	\
+			"	sc	%0, %1	\n"	\
+			"	.set	mips0	\n"	\
+			: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	\
+			: "Ir" (i));	\
+		} while (unlikely(!temp));	\
+	} else {	\
+		unsigned long flags;	\
+	\
+		raw_local_irq_save(flags);	\
+		v->counter c_op i;	\
+		raw_local_irq_restore(flags);	\
+	}	\
 }
 
-#define ATOMIC_OPS(op, c_op, asm_op)	\
-	ATOMIC_OP(op, c_op, asm_op)	\
+#define ATOMIC_OP_RETURN(op, c_op, asm_op)	\
+static __inline__ int atomic_##op##_return(int i, atomic_t * v)	\
+{	\
+	int result;	\
+	\
+	smp_mb__before_llsc();	\
+	\
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {	\
+		int temp;	\
+	\
+		__asm__ __volatile__(	\
+		"	.set	arch=r4000	\n"	\
+		"1:	ll	%1, %2		# atomic_" #op "_return	\n"	\
+		"	" #asm_op " %0, %1, %3	\n"	\
+		"	sc	%0, %2	\n"	\
+		"	beqzl	%0, 1b	\n"	\
+		"	" #asm_op " %0, %1, %3	\n"	\
+		"	.set	mips0	\n"	\
+		: "=&r" (result), "=&r" (temp),	\
+		  "+" GCC_OFF12_ASM() (v->counter)	\
+		: "Ir" (i));	\
+	} else if (kernel_uses_llsc) {	\
+		int temp;	\
+	\
+		do {	\
+			__asm__ __volatile__(	\
+			"	.set	arch=r4000	\n"	\
+			"	ll	%1, %2		# atomic_" #op "_return	\n"	\
+			"	" #asm_op " %0, %1, %3	\n"	\
+			"	sc	%0, %2	\n"	\
+			"	.set	mips0	\n"	\
+			: "=&r" (result), "=&r" (temp),	\
+			  "+" GCC_OFF12_ASM() (v->counter)	\
+			: "Ir" (i));	\
+		} while (unlikely(!result));	\
+	\
+		result = temp; result c_op i;	\
+	} else {	\
+		unsigned long flags;	\
+	\
+		raw_local_irq_save(flags);	\
+		result = v->counter;	\
+		result c_op i;	\
+		v->counter = result;	\
+		raw_local_irq_restore(flags);	\
+	}	\
+	\
+	smp_llsc_mb();	\
+	\
+	return result;	\
+}
+
+#define ATOMIC_OPS(op, c_op, asm_op)	\
+	ATOMIC_OP(op, c_op, asm_op)	\
 	ATOMIC_OP_RETURN(op, c_op, asm_op)
 
 ATOMIC_OPS(add, +=, addu)
@@ -167,8 +170,9 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	.set	reorder	\n"
 		"1:	\n"
 		"	.set	mips0	\n"
-		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
-		: "Ir" (i), "m" (v->counter)
+		: "=&r" (result), "=&r" (temp),
+		  "+" GCC_OFF12_ASM() (v->counter)
+		: "Ir" (i), GCC_OFF12_ASM() (v->counter)
 		: "memory");
 	} else if (kernel_uses_llsc) {
 		int temp;
@@ -185,7 +189,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	.set	reorder	\n"
 		"1:	\n"
 		"	.set	mips0	\n"
-		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
+		: "=&r" (result), "=&r" (temp),
+		  "+" GCC_OFF12_ASM() (v->counter)
 		: "Ir" (i));
 	} else {
 		unsigned long flags;
@@ -315,96 +320,98 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  */
 #define atomic64_set(v, i)	((v)->counter = (i))
 
-#define ATOMIC64_OP(op, c_op, asm_op)	\
-static __inline__ void atomic64_##op(long i, atomic64_t * v)	\
-{	\
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {	\
-		long temp;	\
-	\
-		__asm__ __volatile__(	\
-		"	.set	arch=r4000	\n"	\
-		"1:	lld	%0, %1		# atomic64_" #op "	\n"	\
-		"	" #asm_op " %0, %2	\n"	\
-		"	scd	%0, %1	\n"	\
-		"	beqzl	%0, 1b	\n"	\
-		"	.set	mips0	\n"	\
-		: "=&r" (temp), "+m" (v->counter)	\
-		: "Ir" (i));	\
-	} else if (kernel_uses_llsc) {	\
-		long temp;	\
-	\
-		do {	\
-			__asm__ __volatile__(	\
-			"	.set	arch=r4000	\n"	\
-			"	lld	%0, %1		# atomic64_" #op "\n"	\
-			"	" #asm_op " %0, %2	\n"	\
-			"	scd	%0, %1	\n"	\
-			"	.set	mips0	\n"	\
-			: "=&r" (temp), "+m" (v->counter)	\
-			: "Ir" (i));	\
-		} while (unlikely(!temp));	\
-	} else {	\
-		unsigned long flags;	\
-	\
-		raw_local_irq_save(flags);	\
-		v->counter c_op i;	\
-		raw_local_irq_restore(flags);	\
-	}	\
-}	\
-
-#define ATOMIC64_OP_RETURN(op, c_op, asm_op)	\
-static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
-{	\
-	long result;	\
-	\
-	smp_mb__before_llsc();	\
-	\
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {	\
-		long temp;	\
-	\
-		__asm__ __volatile__(	\
-		"	.set	arch=r4000	\n"	\
-		"1:	lld	%1, %2		# atomic64_" #op "_return\n"	\
-		"	" #asm_op " %0, %1, %3	\n"	\
-		"	scd	%0, %2	\n"	\
-		"	beqzl	%0, 1b	\n"	\
-		"	" #asm_op " %0, %1, %3	\n"	\
-		"	.set	mips0	\n"	\
-		: "=&r" (result), "=&r" (temp), "+m" (v->counter)	\
-		: "Ir" (i));	\
-	} else if (kernel_uses_llsc) {	\
-		long temp;	\
-	\
-		do {	\
-			__asm__ __volatile__(	\
-			"	.set	arch=r4000	\n"	\
-			"	lld	%1, %2		# atomic64_" #op "_return\n"	\
-			"	" #asm_op " %0, %1, %3	\n"	\
-			"	scd	%0, %2	\n"	\
-			"	.set	mips0	\n"	\
-			: "=&r" (result), "=&r" (temp), "=m" (v->counter)	\
-			: "Ir" (i), "m" (v->counter)	\
-			: "memory");	\
-		} while (unlikely(!result));	\
-	\
-		result = temp; result c_op i;	\
-	} else {	\
-		unsigned long flags;	\
-	\
-		raw_local_irq_save(flags);	\
-		result = v->counter;	\
-		result c_op i;	\
-		v->counter = result;	\
-		raw_local_irq_restore(flags);	\
-	}	\
-	\
-	smp_llsc_mb();	\
-	\
-	return result;	\
+#define ATOMIC64_OP(op, c_op, asm_op)	\
+static __inline__ void atomic64_##op(long i, atomic64_t * v)	\
+{	\
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {	\
+		long temp;	\
+	\
+		__asm__ __volatile__(	\
+		"	.set	arch=r4000	\n"	\
+		"1:	lld	%0, %1		# atomic64_" #op "	\n"	\
+		"	" #asm_op " %0, %2	\n"	\
+		"	scd	%0, %1	\n"	\
+		"	beqzl	%0, 1b	\n"	\
+		"	.set	mips0	\n"	\
+		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	\
+		: "Ir" (i));	\
+	} else if (kernel_uses_llsc) {	\
+		long temp;	\
+	\
+		do {	\
+			__asm__ __volatile__(	\
+			"	.set	arch=r4000	\n"	\
+			"	lld	%0, %1		# atomic64_" #op "\n"	\
+			"	" #asm_op " %0, %2	\n"	\
+			"	scd	%0, %1	\n"	\
+			"	.set	mips0	\n"	\
+			: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	\
+			: "Ir" (i));	\
+		} while (unlikely(!temp));	\
+	} else {	\
+		unsigned long flags;	\
+	\
+		raw_local_irq_save(flags);	\
+		v->counter c_op i;	\
+		raw_local_irq_restore(flags);	\
+	}	\
+}
+
+#define ATOMIC64_OP_RETURN(op, c_op, asm_op)	\
+static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
+{	\
+	long result;	\
+	\
+	smp_mb__before_llsc();	\
+	\
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {	\
+		long temp;	\
+	\
+		__asm__ __volatile__(	\
+		"	.set	arch=r4000	\n"	\
+		"1:	lld	%1, %2		# atomic64_" #op "_return\n"	\
+		"	" #asm_op " %0, %1, %3	\n"	\
+		"	scd	%0, %2	\n"	\
+		"	beqzl	%0, 1b	\n"	\
+		"	" #asm_op " %0, %1, %3	\n"	\
+		"	.set	mips0	\n"	\
+		: "=&r" (result), "=&r" (temp),	\
+		  "+" GCC_OFF12_ASM() (v->counter)	\
+		: "Ir" (i));	\
+	} else if (kernel_uses_llsc) {	\
+		long temp;	\
+	\
+		do {	\
+			__asm__ __volatile__(	\
+			"	.set	arch=r4000	\n"	\
+			"	lld	%1, %2		# atomic64_" #op "_return\n"	\
+			"	" #asm_op " %0, %1, %3	\n"	\
+			"	scd	%0, %2	\n"	\
+			"	.set	mips0	\n"	\
+			: "=&r" (result), "=&r" (temp),	\
+			  "=" GCC_OFF12_ASM() (v->counter)	\
+			: "Ir" (i), GCC_OFF12_ASM() (v->counter)	\
+			: "memory");	\
+		} while (unlikely(!result));	\
+	\
+		result = temp; result c_op i;	\
+	} else {	\
+		unsigned long flags;	\
+	\
+		raw_local_irq_save(flags);	\
+		result = v->counter;	\
+		result c_op i;	\
+		v->counter = result;	\
+		raw_local_irq_restore(flags);	\
+	}	\
+	\
+	smp_llsc_mb();	\
+	\
+	return result;	\
 }
 
-#define ATOMIC64_OPS(op, c_op, asm_op)	\
-	ATOMIC64_OP(op, c_op, asm_op)	\
+#define ATOMIC64_OPS(op, c_op, asm_op)	\
+	ATOMIC64_OP(op, c_op, asm_op)	\
 	ATOMIC64_OP_RETURN(op, c_op, asm_op)
 
 ATOMIC64_OPS(add, +=, daddu)
@@ -415,7 +422,8 @@ ATOMIC64_OPS(sub, -=, dsubu)
 #undef ATOMIC64_OP
 
 /*
- * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
+ * atomic64_sub_if_positive - conditionally subtract integer from atomic
+ *			      variable
  * @i: integer value to subtract
  * @v: pointer of type atomic64_t
  *
@@ -443,8 +451,9 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		"	.set	reorder	\n"
 		"1:	\n"
 		"	.set	mips0	\n"
-		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
-		: "Ir" (i), "m" (v->counter)
+		: "=&r" (result), "=&r" (temp),
+		  "=" GCC_OFF12_ASM() (v->counter)
+		: "Ir" (i), GCC_OFF12_ASM() (v->counter)
 		: "memory");
 	} else if (kernel_uses_llsc) {
 		long temp;
@@ -461,7 +470,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		"	.set	reorder	\n"
 		"1:	\n"
 		"	.set	mips0	\n"
-		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
+		: "=&r" (result), "=&r" (temp),
+		  "+" GCC_OFF12_ASM() (v->counter)
 		: "Ir" (i));
 	} else {
 		unsigned long flags;
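For reference, expanding ATOMIC_OP(add, +=, addu) against the patched macro gives roughly the function below. This is a sketch only: just the plain kernel_uses_llsc branch is shown, and the R10000_LLSC_WAR beqzl workaround and the irq-disable fallback from the macro are omitted.

/*
 * Sketch of the LL/SC branch of atomic_add() after this change;
 * the R10000 workaround and the irq-disable fallback are left out.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	int temp;

	do {
		__asm__ __volatile__(
		"	.set	arch=r4000		\n"
		"	ll	%0, %1	# atomic_add	\n"
		"	addu	%0, %2			\n"
		"	sc	%0, %1			\n"
		"	.set	mips0			\n"
		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)
		: "Ir" (i));
	} while (unlikely(!temp));
}

The only functional difference from the old expansion is the constraint on v->counter: instead of the unrestricted "m", GCC_OFF12_ASM() presumably limits the compiler to addresses whose offsets the ll/sc encodings (notably on microMIPS) can actually reach.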