Diffstat (limited to 'include/asm-ppc64/atomic.h')
-rw-r--r-- | include/asm-ppc64/atomic.h | 197
1 file changed, 197 insertions, 0 deletions
diff --git a/include/asm-ppc64/atomic.h b/include/asm-ppc64/atomic.h
new file mode 100644
index 000000000000..0e5f25e83bc0
--- /dev/null
+++ b/include/asm-ppc64/atomic.h
@@ -0,0 +1,197 @@
+/*
+ * PowerPC64 atomic operations
+ *
+ * Copyright (C) 2001 Paul Mackerras <paulus@au.ibm.com>, IBM
+ * Copyright (C) 2001 Anton Blanchard <anton@au.ibm.com>, IBM
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version
+ * 2 of the License, or (at your option) any later version.
+ */
+
+#ifndef _ASM_PPC64_ATOMIC_H_
+#define _ASM_PPC64_ATOMIC_H_
+
+#include <asm/memory.h>
+
+typedef struct { volatile int counter; } atomic_t;
+
+#define ATOMIC_INIT(i)	{ (i) }
+
+#define atomic_read(v)		((v)->counter)
+#define atomic_set(v,i)		(((v)->counter) = (i))
+
+static __inline__ void atomic_add(int a, atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+"1:	lwarx	%0,0,%3		# atomic_add\n\
+	add	%0,%2,%0\n\
+	stwcx.	%0,0,%3\n\
+	bne-	1b"
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "cc");
+}
+
+static __inline__ int atomic_add_return(int a, atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	lwarx	%0,0,%2		# atomic_add_return\n\
+	add	%0,%1,%0\n\
+	stwcx.	%0,0,%2\n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (t)
+	: "r" (a), "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
+
+static __inline__ void atomic_sub(int a, atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+"1:	lwarx	%0,0,%3		# atomic_sub\n\
+	subf	%0,%2,%0\n\
+	stwcx.	%0,0,%3\n\
+	bne-	1b"
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (a), "r" (&v->counter), "m" (v->counter)
+	: "cc");
+}
+
+static __inline__ int atomic_sub_return(int a, atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
+	subf	%0,%1,%0\n\
+	stwcx.	%0,0,%2\n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (t)
+	: "r" (a), "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+static __inline__ void atomic_inc(atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+"1:	lwarx	%0,0,%2		# atomic_inc\n\
+	addic	%0,%0,1\n\
+	stwcx.	%0,0,%2\n\
+	bne-	1b"
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (&v->counter), "m" (v->counter)
+	: "cc");
+}
+
+static __inline__ int atomic_inc_return(atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
+	addic	%0,%0,1\n\
+	stwcx.	%0,0,%1\n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (t)
+	: "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+/*
+ * atomic_inc_and_test - increment and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
+
+static __inline__ void atomic_dec(atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+"1:	lwarx	%0,0,%2		# atomic_dec\n\
+	addic	%0,%0,-1\n\
+	stwcx.	%0,0,%2\n\
+	bne-	1b"
+	: "=&r" (t), "=m" (v->counter)
+	: "r" (&v->counter), "m" (v->counter)
+	: "cc");
+}
+
+static __inline__ int atomic_dec_return(atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
+	addic	%0,%0,-1\n\
+	stwcx.	%0,0,%1\n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	: "=&r" (t)
+	: "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
+#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
+
+/*
+ * Atomically test *v and decrement if it is greater than 0.
+ * The function returns the old value of *v minus 1.
+ */
+static __inline__ int atomic_dec_if_positive(atomic_t *v)
+{
+	int t;
+
+	__asm__ __volatile__(
+	EIEIO_ON_SMP
+"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
+	addic.	%0,%0,-1\n\
+	blt-	2f\n\
+	stwcx.	%0,0,%1\n\
+	bne-	1b"
+	ISYNC_ON_SMP
+	"\n\
+2:"	: "=&r" (t)
+	: "r" (&v->counter)
+	: "cc", "memory");
+
+	return t;
+}
+
+#define smp_mb__before_atomic_dec()	smp_mb()
+#define smp_mb__after_atomic_dec()	smp_mb()
+#define smp_mb__before_atomic_inc()	smp_mb()
+#define smp_mb__after_atomic_inc()	smp_mb()
+
+#endif /* _ASM_PPC64_ATOMIC_H_ */
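Each operation in the new header is a lwarx/stwcx. reservation loop; the value-returning variants additionally bracket the loop with EIEIO_ON_SMP and ISYNC_ON_SMP (which come from <asm/memory.h>, included at the top of the file) and clobber "memory", so on SMP they also order the atomic update against surrounding accesses. The sketch below is a hypothetical usage example, not part of this patch: struct foo, foo_alloc(), foo_get() and foo_put() are illustrative names only, showing the reference-count pattern this interface is typically used for.

/* Hypothetical example (not part of the patch): a simple refcount
 * built on the atomic_t interface added above. */
#include <asm/atomic.h>
#include <linux/slab.h>

struct foo {
	atomic_t refcount;
	/* ... payload ... */
};

static struct foo *foo_alloc(void)
{
	struct foo *f = kmalloc(sizeof(*f), GFP_KERNEL);

	if (f)
		atomic_set(&f->refcount, 1);	/* caller holds one reference */
	return f;
}

static void foo_get(struct foo *f)
{
	atomic_inc(&f->refcount);		/* plain lwarx/stwcx. retry loop */
}

static void foo_put(struct foo *f)
{
	/* atomic_dec_and_test() is built on atomic_dec_return(), so the
	 * decrement carries the EIEIO_ON_SMP/ISYNC_ON_SMP barriers and is
	 * ordered against the caller's earlier stores to *f. */
	if (atomic_dec_and_test(&f->refcount))
		kfree(f);
}

Note that the non-returning forms (atomic_add, atomic_sub, atomic_inc, atomic_dec) have no barriers and no "memory" clobber, so they are the cheaper choice when no ordering is needed.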