author | Linus Torvalds <torvalds@linux-foundation.org> | 2014-06-03 23:57:53 +0400
---|---|---
committer | Linus Torvalds <torvalds@linux-foundation.org> | 2014-06-03 23:57:53 +0400
commit | 776edb59317ada867dfcddde40b55648beeb0078 (patch) |
tree | f6a6136374642323cfefd7d6399ea429f9018ade /arch/arm/include |
parent | 59a3d4c3631e553357b7305dc09db1990aa6757c (diff) |
parent | 3cf2f34e1a3d4d5ff209d087925cf950e52f4805 (diff) |
download | linux-776edb59317ada867dfcddde40b55648beeb0078.tar.xz |
Merge branch 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip into next
Pull core locking updates from Ingo Molnar:
"The main changes in this cycle were:
- reduced/streamlined smp_mb__*() interface that allows more use cases
and makes the existing ones less buggy, especially on rarer
architectures
- add rwsem implementation comments
- bump up lockdep limits"
* 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (33 commits)
rwsem: Add comments to explain the meaning of the rwsem's count field
lockdep: Increase static allocations
arch: Mass conversion of smp_mb__*()
arch,doc: Convert smp_mb__*()
arch,xtensa: Convert smp_mb__*()
arch,x86: Convert smp_mb__*()
arch,tile: Convert smp_mb__*()
arch,sparc: Convert smp_mb__*()
arch,sh: Convert smp_mb__*()
arch,score: Convert smp_mb__*()
arch,s390: Convert smp_mb__*()
arch,powerpc: Convert smp_mb__*()
arch,parisc: Convert smp_mb__*()
arch,openrisc: Convert smp_mb__*()
arch,mn10300: Convert smp_mb__*()
arch,mips: Convert smp_mb__*()
arch,metag: Convert smp_mb__*()
arch,m68k: Convert smp_mb__*()
arch,m32r: Convert smp_mb__*()
arch,ia64: Convert smp_mb__*()
...
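
For call sites, the smp_mb__*() conversion listed above is a mechanical rename: the per-operation barriers (smp_mb__before_atomic_dec(), smp_mb__after_atomic_inc(), smp_mb__before_clear_bit(), ...) collapse into one generic pair, smp_mb__before_atomic() and smp_mb__after_atomic(). A minimal sketch of a converted caller; the struct and field names here are hypothetical and not taken from this merge:

```c
#include <linux/atomic.h>

/* Hypothetical object with an atomic_t reference count (illustration only). */
struct example_obj {
	atomic_t refs;
};

static void example_obj_put(struct example_obj *obj)
{
	/*
	 * Old interface (removed by this series):
	 *	smp_mb__before_atomic_dec();
	 * New interface, a single pair covering all RMW atomics and bitops:
	 */
	smp_mb__before_atomic();	/* order earlier stores before the decrement */
	atomic_dec(&obj->refs);
}
```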
Diffstat (limited to 'arch/arm/include')
-rw-r--r-- | arch/arm/include/asm/atomic.h | 5 |
-rw-r--r-- | arch/arm/include/asm/barrier.h | 3 |
-rw-r--r-- | arch/arm/include/asm/bitops.h | 4 |
3 files changed, 4 insertions, 8 deletions
```diff
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index 9a92fd7864a8..3040359094d9 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -241,11 +241,6 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 
 #define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
 
-#define smp_mb__before_atomic_dec() smp_mb()
-#define smp_mb__after_atomic_dec() smp_mb()
-#define smp_mb__before_atomic_inc() smp_mb()
-#define smp_mb__after_atomic_inc() smp_mb()
-
 #ifndef CONFIG_GENERIC_ATOMIC64
 typedef struct {
 	long long counter;
diff --git a/arch/arm/include/asm/barrier.h b/arch/arm/include/asm/barrier.h
index 2f59f7443396..c6a3e73a6e24 100644
--- a/arch/arm/include/asm/barrier.h
+++ b/arch/arm/include/asm/barrier.h
@@ -79,5 +79,8 @@ do { \
 
 #define set_mb(var, value) do { var = value; smp_mb(); } while (0)
 
+#define smp_mb__before_atomic() smp_mb()
+#define smp_mb__after_atomic() smp_mb()
+
 #endif /* !__ASSEMBLY__ */
 #endif /* __ASM_BARRIER_H */
diff --git a/arch/arm/include/asm/bitops.h b/arch/arm/include/asm/bitops.h
index b2e298a90d76..56380995f4c3 100644
--- a/arch/arm/include/asm/bitops.h
+++ b/arch/arm/include/asm/bitops.h
@@ -25,9 +25,7 @@
 
 #include <linux/compiler.h>
 #include <linux/irqflags.h>
-
-#define smp_mb__before_clear_bit() smp_mb()
-#define smp_mb__after_clear_bit() smp_mb()
+#include <asm/barrier.h>
 
 /*
  * These functions are the basis of our bit ops.
```
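
On ARM, the barrier.h hunk above makes both new macros expand to a full smp_mb(). A common pattern where the "after" barrier matters is clearing a flag bit and then waking a waiter; the sketch below is illustrative only, and EXAMPLE_LOCK_BIT and example_wq are hypothetical names, not part of this merge:

```c
#include <linux/bitops.h>
#include <linux/wait.h>

#define EXAMPLE_LOCK_BIT 0			/* hypothetical flag bit */
static DECLARE_WAIT_QUEUE_HEAD(example_wq);	/* hypothetical wait queue */

static void example_unlock(unsigned long *flags)
{
	clear_bit(EXAMPLE_LOCK_BIT, flags);
	/*
	 * clear_bit() is not a barrier by itself; smp_mb__after_atomic()
	 * (a full smp_mb() on ARM) orders the bit clear before the wakeup
	 * path's check for waiters.
	 */
	smp_mb__after_atomic();
	wake_up(&example_wq);
}
```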