author    David S. Miller <davem@davemloft.net>  2008-11-16 00:33:25 +0300
committer David S. Miller <davem@davemloft.net>  2008-12-04 20:16:47 +0300
commit    293666b7a17cb7a389fc274980439212386a19c4 (patch)
tree      075cc7661d2113cf04da7130b3383979d8024206  /arch/sparc64/lib/atomic.S
parent    64f2dde3f743c8a1ad8c0a1aa74166c1034afd92 (diff)
download  linux-293666b7a17cb7a389fc274980439212386a19c4.tar.xz
sparc64: Stop using memory barriers for atomics and locks.
The kernel always executes in the TSO memory model now, so none of this stuff is necessary any more.

With helpful feedback from Nick Piggin.

Signed-off-by: David S. Miller <davem@davemloft.net>
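For illustration only, not part of this patch: a minimal C sketch of the compare-and-swap retry loop that atomic_add_ret and the other value-returning routines below implement. The function name and the GCC __sync_val_compare_and_swap builtin are assumptions made for the sketch, not kernel interfaces.

#include <stdint.h>

/* Illustrative model of the cas retry loop in atomic_add_ret:
 * read the old value, compute old + increment, and try to install it
 * with compare-and-swap; if another CPU changed the word first, retry
 * (the real code goes through BACKOFF_SPIN before retrying).
 */
static int32_t atomic_add_ret_sketch(int32_t increment, int32_t *ptr)
{
	for (;;) {
		int32_t old = *(volatile int32_t *)ptr;  /* lduw [%o1], %g1      */
		int32_t seen = __sync_val_compare_and_swap(ptr, old, old + increment);
		                                         /* cas  [%o1], %g1, %g7 */
		if (seen == old)                         /* cmp %g1, %g7; bne 2f */
			return old + increment;          /* new value, as the asm returns */
		/* lost the race with another CPU; retry */
	}
}

Before this patch, ATOMIC_PRE_BARRIER and ATOMIC_POST_BARRIER wrapped membar instructions around that loop on SMP builds; the diff below deletes them on the premise stated above, that the kernel now always executes in TSO and the barriers are no longer needed.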
Diffstat (limited to 'arch/sparc64/lib/atomic.S')
-rw-r--r--  arch/sparc64/lib/atomic.S  26
1 files changed, 0 insertions, 26 deletions
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index 70ac4186f62b..0268210ca168 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -43,29 +43,10 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
2: BACKOFF_SPIN(%o2, %o3, 1b)
.size atomic_sub, .-atomic_sub
- /* On SMP we need to use memory barriers to ensure
- * correct memory operation ordering, nop these out
- * for uniprocessor.
- */
-#ifdef CONFIG_SMP
-
-#define ATOMIC_PRE_BARRIER membar #StoreLoad | #LoadLoad;
-#define ATOMIC_POST_BARRIER \
- ba,pt %xcc, 80b; \
- membar #StoreLoad | #StoreStore
-
-80: retl
- nop
-#else
-#define ATOMIC_PRE_BARRIER
-#define ATOMIC_POST_BARRIER
-#endif
-
.globl atomic_add_ret
.type atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
BACKOFF_SETUP(%o2)
- ATOMIC_PRE_BARRIER
1: lduw [%o1], %g1
add %g1, %o0, %g7
cas [%o1], %g1, %g7
@@ -73,7 +54,6 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
bne,pn %icc, 2f
add %g7, %o0, %g7
sra %g7, 0, %o0
- ATOMIC_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o2, %o3, 1b)
@@ -83,7 +63,6 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
.type atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
BACKOFF_SETUP(%o2)
- ATOMIC_PRE_BARRIER
1: lduw [%o1], %g1
sub %g1, %o0, %g7
cas [%o1], %g1, %g7
@@ -91,7 +70,6 @@ atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
bne,pn %icc, 2f
sub %g7, %o0, %g7
sra %g7, 0, %o0
- ATOMIC_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o2, %o3, 1b)
@@ -131,7 +109,6 @@ atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
.type atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
BACKOFF_SETUP(%o2)
- ATOMIC_PRE_BARRIER
1: ldx [%o1], %g1
add %g1, %o0, %g7
casx [%o1], %g1, %g7
@@ -139,7 +116,6 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
bne,pn %xcc, 2f
add %g7, %o0, %g7
mov %g7, %o0
- ATOMIC_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o2, %o3, 1b)
@@ -149,7 +125,6 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
.type atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
BACKOFF_SETUP(%o2)
- ATOMIC_PRE_BARRIER
1: ldx [%o1], %g1
sub %g1, %o0, %g7
casx [%o1], %g1, %g7
@@ -157,7 +132,6 @@ atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
bne,pn %xcc, 2f
sub %g7, %o0, %g7
mov %g7, %o0
- ATOMIC_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o2, %o3, 1b)