author     Andi Kleen <ak@suse.de>	2007-01-11 03:52:44 +0300
committer  Andi Kleen <andi@basil.nowhere.org>	2007-01-11 03:52:44 +0300
commit     24420760c3701ff422b344e047a20ca09b76fc64 (patch)
tree       0dabf46c0825c632775705185244c31b821cdd4a /include/asm-x86_64
parent     343cde51b3b856470eea24a89f00166b8e2d7272 (diff)
download   linux-24420760c3701ff422b344e047a20ca09b76fc64.tar.xz
[PATCH] x86-64: Use different constraint for gcc < 4.1 in bitops.h
"+m" is really correct for a RMW instruction, but some older gccs error
out.  I finally gave in and ifdefed it.  This fixes compilation errors
with some compiler versions.

Signed-off-by: Andi Kleen <ak@suse.de>
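As a standalone illustration of the constraint issue (not part of the commit; set_bit_rmw() and the test in main() are made-up names), the sketch below mirrors the ifdef this patch adds: bts both reads and writes its memory operand, so "+m" is the accurate constraint, while the "=m" fallback chosen for gcc < 4.1 understates the dependency and is only safe together with the "memory" clobber:

/* Minimal user-space sketch, x86 only; compile with e.g. gcc -O2. */
#include <stdio.h>

static inline void set_bit_rmw(int nr, volatile unsigned long *addr)
{
#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
	/* Older gcc: output-only memory constraint; the "memory" clobber
	   keeps the compiler from reusing a stale copy of *addr. */
	__asm__ __volatile__("btsl %1,%0"
			     : "=m" (*addr)
			     : "Ir" (nr)
			     : "memory");
#else
	/* gcc >= 4.1: describe the operand as read-modify-write. */
	__asm__ __volatile__("btsl %1,%0"
			     : "+m" (*addr)
			     : "Ir" (nr)
			     : "memory");
#endif
}

int main(void)
{
	volatile unsigned long word = 0;

	set_bit_rmw(3, &word);
	printf("word = %#lx\n", (unsigned long)word);	/* prints 0x8 */
	return 0;
}

Whichever branch the preprocessor picks, the emitted btsl is the same; only the information given to the compiler about the memory operand differs.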
Diffstat (limited to 'include/asm-x86_64')
-rw-r--r--	include/asm-x86_64/bitops.h	34
1 file changed, 20 insertions(+), 14 deletions(-)
diff --git a/include/asm-x86_64/bitops.h b/include/asm-x86_64/bitops.h
index 5b535eaf5309..8da9609070f4 100644
--- a/include/asm-x86_64/bitops.h
+++ b/include/asm-x86_64/bitops.h
@@ -7,7 +7,13 @@
#include <asm/alternative.h>
-#define ADDR (*(volatile long *) addr)
+#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 1)
+/* Technically wrong, but this avoids compilation errors on some gcc
+ versions. */
+#define ADDR "=m" (*(volatile long *) addr)
+#else
+#define ADDR "+m" (*(volatile long *) addr)
+#endif
/**
* set_bit - Atomically set a bit in memory
@@ -23,7 +29,7 @@ static __inline__ void set_bit(int nr, volatile void * addr)
{
__asm__ __volatile__( LOCK_PREFIX
"btsl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr) : "memory");
}
@@ -40,7 +46,7 @@ static __inline__ void __set_bit(int nr, volatile void * addr)
{
__asm__ volatile(
"btsl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr) : "memory");
}
@@ -58,7 +64,7 @@ static __inline__ void clear_bit(int nr, volatile void * addr)
{
__asm__ __volatile__( LOCK_PREFIX
"btrl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr));
}
@@ -66,7 +72,7 @@ static __inline__ void __clear_bit(int nr, volatile void * addr)
{
__asm__ __volatile__(
"btrl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr));
}
@@ -86,7 +92,7 @@ static __inline__ void __change_bit(int nr, volatile void * addr)
{
__asm__ __volatile__(
"btcl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr));
}
@@ -103,7 +109,7 @@ static __inline__ void change_bit(int nr, volatile void * addr)
{
__asm__ __volatile__( LOCK_PREFIX
"btcl %1,%0"
- :"+m" (ADDR)
+ :ADDR
:"dIr" (nr));
}
@@ -121,7 +127,7 @@ static __inline__ int test_and_set_bit(int nr, volatile void * addr)
__asm__ __volatile__( LOCK_PREFIX
"btsl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr) : "memory");
return oldbit;
}
@@ -141,7 +147,7 @@ static __inline__ int __test_and_set_bit(int nr, volatile void * addr)
__asm__(
"btsl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr));
return oldbit;
}
@@ -160,7 +166,7 @@ static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
__asm__ __volatile__( LOCK_PREFIX
"btrl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr) : "memory");
return oldbit;
}
@@ -180,7 +186,7 @@ static __inline__ int __test_and_clear_bit(int nr, volatile void * addr)
__asm__(
"btrl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr));
return oldbit;
}
@@ -192,7 +198,7 @@ static __inline__ int __test_and_change_bit(int nr, volatile void * addr)
__asm__ __volatile__(
"btcl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr) : "memory");
return oldbit;
}
@@ -211,7 +217,7 @@ static __inline__ int test_and_change_bit(int nr, volatile void * addr)
__asm__ __volatile__( LOCK_PREFIX
"btcl %2,%1\n\tsbbl %0,%0"
- :"=r" (oldbit),"+m" (ADDR)
+ :"=r" (oldbit),ADDR
:"dIr" (nr) : "memory");
return oldbit;
}
@@ -237,7 +243,7 @@ static __inline__ int variable_test_bit(int nr, volatile const void * addr)
__asm__ __volatile__(
"btl %2,%1\n\tsbbl %0,%0"
:"=r" (oldbit)
- :"m" (ADDR),"dIr" (nr));
+ :"m" (*(volatile long *)addr),"dIr" (nr));
return oldbit;
}
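For contrast with the RMW cases, the final hunk's variable_test_bit() only reads memory, so once ADDR carries a full constraint/operand pair it can no longer be reused there and the patch open-codes a plain "m" input operand instead. A user-space sketch of that read-only pattern (test_bit_ro() and main() are illustrative names, not kernel code):

#include <stdio.h>

static inline int test_bit_ro(int nr, const volatile unsigned long *addr)
{
	int oldbit;

	/* bt only reads *addr, so a plain "m" input constraint is accurate;
	   sbbl then turns the carry flag into 0 or -1 in oldbit. */
	__asm__ __volatile__("btl %2,%1\n\tsbbl %0,%0"
			     : "=r" (oldbit)
			     : "m" (*addr), "Ir" (nr));
	return oldbit;
}

int main(void)
{
	volatile unsigned long word = 1UL << 5;

	printf("bit 5: %d\n", test_bit_ro(5, &word) ? 1 : 0);	/* prints 1 */
	printf("bit 6: %d\n", test_bit_ro(6, &word) ? 1 : 0);	/* prints 0 */
	return 0;
}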