author     Nick Piggin <npiggin@suse.de>                         2007-10-13 05:06:55 +0400
committer  Linus Torvalds <torvalds@woody.linux-foundation.org>  2007-10-13 05:41:21 +0400
commit     4071c718555d955a35e9651f77086096ad87d498 (patch)
tree       5189a9995143892a8ef7ecfc04c11c1d546bc21b /include/asm-x86
parent     df1bdc0667eb3132fe60b3562347ca9133694ee0 (diff)
x86: fix IO write barrier
wmb() on x86 must always include a barrier, because stores can go out of
order in many cases when dealing with devices (e.g. WC memory).

Signed-off-by: Nick Piggin <npiggin@suse.de>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
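The pattern this protects is the usual driver handoff: descriptors are written through a write-combining (WC) mapping and the device is then told to fetch them via an MMIO doorbell. A minimal sketch of that pattern follows; the device, descriptor, and register names (my_dev, my_desc, MY_DOORBELL) are hypothetical and not taken from the patch:

/* Hypothetical driver fragment: stores to WC memory can be combined and
 * reordered, so a compiler-only wmb() could let the doorbell write reach
 * the device before the descriptor does. */
static void post_descriptor(struct my_dev *dev, struct my_desc *desc,
			    u64 addr, u32 len)
{
	desc->addr  = addr;		/* descriptor lives in WC-mapped memory */
	desc->len   = len;
	desc->flags = MY_DESC_READY;

	wmb();				/* order the WC stores before the doorbell */

	writel(1, dev->mmio + MY_DOORBELL);	/* device may start DMA now */
}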
Diffstat (limited to 'include/asm-x86')
-rw-r--r--  include/asm-x86/system_32.h  15
-rw-r--r--  include/asm-x86/system_64.h   6
2 files changed, 7 insertions, 14 deletions
diff --git a/include/asm-x86/system_32.h b/include/asm-x86/system_32.h
index d69ba937e092..8b15bd3057c9 100644
--- a/include/asm-x86/system_32.h
+++ b/include/asm-x86/system_32.h
@@ -216,6 +216,7 @@ static inline unsigned long get_limit(unsigned long segment)
#define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
#define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
+#define wmb() alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)
/**
* read_barrier_depends - Flush all pending reads that subsequents reads
@@ -271,18 +272,14 @@ static inline unsigned long get_limit(unsigned long segment)
#define read_barrier_depends() do { } while(0)
-#ifdef CONFIG_X86_OOSTORE
-/* Actually there are no OOO store capable CPUs for now that do SSE,
- but make it already an possibility. */
-#define wmb() alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)
-#else
-#define wmb() __asm__ __volatile__ ("": : :"memory")
-#endif
-
#ifdef CONFIG_SMP
#define smp_mb() mb()
#define smp_rmb() rmb()
-#define smp_wmb() wmb()
+#ifdef CONFIG_X86_OOSTORE
+# define smp_wmb() wmb()
+#else
+# define smp_wmb() barrier()
+#endif
#define smp_read_barrier_depends() read_barrier_depends()
#define set_mb(var, value) do { (void) xchg(&var, value); } while (0)
#else
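On 32-bit, the hunk above makes wmb() unconditionally a real store barrier: alternative() patches in sfence on SSE-capable CPUs (X86_FEATURE_XMM) and falls back to a locked add on older ones, while smp_wmb() stays a compiler-only barrier() unless CONFIG_X86_OOSTORE is set, since ordinary cacheable stores are not reordered between x86 CPUs. A rough hand-expansion of what the macros reduce to (the wmb_sse()/wmb_locked() names are illustrative only; the real code selects the instruction at boot via alternative()):

/* Sketch of the post-patch 32-bit barriers, ignoring runtime patching. */
#define wmb_sse()	asm volatile("sfence" ::: "memory")			/* CPUs with SSE */
#define wmb_locked()	asm volatile("lock; addl $0,0(%%esp)" ::: "memory")	/* pre-SSE CPUs  */
#define barrier()	asm volatile("" ::: "memory")		/* compiler-only; smp_wmb() without OOSTORE */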
diff --git a/include/asm-x86/system_64.h b/include/asm-x86/system_64.h
index 02175aa1d16a..eff730b11926 100644
--- a/include/asm-x86/system_64.h
+++ b/include/asm-x86/system_64.h
@@ -159,12 +159,8 @@ static inline void write_cr8(unsigned long val)
*/
#define mb() asm volatile("mfence":::"memory")
#define rmb() asm volatile("lfence":::"memory")
-
-#ifdef CONFIG_UNORDERED_IO
#define wmb() asm volatile("sfence" ::: "memory")
-#else
-#define wmb() asm volatile("" ::: "memory")
-#endif
+
#define read_barrier_depends() do {} while(0)
#define set_mb(var, value) do { (void) xchg(&var, value); } while (0)
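On 64-bit the change is the mirror image: the CONFIG_UNORDERED_IO conditional goes away and wmb() is always sfence. For contrast with the doorbell sketch above, here is a hypothetical CPU-to-CPU publish over ordinary cacheable memory, where smp_wmb()/smp_rmb() are the right primitives (data_buf, data_ready, and process() are illustrative names):

/* Hypothetical producer/consumer in normal RAM: no device or WC mapping
 * is involved, so plain SMP barriers suffice. */
static u32 data_buf[16];
static int data_ready;

void producer(const u32 *src)
{
	memcpy(data_buf, src, sizeof(data_buf));	/* plain cacheable stores */
	smp_wmb();					/* order buffer before flag */
	data_ready = 1;
}

void consumer(void)
{
	if (data_ready) {
		smp_rmb();				/* pairs with producer's smp_wmb() */
		process(data_buf);			/* consume published data */
	}
}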