Diffstat (limited to 'arch/mips/include/asm/bitops.h')
-rw-r--r-- | arch/mips/include/asm/bitops.h | 443
1 file changed, 127 insertions, 316 deletions
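The core of this change is a pair of new helpers, __bit_op() and __test_bit_op(), which wrap the LL/SC retry loop once so that each bit operation only has to supply the instruction(s) to run between the load-linked and the store-conditional. For orientation before the hunks below, this is roughly how the rewritten set_bit() reads once the patch is applied -- a sketch assembled from the added lines in the diff, with indentation and comments approximated:

static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        volatile unsigned long *m = &addr[BIT_WORD(nr)];
        int bit = nr % BITS_PER_LONG;

        /* CPUs without LL/SC fall back to the IRQ-disabling C helper */
        if (!kernel_uses_llsc) {
                __mips_set_bit(nr, addr);
                return;
        }

        /* MIPSr2+ and a constant bit index of 16 or above: a single ins */
        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
                __bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
                return;
        }

        /* Generic path: OR the bit in inside __bit_op()'s LL/SC loop */
        __bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
}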
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index 985d6a02f9ea..a74769940fbd 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -13,16 +13,55 @@
 #error only <linux/bitops.h> can be included directly
 #endif

+#include <linux/bits.h>
 #include <linux/compiler.h>
 #include <linux/types.h>
 #include <asm/barrier.h>
 #include <asm/byteorder.h>	/* sigh ... */
 #include <asm/compiler.h>
 #include <asm/cpu-features.h>
+#include <asm/isa-rev.h>
 #include <asm/llsc.h>
 #include <asm/sgidefs.h>
 #include <asm/war.h>

+#define __bit_op(mem, insn, inputs...) do { \
+        unsigned long temp; \
+        \
+        asm volatile( \
+        " .set push \n" \
+        " .set " MIPS_ISA_LEVEL " \n" \
+        " " __SYNC(full, loongson3_war) " \n" \
+        "1: " __LL "%0, %1 \n" \
+        " " insn " \n" \
+        " " __SC "%0, %1 \n" \
+        " " __SC_BEQZ "%0, 1b \n" \
+        " .set pop \n" \
+        : "=&r"(temp), "+" GCC_OFF_SMALL_ASM()(mem) \
+        : inputs \
+        : __LLSC_CLOBBER); \
+} while (0)
+
+#define __test_bit_op(mem, ll_dst, insn, inputs...) ({ \
+        unsigned long orig, temp; \
+        \
+        asm volatile( \
+        " .set push \n" \
+        " .set " MIPS_ISA_LEVEL " \n" \
+        " " __SYNC(full, loongson3_war) " \n" \
+        "1: " __LL ll_dst ", %2 \n" \
+        " " insn " \n" \
+        " " __SC "%1, %2 \n" \
+        " " __SC_BEQZ "%1, 1b \n" \
+        " .set pop \n" \
+        : "=&r"(orig), "=&r"(temp), \
+          "+" GCC_OFF_SMALL_ASM()(mem) \
+        : inputs \
+        : __LLSC_CLOBBER); \
+        \
+        orig; \
+})
+
 /*
  * These are the "slower" versions of the functions and are in bitops.c.
  * These functions call raw_local_irq_{save,restore}().
@@ -30,8 +69,6 @@
 void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
-int __mips_test_and_set_bit(unsigned long nr,
-                volatile unsigned long *addr);
 int __mips_test_and_set_bit_lock(unsigned long nr,
                 volatile unsigned long *addr);
 int __mips_test_and_clear_bit(unsigned long nr,
@@ -52,51 +89,20 @@ int __mips_test_and_change_bit(unsigned long nr,
  */
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-        int bit = nr & SZLONG_MASK;
-        unsigned long temp;
+        volatile unsigned long *m = &addr[BIT_WORD(nr)];
+        int bit = nr % BITS_PER_LONG;

-        if (kernel_uses_llsc && R10000_LLSC_WAR) {
-                __asm__ __volatile__(
-                " .set push \n"
-                " .set arch=r4000 \n"
-                "1: " __LL "%0, %1 # set_bit \n"
-                " or %0, %2 \n"
-                " " __SC "%0, %1 \n"
-                " beqzl %0, 1b \n"
-                " .set pop \n"
-                : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
-                : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m)
-                : __LLSC_CLOBBER);
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " " __LL "%0, %1 # set_bit \n"
-                        " " __INS "%0, %3, %2, 1 \n"
-                        " " __SC "%0, %1 \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                        : "ir" (bit), "r" (~0)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!temp));
-#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-        } else if (kernel_uses_llsc) {
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " .set push \n"
-                        " .set "MIPS_ISA_ARCH_LEVEL" \n"
-                        " " __LL "%0, %1 # set_bit \n"
-                        " or %0, %2 \n"
-                        " " __SC "%0, %1 \n"
-                        " .set pop \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                        : "ir" (1UL << bit)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!temp));
-        } else
+        if (!kernel_uses_llsc) {
                 __mips_set_bit(nr, addr);
+                return;
+        }
+
+        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
+                __bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
+                return;
+        }
+
+        __bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
 }

 /*
@@ -111,51 +117,20 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-        int bit = nr & SZLONG_MASK;
-        unsigned long temp;
+        volatile unsigned long *m = &addr[BIT_WORD(nr)];
+        int bit = nr % BITS_PER_LONG;

-        if (kernel_uses_llsc && R10000_LLSC_WAR) {
-                __asm__ __volatile__(
-                " .set push \n"
-                " .set arch=r4000 \n"
-                "1: " __LL "%0, %1 # clear_bit \n"
-                " and %0, %2 \n"
-                " " __SC "%0, %1 \n"
-                " beqzl %0, 1b \n"
-                " .set pop \n"
-                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                : "ir" (~(1UL << bit))
-                : __LLSC_CLOBBER);
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " " __LL "%0, %1 # clear_bit \n"
-                        " " __INS "%0, $0, %2, 1 \n"
-                        " " __SC "%0, %1 \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                        : "ir" (bit)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!temp));
-#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-        } else if (kernel_uses_llsc) {
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " .set push \n"
-                        " .set "MIPS_ISA_ARCH_LEVEL" \n"
-                        " " __LL "%0, %1 # clear_bit \n"
-                        " and %0, %2 \n"
-                        " " __SC "%0, %1 \n"
-                        " .set pop \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                        : "ir" (~(1UL << bit))
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!temp));
-        } else
+        if (!kernel_uses_llsc) {
                 __mips_clear_bit(nr, addr);
+                return;
+        }
+
+        if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
+                __bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
+                return;
+        }
+
+        __bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
 }

 /*
@@ -183,159 +158,61 @@ static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *ad
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-        int bit = nr & SZLONG_MASK;
-
-        if (kernel_uses_llsc && R10000_LLSC_WAR) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                __asm__ __volatile__(
-                " .set push \n"
-                " .set arch=r4000 \n"
-                "1: " __LL "%0, %1 # change_bit \n"
-                " xor %0, %2 \n"
-                " " __SC "%0, %1 \n"
-                " beqzl %0, 1b \n"
-                " .set pop \n"
-                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                : "ir" (1UL << bit)
-                : __LLSC_CLOBBER);
-        } else if (kernel_uses_llsc) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " .set push \n"
-                        " .set "MIPS_ISA_ARCH_LEVEL" \n"
-                        " " __LL "%0, %1 # change_bit \n"
-                        " xor %0, %2 \n"
-                        " " __SC "%0, %1 \n"
-                        " .set pop \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-                        : "ir" (1UL << bit)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!temp));
-        } else
+        volatile unsigned long *m = &addr[BIT_WORD(nr)];
+        int bit = nr % BITS_PER_LONG;
+
+        if (!kernel_uses_llsc) {
                 __mips_change_bit(nr, addr);
+                return;
+        }
+
+        __bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
 }

 /*
- * test_and_set_bit - Set a bit and return its old value
+ * test_and_set_bit_lock - Set a bit and return its old value
  * @nr: Bit to set
  * @addr: Address to count from
  *
- * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
+ * This operation is atomic and implies acquire ordering semantics
+ * after the memory operation.
  */
-static inline int test_and_set_bit(unsigned long nr,
+static inline int test_and_set_bit_lock(unsigned long nr,
         volatile unsigned long *addr)
 {
-        int bit = nr & SZLONG_MASK;
-        unsigned long res;
+        volatile unsigned long *m = &addr[BIT_WORD(nr)];
+        int bit = nr % BITS_PER_LONG;
+        unsigned long res, orig;

-        smp_mb__before_llsc();
-
-        if (kernel_uses_llsc && R10000_LLSC_WAR) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                __asm__ __volatile__(
-                " .set push \n"
-                " .set arch=r4000 \n"
-                "1: " __LL "%0, %1 # test_and_set_bit \n"
-                " or %2, %0, %3 \n"
-                " " __SC "%2, %1 \n"
-                " beqzl %2, 1b \n"
-                " and %2, %0, %3 \n"
-                " .set pop \n"
-                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                : "r" (1UL << bit)
-                : __LLSC_CLOBBER);
-        } else if (kernel_uses_llsc) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " .set push \n"
-                        " .set "MIPS_ISA_ARCH_LEVEL" \n"
-                        " " __LL "%0, %1 # test_and_set_bit \n"
-                        " or %2, %0, %3 \n"
-                        " " __SC "%2, %1 \n"
-                        " .set pop \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                        : "r" (1UL << bit)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!res));
-
-                res = temp & (1UL << bit);
-        } else
-                res = __mips_test_and_set_bit(nr, addr);
+        if (!kernel_uses_llsc) {
+                res = __mips_test_and_set_bit_lock(nr, addr);
+        } else {
+                orig = __test_bit_op(*m, "%0",
+                                     "or\t%1, %0, %3",
+                                     "ir"(BIT(bit)));
+                res = (orig & BIT(bit)) != 0;
+        }

         smp_llsc_mb();

-        return res != 0;
+        return res;
 }

 /*
- * test_and_set_bit_lock - Set a bit and return its old value
+ * test_and_set_bit - Set a bit and return its old value
  * @nr: Bit to set
  * @addr: Address to count from
  *
- * This operation is atomic and implies acquire ordering semantics
- * after the memory operation.
+ * This operation is atomic and cannot be reordered.
+ * It also implies a memory barrier.
  */
-static inline int test_and_set_bit_lock(unsigned long nr,
+static inline int test_and_set_bit(unsigned long nr,
         volatile unsigned long *addr)
 {
-        int bit = nr & SZLONG_MASK;
-        unsigned long res;
-
-        if (kernel_uses_llsc && R10000_LLSC_WAR) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                __asm__ __volatile__(
-                " .set push \n"
-                " .set arch=r4000 \n"
-                "1: " __LL "%0, %1 # test_and_set_bit \n"
-                " or %2, %0, %3 \n"
-                " " __SC "%2, %1 \n"
-                " beqzl %2, 1b \n"
-                " and %2, %0, %3 \n"
-                " .set pop \n"
-                : "=&r" (temp), "+m" (*m), "=&r" (res)
-                : "r" (1UL << bit)
-                : __LLSC_CLOBBER);
-        } else if (kernel_uses_llsc) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " .set push \n"
-                        " .set "MIPS_ISA_ARCH_LEVEL" \n"
-                        " " __LL "%0, %1 # test_and_set_bit \n"
-                        " or %2, %0, %3 \n"
-                        " " __SC "%2, %1 \n"
-                        " .set pop \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                        : "r" (1UL << bit)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!res));
-
-                res = temp & (1UL << bit);
-        } else
-                res = __mips_test_and_set_bit_lock(nr, addr);
-
-        smp_llsc_mb();
-
-        return res != 0;
+        smp_mb__before_atomic();
+        return test_and_set_bit_lock(nr, addr);
 }
+
 /*
  * test_and_clear_bit - Clear a bit and return its old value
  * @nr: Bit to clear
@@ -347,71 +224,30 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 static inline int test_and_clear_bit(unsigned long nr,
         volatile unsigned long *addr)
 {
-        int bit = nr & SZLONG_MASK;
-        unsigned long res;
-
-        smp_mb__before_llsc();
+        volatile unsigned long *m = &addr[BIT_WORD(nr)];
+        int bit = nr % BITS_PER_LONG;
+        unsigned long res, orig;

-        if (kernel_uses_llsc && R10000_LLSC_WAR) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
+        smp_mb__before_atomic();

-                __asm__ __volatile__(
-                " .set push \n"
-                " .set arch=r4000 \n"
-                "1: " __LL "%0, %1 # test_and_clear_bit \n"
-                " or %2, %0, %3 \n"
-                " xor %2, %3 \n"
-                " " __SC "%2, %1 \n"
-                " beqzl %2, 1b \n"
-                " and %2, %0, %3 \n"
-                " .set pop \n"
-                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                : "r" (1UL << bit)
-                : __LLSC_CLOBBER);
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-        } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " " __LL "%0, %1 # test_and_clear_bit \n"
-                        " " __EXT "%2, %0, %3, 1 \n"
-                        " " __INS "%0, $0, %3, 1 \n"
-                        " " __SC "%0, %1 \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                        : "ir" (bit)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!temp));
-#endif
-        } else if (kernel_uses_llsc) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " .set push \n"
-                        " .set "MIPS_ISA_ARCH_LEVEL" \n"
-                        " " __LL "%0, %1 # test_and_clear_bit \n"
-                        " or %2, %0, %3 \n"
-                        " xor %2, %3 \n"
-                        " " __SC "%2, %1 \n"
-                        " .set pop \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                        : "r" (1UL << bit)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!res));
-
-                res = temp & (1UL << bit);
-        } else
+        if (!kernel_uses_llsc) {
                 res = __mips_test_and_clear_bit(nr, addr);
+        } else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
+                res = __test_bit_op(*m, "%1",
+                                    __EXT "%0, %1, %3, 1;"
+                                    __INS "%1, $0, %3, 1",
+                                    "i"(bit));
+        } else {
+                orig = __test_bit_op(*m, "%0",
+                                     "or\t%1, %0, %3;"
+                                     "xor\t%1, %1, %3",
+                                     "ir"(BIT(bit)));
+                res = (orig & BIT(bit)) != 0;
+        }

         smp_llsc_mb();

-        return res != 0;
+        return res;
 }

 /*
@@ -425,54 +261,29 @@ static inline int test_and_clear_bit(unsigned long nr,
 static inline int test_and_change_bit(unsigned long nr,
         volatile unsigned long *addr)
 {
-        int bit = nr & SZLONG_MASK;
-        unsigned long res;
+        volatile unsigned long *m = &addr[BIT_WORD(nr)];
+        int bit = nr % BITS_PER_LONG;
+        unsigned long res, orig;

-        smp_mb__before_llsc();
-
-        if (kernel_uses_llsc && R10000_LLSC_WAR) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
+        smp_mb__before_atomic();

-                __asm__ __volatile__(
-                " .set push \n"
-                " .set arch=r4000 \n"
-                "1: " __LL "%0, %1 # test_and_change_bit \n"
-                " xor %2, %0, %3 \n"
-                " " __SC "%2, %1 \n"
-                " beqzl %2, 1b \n"
-                " and %2, %0, %3 \n"
-                " .set pop \n"
-                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                : "r" (1UL << bit)
-                : __LLSC_CLOBBER);
-        } else if (kernel_uses_llsc) {
-                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-                unsigned long temp;
-
-                loongson_llsc_mb();
-                do {
-                        __asm__ __volatile__(
-                        " .set push \n"
-                        " .set "MIPS_ISA_ARCH_LEVEL" \n"
-                        " " __LL "%0, %1 # test_and_change_bit \n"
-                        " xor %2, %0, %3 \n"
-                        " " __SC "\t%2, %1 \n"
-                        " .set pop \n"
-                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-                        : "r" (1UL << bit)
-                        : __LLSC_CLOBBER);
-                } while (unlikely(!res));
-
-                res = temp & (1UL << bit);
-        } else
+        if (!kernel_uses_llsc) {
                 res = __mips_test_and_change_bit(nr, addr);
+        } else {
+                orig = __test_bit_op(*m, "%0",
+                                     "xor\t%1, %0, %3",
+                                     "ir"(BIT(bit)));
+                res = (orig & BIT(bit)) != 0;
+        }

         smp_llsc_mb();

-        return res != 0;
+        return res;
 }

+#undef __bit_op
+#undef __test_bit_op
+
 #include <asm-generic/bitops/non-atomic.h>

 /*