author		Paul Burton <paul.burton@mips.com>	2019-10-02 00:53:31 +0300
committer	Paul Burton <paul.burton@mips.com>	2019-10-07 19:42:49 +0300
commit		a2e66b862cc7b8db1a0eb63b71e61943d48c3c8a (patch)
tree		8ff44c2c764642531e335e29cd00a4261204ae48
parent		6bbe043bd3f4766b089b7b51a80e75745868c038 (diff)
download	linux-a2e66b862cc7b8db1a0eb63b71e61943d48c3c8a.tar.xz
MIPS: bitops: Allow immediates in test_and_{set,clear,change}_bit
The logical operations or & xor used in the test_and_set_bit_lock(),
test_and_clear_bit() & test_and_change_bit() functions currently force the
value 1<<bit to be placed in a register. If the bit is compile-time constant
& fits within the immediate field of an or/xor instruction (ie. 16 bits)
then we can make use of the ori/xori instruction variants & avoid the use
of an extra register.

Add the extra "i" constraints in order to allow use of these immediate
encodings.

Signed-off-by: Paul Burton <paul.burton@mips.com>
Cc: linux-mips@vger.kernel.org
Cc: Huacai Chen <chenhc@lemote.com>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: linux-kernel@vger.kernel.org
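For illustration only (not part of the patch): a minimal sketch of what the
"ir" constraint permits. With "ir", GCC may satisfy the operand either with a
register or, when the mask is a compile-time constant that fits the 16-bit
immediate field, with an immediate that the assembler folds into ori. The
names below (demo_or, word, mask) are hypothetical.

/*
 * Illustrative sketch, assuming a MIPS target.  With the "ir" constraint,
 * a constant mask such as (1UL << 3) can be substituted directly into the
 * asm text and the assembler emits ori; a runtime-variable mask falls back
 * to the "r" alternative and uses a register, as before.
 */
static inline unsigned long demo_or(unsigned long word, unsigned long mask)
{
	unsigned long res;

	__asm__("or	%0, %1, %2\n"	/* becomes ori when %2 is a 16-bit immediate */
		: "=r" (res)
		: "r" (word), "ir" (mask));

	return res;
}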
-rw-r--r--	arch/mips/include/asm/bitops.h	12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index ea35a2e87b6d..7314ba5a3683 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -261,7 +261,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
" and %2, %0, %3 \n"
" .set pop \n"
: "=&r" (temp), "+m" (*m), "=&r" (res)
- : "r" (1UL << bit)
+ : "ir" (1UL << bit)
: __LLSC_CLOBBER);
} else {
loongson_llsc_mb();
@@ -274,7 +274,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
" " __SC "%2, %1 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
- : "r" (1UL << bit)
+ : "ir" (1UL << bit)
: __LLSC_CLOBBER);
} while (unlikely(!res));
@@ -332,7 +332,7 @@ static inline int test_and_clear_bit(unsigned long nr,
" and %2, %0, %3 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
- : "r" (1UL << bit)
+ : "ir" (1UL << bit)
: __LLSC_CLOBBER);
} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
loongson_llsc_mb();
@@ -358,7 +358,7 @@ static inline int test_and_clear_bit(unsigned long nr,
" " __SC "%2, %1 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
- : "r" (1UL << bit)
+ : "ir" (1UL << bit)
: __LLSC_CLOBBER);
} while (unlikely(!res));
@@ -400,7 +400,7 @@ static inline int test_and_change_bit(unsigned long nr,
" and %2, %0, %3 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
- : "r" (1UL << bit)
+ : "ir" (1UL << bit)
: __LLSC_CLOBBER);
} else {
loongson_llsc_mb();
@@ -413,7 +413,7 @@ static inline int test_and_change_bit(unsigned long nr,
" " __SC "\t%2, %1 \n"
" .set pop \n"
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
- : "r" (1UL << bit)
+ : "ir" (1UL << bit)
: __LLSC_CLOBBER);
} while (unlikely(!res));