author		Sergey Matyukevich <sergey.matyukevich@synopsys.com>	2022-02-22 17:05:24 +0300
committer	Vineet Gupta <vgupta@kernel.org>			2022-04-19 00:45:09 +0300
commit		ac411e41ec065daa867b5668b6e71ea1aff7b36a (patch)
tree		9aad4273d50bdf2472631936d6700ddb9d11c921 /arch/arc/include
parent		d139d0f0bfdabe5762214a96f3d5c4b88f524b41 (diff)
download	linux-ac411e41ec065daa867b5668b6e71ea1aff7b36a.tar.xz
ARC: atomic: cleanup atomic-llsc definitions
Remove redundant c_op macro argument. Only asm_op is needed to define
atomic operations using llock/scond.

Signed-off-by: Sergey Matyukevich <sergey.matyukevich@synopsys.com>
Signed-off-by: Vineet Gupta <vgupta@kernel.org>
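For context: in this file the operation is the stringified assembler mnemonic
itself, emitted inside an llock/scond retry loop, so the C-operator argument
was never referenced. A condensed sketch of ATOMIC_OP as it reads after the
change (paraphrased from atomic-llsc.h; constraint comments trimmed):

#define ATOMIC_OP(op, asm_op)						\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned int val;						\
									\
	__asm__ __volatile__(						\
	"1:	llock   %[val], [%[ctr]]		\n"		\
	"	" #asm_op " %[val], %[val], %[i]	\n"		\
	"	scond   %[val], [%[ctr]]		\n"		\
	"	bnz     1b				\n"		\
	: [val] "=&r" (val)	/* early clobber: rewritten on retry */	\
	: [ctr] "r" (&v->counter),	/* llock needs a register */	\
	  [i]   "ir" (i)						\
	: "cc");							\
}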
Diffstat (limited to 'arch/arc/include')
-rw-r--r--	arch/arc/include/asm/atomic-llsc.h	32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/arch/arc/include/asm/atomic-llsc.h b/arch/arc/include/asm/atomic-llsc.h
index 088d348781c1..1b0ffaeee16d 100644
--- a/arch/arc/include/asm/atomic-llsc.h
+++ b/arch/arc/include/asm/atomic-llsc.h
@@ -5,7 +5,7 @@
 
 #define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
 
-#define ATOMIC_OP(op, c_op, asm_op)					\
+#define ATOMIC_OP(op, asm_op)						\
 static inline void arch_atomic_##op(int i, atomic_t *v)		\
 {									\
 	unsigned int val;						\
@@ -21,7 +21,7 @@ static inline void arch_atomic_##op(int i, atomic_t *v)	\
 	: "cc");							\
 }									\
 
-#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
+#define ATOMIC_OP_RETURN(op, asm_op)					\
 static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
 {									\
 	unsigned int val;						\
@@ -42,7 +42,7 @@ static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
 #define arch_atomic_add_return_relaxed	arch_atomic_add_return_relaxed
 #define arch_atomic_sub_return_relaxed	arch_atomic_sub_return_relaxed
 
-#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
+#define ATOMIC_FETCH_OP(op, asm_op)					\
 static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v)	\
 {									\
 	unsigned int val, orig;						\
@@ -69,23 +69,23 @@ static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
 #define arch_atomic_fetch_or_relaxed	arch_atomic_fetch_or_relaxed
 #define arch_atomic_fetch_xor_relaxed	arch_atomic_fetch_xor_relaxed
 
-#define ATOMIC_OPS(op, c_op, asm_op)					\
-	ATOMIC_OP(op, c_op, asm_op)					\
-	ATOMIC_OP_RETURN(op, c_op, asm_op)				\
-	ATOMIC_FETCH_OP(op, c_op, asm_op)
+#define ATOMIC_OPS(op, asm_op)						\
+	ATOMIC_OP(op, asm_op)						\
+	ATOMIC_OP_RETURN(op, asm_op)					\
+	ATOMIC_FETCH_OP(op, asm_op)
 
-ATOMIC_OPS(add, +=, add)
-ATOMIC_OPS(sub, -=, sub)
+ATOMIC_OPS(add, add)
+ATOMIC_OPS(sub, sub)
 
 #undef ATOMIC_OPS
-#define ATOMIC_OPS(op, c_op, asm_op)					\
-	ATOMIC_OP(op, c_op, asm_op)					\
-	ATOMIC_FETCH_OP(op, c_op, asm_op)
+#define ATOMIC_OPS(op, asm_op)						\
+	ATOMIC_OP(op, asm_op)						\
+	ATOMIC_FETCH_OP(op, asm_op)
 
-ATOMIC_OPS(and, &=, and)
-ATOMIC_OPS(andnot, &= ~, bic)
-ATOMIC_OPS(or, |=, or)
-ATOMIC_OPS(xor, ^=, xor)
+ATOMIC_OPS(and, and)
+ATOMIC_OPS(andnot, bic)
+ATOMIC_OPS(or, or)
+ATOMIC_OPS(xor, xor)
 
 #define arch_atomic_andnot		arch_atomic_andnot
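For contrast, the dropped c_op argument is only meaningful in ARC's
lock-based fallback, where the operation really is a C operator applied
under a lock rather than an assembler mnemonic. A paraphrased sketch of
that pattern, assuming the atomic_ops_lock()/atomic_ops_unlock() helpers
used by arch/arc's spinlock variant (details elided):

/*
 * Sketch of the spinlock-based ATOMIC_OP: here c_op is genuinely
 * consumed ("v->counter += i" for add), which is why the argument
 * exists at all, and why it is dead weight in atomic-llsc.h.
 */
#define ATOMIC_OP(op, c_op, asm_op)					\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
									\
	atomic_ops_lock(flags);						\
	v->counter c_op i;						\
	atomic_ops_unlock(flags);					\
}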