From ac411e41ec065daa867b5668b6e71ea1aff7b36a Mon Sep 17 00:00:00 2001
From: Sergey Matyukevich
Date: Tue, 22 Feb 2022 17:05:24 +0300
Subject: [PATCH] ARC: atomic: cleanup atomic-llsc definitions

Remove redundant c_op macro argument. Only asm_op is needed
to define atomic operations using llock/scond.

Signed-off-by: Sergey Matyukevich
Signed-off-by: Vineet Gupta
---
 arch/arc/include/asm/atomic-llsc.h | 32 +++++++++++++++---------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/arch/arc/include/asm/atomic-llsc.h b/arch/arc/include/asm/atomic-llsc.h
index 088d348781c1..1b0ffaeee16d 100644
--- a/arch/arc/include/asm/atomic-llsc.h
+++ b/arch/arc/include/asm/atomic-llsc.h
@@ -5,7 +5,7 @@
 
 #define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
 
-#define ATOMIC_OP(op, c_op, asm_op) \
+#define ATOMIC_OP(op, asm_op) \
 static inline void arch_atomic_##op(int i, atomic_t *v) \
 { \
 	unsigned int val; \
@@ -21,7 +21,7 @@ static inline void arch_atomic_##op(int i, atomic_t *v) \
 	: "cc"); \
 } \
 
-#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
+#define ATOMIC_OP_RETURN(op, asm_op) \
 static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
 { \
 	unsigned int val; \
@@ -42,7 +42,7 @@ static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
 #define arch_atomic_add_return_relaxed arch_atomic_add_return_relaxed
 #define arch_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
 
-#define ATOMIC_FETCH_OP(op, c_op, asm_op) \
+#define ATOMIC_FETCH_OP(op, asm_op) \
 static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
 { \
 	unsigned int val, orig; \
@@ -69,23 +69,23 @@ static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
 #define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed
 #define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed
 
-#define ATOMIC_OPS(op, c_op, asm_op) \
-	ATOMIC_OP(op, c_op, asm_op) \
-	ATOMIC_OP_RETURN(op, c_op, asm_op) \
-	ATOMIC_FETCH_OP(op, c_op, asm_op)
+#define ATOMIC_OPS(op, asm_op) \
+	ATOMIC_OP(op, asm_op) \
+	ATOMIC_OP_RETURN(op, asm_op) \
+	ATOMIC_FETCH_OP(op, asm_op)
 
-ATOMIC_OPS(add, +=, add)
-ATOMIC_OPS(sub, -=, sub)
+ATOMIC_OPS(add, add)
+ATOMIC_OPS(sub, sub)
 
 #undef ATOMIC_OPS
-#define ATOMIC_OPS(op, c_op, asm_op) \
-	ATOMIC_OP(op, c_op, asm_op) \
-	ATOMIC_FETCH_OP(op, c_op, asm_op)
+#define ATOMIC_OPS(op, asm_op) \
+	ATOMIC_OP(op, asm_op) \
+	ATOMIC_FETCH_OP(op, asm_op)
 
-ATOMIC_OPS(and, &=, and)
-ATOMIC_OPS(andnot, &= ~, bic)
-ATOMIC_OPS(or, |=, or)
-ATOMIC_OPS(xor, ^=, xor)
+ATOMIC_OPS(and, and)
+ATOMIC_OPS(andnot, bic)
+ATOMIC_OPS(or, or)
+ATOMIC_OPS(xor, xor)
 
 #define arch_atomic_andnot arch_atomic_andnot
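
For context (not part of the patch): in the llock/scond variants the operation is
performed by the ARC instruction named by asm_op, so the C-level c_op argument was
never referenced in these macro bodies, which is what makes it redundant here. The
sketch below shows roughly what ATOMIC_OP(add, add) expands to after this change.
It is an approximation for explanation only; the hunk context above shows just
fragments of the asm body, so the exact template and operand constraints should be
taken from arch/arc/include/asm/atomic-llsc.h itself.

/*
 * Illustrative expansion of ATOMIC_OP(add, add) after this patch --
 * a sketch, not copied verbatim from the file.
 */
static inline void arch_atomic_add(int i, atomic_t *v)
{
	unsigned int val;

	__asm__ __volatile__(
	"1:	llock   %[val], [%[ctr]]	\n"	/* load-locked read of v->counter */
	"	add     %[val], %[val], %[i]	\n"	/* #asm_op stringified into the template */
	"	scond   %[val], [%[ctr]]	\n"	/* store-conditional write back */
	"	bnz     1b			\n"	/* retry if the reservation was lost */
	: [val] "=&r" (val)				/* early clobber result register */
	: [ctr] "r" (&v->counter),			/* llock/scond take a register address */
	  [i] "ir" (i)
	: "cc");
}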