
Commit ac411e4

Sergey Matyukevich authored and vineetgarc committed
ARC: atomic: cleanup atomic-llsc definitions
Remove redundant c_op macro argument. Only asm_op is needed to define
atomic operations using llock/scond.

Signed-off-by: Sergey Matyukevich <sergey.matyukevich@synopsys.com>
Signed-off-by: Vineet Gupta <vgupta@kernel.org>
1 parent d139d0f commit ac411e4
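For context, the llock/scond variants build every operation out of an inline-asm retry loop, and the only per-operation token that loop needs is the instruction mnemonic passed as asm_op. A rough sketch of the pattern (operand constraints and exact formatting are approximate, not the literal header contents):

#define ATOMIC_OP(op, asm_op)						\
static inline void arch_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned int val;						\
									\
	__asm__ __volatile__(						\
	"1:	llock   %[val], [%[ctr]]		\n"		\
	"	" #asm_op " %[val], %[val], %[i]	\n"		\
	"	scond   %[val], [%[ctr]]		\n"		\
	"	bnz     1b				\n"		\
	: [val] "=&r" (val)	/* early clobber: reloaded each retry */ \
	: [ctr] "r" (&v->counter),					\
	  [i]   "ir" (i)						\
	: "cc");							\
}

The c_op argument (a C operator such as +=) is only meaningful for implementations that compute the result in C; it is never referenced in this asm-based body, which is why it can be dropped.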

File tree

1 file changed: 16 additions, 16 deletions


arch/arc/include/asm/atomic-llsc.h

Lines changed: 16 additions & 16 deletions
@@ -5,7 +5,7 @@
 
 #define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
 
-#define ATOMIC_OP(op, c_op, asm_op) \
+#define ATOMIC_OP(op, asm_op) \
 static inline void arch_atomic_##op(int i, atomic_t *v) \
 { \
 	unsigned int val; \
@@ -21,7 +21,7 @@ static inline void arch_atomic_##op(int i, atomic_t *v) \
 	: "cc"); \
 } \
 
-#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
+#define ATOMIC_OP_RETURN(op, asm_op) \
 static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
 { \
 	unsigned int val; \
@@ -42,7 +42,7 @@ static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
 #define arch_atomic_add_return_relaxed arch_atomic_add_return_relaxed
 #define arch_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
 
-#define ATOMIC_FETCH_OP(op, c_op, asm_op) \
+#define ATOMIC_FETCH_OP(op, asm_op) \
 static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
 { \
 	unsigned int val, orig; \
@@ -69,23 +69,23 @@ static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
 #define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed
 #define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor_relaxed
 
-#define ATOMIC_OPS(op, c_op, asm_op) \
-	ATOMIC_OP(op, c_op, asm_op) \
-	ATOMIC_OP_RETURN(op, c_op, asm_op) \
-	ATOMIC_FETCH_OP(op, c_op, asm_op)
+#define ATOMIC_OPS(op, asm_op) \
+	ATOMIC_OP(op, asm_op) \
+	ATOMIC_OP_RETURN(op, asm_op) \
+	ATOMIC_FETCH_OP(op, asm_op)
 
-ATOMIC_OPS(add, +=, add)
-ATOMIC_OPS(sub, -=, sub)
+ATOMIC_OPS(add, add)
+ATOMIC_OPS(sub, sub)
 
 #undef ATOMIC_OPS
-#define ATOMIC_OPS(op, c_op, asm_op) \
-	ATOMIC_OP(op, c_op, asm_op) \
-	ATOMIC_FETCH_OP(op, c_op, asm_op)
+#define ATOMIC_OPS(op, asm_op) \
+	ATOMIC_OP(op, asm_op) \
+	ATOMIC_FETCH_OP(op, asm_op)
 
-ATOMIC_OPS(and, &=, and)
-ATOMIC_OPS(andnot, &= ~, bic)
-ATOMIC_OPS(or, |=, or)
-ATOMIC_OPS(xor, ^=, xor)
+ATOMIC_OPS(and, and)
+ATOMIC_OPS(andnot, bic)
+ATOMIC_OPS(or, or)
+ATOMIC_OPS(xor, xor)
 
 #define arch_atomic_andnot arch_atomic_andnot
 
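With the two-argument form, each instantiation only names the operation and the ARC instruction that implements it. As an illustrative, hand-annotated summary of what the wrappers in this diff generate (the comments are explanatory, not part of the header):

ATOMIC_OPS(add, add)	/* first ATOMIC_OPS variant: arch_atomic_add(), arch_atomic_add_return_relaxed(), arch_atomic_fetch_add_relaxed() */
ATOMIC_OPS(andnot, bic)	/* second variant (after #undef): arch_atomic_andnot() and arch_atomic_fetch_andnot_relaxed(); "andnot" maps to bic, i.e. AND with the complement */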