Commit b115d85
Merge tag 'locking-core-2023-05-05' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull locking updates from Ingo Molnar:

 - Introduce local{,64}_try_cmpxchg() - a slightly more optimal primitive,
   which will be used in perf events ring-buffer code

 - Simplify/modify rwsems on PREEMPT_RT, to address writer starvation

 - Misc cleanups/fixes

* tag 'locking-core-2023-05-05' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  locking/atomic: Correct (cmp)xchg() instrumentation
  locking/x86: Define arch_try_cmpxchg_local()
  locking/arch: Wire up local_try_cmpxchg()
  locking/generic: Wire up local{,64}_try_cmpxchg()
  locking/atomic: Add generic try_cmpxchg{,64}_local() support
  locking/rwbase: Mitigate indefinite writer starvation
  locking/arch: Rename all internal __xchg() names to __arch_xchg()
2 parents: d5ed10b + ec57032 · commit b115d85
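Why try_cmpxchg() is "slightly more optimal": unlike cmpxchg(), which returns the old value and leaves the caller to compare it and reload on failure, the try-variant returns success as a bool and rewrites the caller's expected value when it fails. A minimal userspace sketch of the two loop shapes (illustrative only; it uses GCC atomic builtins rather than the kernel API, and my_cmpxchg/my_try_cmpxchg are hypothetical stand-ins):

/*
 * Illustrative sketch, not kernel code. my_cmpxchg() mimics cmpxchg():
 * it returns the observed value and leaves compare/reload to the caller.
 * my_try_cmpxchg() mimics try_cmpxchg(): it returns a bool and rewrites
 * *old on failure, so the loop needs no extra compare or reload.
 */
#include <stdbool.h>
#include <stdio.h>

static long v;

static long my_cmpxchg(long *p, long old, long new)
{
	__atomic_compare_exchange_n(p, &old, new, false,
				    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return old;			/* observed value; == old on success */
}

static bool my_try_cmpxchg(long *p, long *old, long new)
{
	return __atomic_compare_exchange_n(p, old, new, false,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

static void inc_cmpxchg_style(void)
{
	long seen, old = v;

	while ((seen = my_cmpxchg(&v, old, old + 1)) != old)
		old = seen;		/* manual reload on failure */
}

static void inc_try_cmpxchg_style(void)
{
	long old = v;

	while (!my_try_cmpxchg(&v, &old, old + 1))
		;			/* 'old' was already refreshed */
}

int main(void)
{
	inc_cmpxchg_style();
	inc_try_cmpxchg_style();
	printf("%ld\n", v);		/* prints 2 */
	return 0;
}

On x86 the try-form also lets the compiler branch directly on the ZF flag that CMPXCHG already sets, saving a separate CMP instruction at each call site.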

File tree

33 files changed: +209 -111 lines changed
arch/alpha/include/asm/cmpxchg.h

Lines changed: 5 additions & 5 deletions
@@ -6,15 +6,15 @@
  * Atomic exchange routines.
  */
 
-#define ____xchg(type, args...)	__xchg ## type ## _local(args)
+#define ____xchg(type, args...)	__arch_xchg ## type ## _local(args)
 #define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 #include <asm/xchg.h>
 
 #define xchg_local(ptr, x)						\
 ({									\
 	__typeof__(*(ptr)) _x_ = (x);					\
-	(__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_,	\
-					  sizeof(*(ptr)));		\
+	(__typeof__(*(ptr))) __arch_xchg_local((ptr), (unsigned long)_x_,\
+					       sizeof(*(ptr)));		\
 })
 
 #define arch_cmpxchg_local(ptr, o, n)					\
@@ -34,7 +34,7 @@
 
 #undef ____xchg
 #undef ____cmpxchg
-#define ____xchg(type, args...)	__xchg ##type(args)
+#define ____xchg(type, args...)	__arch_xchg ##type(args)
 #define ____cmpxchg(type, args...)	__cmpxchg ##type(args)
 #include <asm/xchg.h>
@@ -48,7 +48,7 @@
 	__typeof__(*(ptr)) _x_ = (x);					\
 	smp_mb();							\
 	__ret = (__typeof__(*(ptr)))					\
-		__xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
+		__arch_xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));\
 	smp_mb();							\
 	__ret;								\
 })

arch/alpha/include/asm/local.h

Lines changed: 10 additions & 2 deletions
@@ -52,8 +52,16 @@ static __inline__ long local_sub_return(long i, local_t * l)
 	return result;
 }
 
-#define local_cmpxchg(l, o, n) \
-	(cmpxchg_local(&((l)->a.counter), (o), (n)))
+static __inline__ long local_cmpxchg(local_t *l, long old, long new)
+{
+	return cmpxchg_local(&l->a.counter, old, new);
+}
+
+static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
+{
+	return try_cmpxchg_local(&l->a.counter, (s64 *)old, new);
+}
+
 #define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
 
 /**
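Alpha supplies no native arch_try_cmpxchg_local(), so the try_cmpxchg_local() call above resolves to the generic fallback this series introduces ("locking/atomic: Add generic try_cmpxchg{,64}_local() support"). That fallback is built out of cmpxchg_local() and has roughly this shape (a sketch, not the verbatim generated header):

#define arch_try_cmpxchg_local(_ptr, _oldp, _new)			\
({									\
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r;		\
	___r = arch_cmpxchg_local((_ptr), ___o, (_new));		\
	if (unlikely(___r != ___o))					\
		*___op = ___r;	/* report the observed value back */	\
	likely(___r == ___o);						\
})

The (s64 *) cast works because local_t wraps an atomic_long_t, whose counter field is a 64-bit signed quantity on alpha.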

arch/arc/include/asm/cmpxchg.h

Lines changed: 2 additions & 2 deletions
@@ -85,7 +85,7 @@
  */
 #ifdef CONFIG_ARC_HAS_LLSC
 
-#define __xchg(ptr, val)						\
+#define __arch_xchg(ptr, val)						\
 ({									\
 	__asm__ __volatile__(						\
 	"	ex  %0, [%1]	\n"	/* set new value */		\
@@ -102,7 +102,7 @@
 									\
 	switch(sizeof(*(_p_))) {					\
 	case 4:								\
-		_val_ = __xchg(_p_, _val_);				\
+		_val_ = __arch_xchg(_p_, _val_);			\
 		break;							\
 	default:							\
 		BUILD_BUG();						\

arch/arm/include/asm/cmpxchg.h

Lines changed: 4 additions & 3 deletions
@@ -25,7 +25,8 @@
 #define swp_is_buggy
 #endif
 
-static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
+static inline unsigned long
+__arch_xchg(unsigned long x, volatile void *ptr, int size)
 {
 	extern void __bad_xchg(volatile void *, int);
 	unsigned long ret;
@@ -115,8 +116,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 }
 
 #define arch_xchg_relaxed(ptr, x) ({					\
-	(__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr),		\
-				   sizeof(*(ptr)));			\
+	(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x), (ptr),	\
+					sizeof(*(ptr)));		\
 })
 
 #include <asm-generic/cmpxchg-local.h>

arch/arm64/include/asm/cmpxchg.h

Lines changed: 3 additions & 4 deletions
@@ -62,9 +62,8 @@ __XCHG_CASE( ,  ,  mb_, 64, dmb ish, nop,  , a, l, "memory")
 #undef __XCHG_CASE
 
 #define __XCHG_GEN(sfx)							\
-static __always_inline unsigned long __xchg##sfx(unsigned long x,	\
-						 volatile void *ptr,	\
-						 int size)		\
+static __always_inline unsigned long					\
+__arch_xchg##sfx(unsigned long x, volatile void *ptr, int size)		\
 {									\
 	switch (size) {							\
 	case 1:								\
@@ -93,7 +92,7 @@ __XCHG_GEN(_mb)
 ({									\
 	__typeof__(*(ptr)) __ret;					\
 	__ret = (__typeof__(*(ptr)))					\
-		__xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
+		__arch_xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
 	__ret;								\
 })
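For context, expanding __XCHG_GEN(_mb) after this change yields a size-dispatch helper along these lines (reconstructed from the macro for illustration; abbreviated):

static __always_inline unsigned long
__arch_xchg_mb(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
	case 1:
		return __xchg_case_mb_8(x, ptr);
	case 2:
		return __xchg_case_mb_16(x, ptr);
	case 4:
		return __xchg_case_mb_32(x, ptr);
	case 8:
		return __xchg_case_mb_64(x, ptr);
	default:
		BUILD_BUG();
	}
}

Only the outer dispatch function is renamed (__xchg_mb becomes __arch_xchg_mb); the per-size __xchg_case_* helpers it calls keep their names.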

arch/hexagon/include/asm/cmpxchg.h

Lines changed: 5 additions & 5 deletions
@@ -9,7 +9,7 @@
 #define _ASM_CMPXCHG_H
 
 /*
- * __xchg - atomically exchange a register and a memory location
+ * __arch_xchg - atomically exchange a register and a memory location
  * @x: value to swap
  * @ptr: pointer to memory
  * @size: size of the value
@@ -19,8 +19,8 @@
  * Note:  there was an errata for V2 about .new's and memw_locked.
  *
  */
-static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
-				   int size)
+static inline unsigned long
+__arch_xchg(unsigned long x, volatile void *ptr, int size)
 {
 	unsigned long retval;
 
@@ -42,8 +42,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
 * Atomically swap the contents of a register with memory.  Should be atomic
 * between multiple CPU's and within interrupts on the same CPU.
 */
-#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), (ptr), \
-						       sizeof(*(ptr))))
+#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__arch_xchg((unsigned long)(v), (ptr), \
+							    sizeof(*(ptr))))
 
 /*
 * see rt-mutex-design.txt; cmpxchg supposedly checks if *ptr == A and swaps.

arch/ia64/include/asm/cmpxchg.h

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 #include <uapi/asm/cmpxchg.h>
 
 #define arch_xchg(ptr, x)	\
-({(__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));})
+({(__typeof__(*(ptr))) __arch_xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));})
 
 #define arch_cmpxchg(ptr, o, n)		cmpxchg_acq((ptr), (o), (n))
 #define arch_cmpxchg64(ptr, o, n)	cmpxchg_acq((ptr), (o), (n))

arch/ia64/include/uapi/asm/cmpxchg.h

Lines changed: 2 additions & 2 deletions
@@ -23,7 +23,7 @@
  */
 extern void ia64_xchg_called_with_bad_pointer(void);
 
-#define __xchg(x, ptr, size)						\
+#define __arch_xchg(x, ptr, size)					\
 ({									\
 	unsigned long __xchg_result;					\
 									\
@@ -51,7 +51,7 @@ extern void ia64_xchg_called_with_bad_pointer(void);
 
 #ifndef __KERNEL__
 #define xchg(ptr, x)							\
-({(__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));})
+({(__typeof__(*(ptr))) __arch_xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));})
 #endif
 
 /*

arch/loongarch/include/asm/cmpxchg.h

Lines changed: 2 additions & 2 deletions
@@ -62,7 +62,7 @@ static inline unsigned int __xchg_small(volatile void *ptr, unsigned int val,
 }
 
 static __always_inline unsigned long
-__xchg(volatile void *ptr, unsigned long x, int size)
+__arch_xchg(volatile void *ptr, unsigned long x, int size)
 {
 	switch (size) {
 	case 1:
@@ -87,7 +87,7 @@ __xchg(volatile void *ptr, unsigned long x, int size)
 	__typeof__(*(ptr)) __res;					\
 									\
 	__res = (__typeof__(*(ptr)))					\
-		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
+		__arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
 									\
 	__res;								\
 })

arch/loongarch/include/asm/local.h

Lines changed: 11 additions & 2 deletions
@@ -56,8 +56,17 @@ static inline long local_sub_return(long i, local_t *l)
 	return result;
 }
 
-#define local_cmpxchg(l, o, n) \
-	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
+static inline long local_cmpxchg(local_t *l, long old, long new)
+{
+	return cmpxchg_local(&l->a.counter, old, new);
+}
+
+static inline bool local_try_cmpxchg(local_t *l, long *old, long new)
+{
+	typeof(l->a.counter) *__old = (typeof(l->a.counter) *) old;
+	return try_cmpxchg_local(&l->a.counter, __old, new);
+}
+
 #define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
 
 /**
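Note how the two ports bridge the caller's long *old to the counter's own type for the underlying try_cmpxchg_local(): alpha casts to (s64 *) directly, while loongarch goes through typeof(l->a.counter) *. As a usage illustration, the new helper supports the familiar reservation-loop pattern, modeled on the perf ring-buffer use the merge message mentions (hypothetical caller, not kernel code):

static long reserve(local_t *head, long size, long limit)
{
	long offset = local_read(head);

	do {
		if (offset + size > limit)
			return -1;	/* no room */
		/* on failure, 'offset' is refreshed automatically */
	} while (!local_try_cmpxchg(head, &offset, offset + size));

	return offset;			/* start of the reserved region */
}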
