Skip to content

Commit a5b1a01

Browse files
committed
Merge tag 'locking-core-2024-03-11' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull locking updates from Ingo Molnar:

 - Micro-optimize local_xchg() and the rtmutex code on x86

 - Fix percpu-rwsem contention tracepoints

 - Simplify debugging Kconfig dependencies

 - Update/clarify the documentation of atomic primitives

 - Misc cleanups

* tag 'locking-core-2024-03-11' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  locking/rtmutex: Use try_cmpxchg_relaxed() in mark_rt_mutex_waiters()
  locking/x86: Implement local_xchg() using CMPXCHG without the LOCK prefix
  locking/percpu-rwsem: Trigger contention tracepoints only if contended
  locking/rwsem: Make DEBUG_RWSEMS and PREEMPT_RT mutually exclusive
  locking/rwsem: Clarify that RWSEM_READER_OWNED is just a hint
  locking/mutex: Simplify <linux/mutex.h>
  locking/qspinlock: Fix 'wait_early' set but not used warning
  locking/atomic: scripts: Clarify ordering of conditional atomics
2 parents b040240 + ce3576e commit a5b1a01

File tree

17 files changed

+154
-49
lines changed

17 files changed

+154
-49
lines changed

arch/x86/include/asm/local.h

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -131,8 +131,20 @@ static inline bool local_try_cmpxchg(local_t *l, long *old, long new)
131131
(typeof(l->a.counter) *) old, new);
132132
}
133133

134-
/* Always has a lock prefix */
135-
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
134+
/*
135+
* Implement local_xchg using CMPXCHG instruction without the LOCK prefix.
136+
* XCHG is expensive due to the implied LOCK prefix. The processor
137+
* cannot prefetch cachelines if XCHG is used.
138+
*/
139+
static __always_inline long
140+
local_xchg(local_t *l, long n)
141+
{
142+
long c = local_read(l);
143+
144+
do { } while (!local_try_cmpxchg(l, &c, n));
145+
146+
return c;
147+
}
136148

137149
/**
138150
* local_add_unless - add unless the number is already a given value

include/linux/atomic/atomic-arch-fallback.h

Lines changed: 37 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2005,6 +2005,7 @@ raw_atomic_xchg_relaxed(atomic_t *v, int new)
20052005
* @new: int value to assign
20062006
*
20072007
* If (@v == @old), atomically updates @v to @new with full ordering.
2008+
* Otherwise, @v is not modified and relaxed ordering is provided.
20082009
*
20092010
* Safe to use in noinstr code; prefer atomic_cmpxchg() elsewhere.
20102011
*
@@ -2033,6 +2034,7 @@ raw_atomic_cmpxchg(atomic_t *v, int old, int new)
20332034
* @new: int value to assign
20342035
*
20352036
* If (@v == @old), atomically updates @v to @new with acquire ordering.
2037+
* Otherwise, @v is not modified and relaxed ordering is provided.
20362038
*
20372039
* Safe to use in noinstr code; prefer atomic_cmpxchg_acquire() elsewhere.
20382040
*
@@ -2061,6 +2063,7 @@ raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
20612063
* @new: int value to assign
20622064
*
20632065
* If (@v == @old), atomically updates @v to @new with release ordering.
2066+
* Otherwise, @v is not modified and relaxed ordering is provided.
20642067
*
20652068
* Safe to use in noinstr code; prefer atomic_cmpxchg_release() elsewhere.
20662069
*
@@ -2088,6 +2091,7 @@ raw_atomic_cmpxchg_release(atomic_t *v, int old, int new)
20882091
* @new: int value to assign
20892092
*
20902093
* If (@v == @old), atomically updates @v to @new with relaxed ordering.
2094+
* Otherwise, @v is not modified and relaxed ordering is provided.
20912095
*
20922096
* Safe to use in noinstr code; prefer atomic_cmpxchg_relaxed() elsewhere.
20932097
*
@@ -2112,7 +2116,8 @@ raw_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
21122116
* @new: int value to assign
21132117
*
21142118
* If (@v == @old), atomically updates @v to @new with full ordering.
2115-
* Otherwise, updates @old to the current value of @v.
2119+
* Otherwise, @v is not modified, @old is updated to the current value of @v,
2120+
* and relaxed ordering is provided.
21162121
*
21172122
* Safe to use in noinstr code; prefer atomic_try_cmpxchg() elsewhere.
21182123
*
@@ -2145,7 +2150,8 @@ raw_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
21452150
* @new: int value to assign
21462151
*
21472152
* If (@v == @old), atomically updates @v to @new with acquire ordering.
2148-
* Otherwise, updates @old to the current value of @v.
2153+
* Otherwise, @v is not modified, @old is updated to the current value of @v,
2154+
* and relaxed ordering is provided.
21492155
*
21502156
* Safe to use in noinstr code; prefer atomic_try_cmpxchg_acquire() elsewhere.
21512157
*
@@ -2178,7 +2184,8 @@ raw_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
21782184
* @new: int value to assign
21792185
*
21802186
* If (@v == @old), atomically updates @v to @new with release ordering.
2181-
* Otherwise, updates @old to the current value of @v.
2187+
* Otherwise, @v is not modified, @old is updated to the current value of @v,
2188+
* and relaxed ordering is provided.
21822189
*
21832190
* Safe to use in noinstr code; prefer atomic_try_cmpxchg_release() elsewhere.
21842191
*
@@ -2210,7 +2217,8 @@ raw_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
22102217
* @new: int value to assign
22112218
*
22122219
* If (@v == @old), atomically updates @v to @new with relaxed ordering.
2213-
* Otherwise, updates @old to the current value of @v.
2220+
* Otherwise, @v is not modified, @old is updated to the current value of @v,
2221+
* and relaxed ordering is provided.
22142222
*
22152223
* Safe to use in noinstr code; prefer atomic_try_cmpxchg_relaxed() elsewhere.
22162224
*
@@ -2403,6 +2411,7 @@ raw_atomic_add_negative_relaxed(int i, atomic_t *v)
24032411
* @u: int value to compare with
24042412
*
24052413
* If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
2414+
* Otherwise, @v is not modified and relaxed ordering is provided.
24062415
*
24072416
* Safe to use in noinstr code; prefer atomic_fetch_add_unless() elsewhere.
24082417
*
@@ -2432,6 +2441,7 @@ raw_atomic_fetch_add_unless(atomic_t *v, int a, int u)
24322441
* @u: int value to compare with
24332442
*
24342443
* If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
2444+
* Otherwise, @v is not modified and relaxed ordering is provided.
24352445
*
24362446
* Safe to use in noinstr code; prefer atomic_add_unless() elsewhere.
24372447
*
@@ -2452,6 +2462,7 @@ raw_atomic_add_unless(atomic_t *v, int a, int u)
24522462
* @v: pointer to atomic_t
24532463
*
24542464
* If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
2465+
* Otherwise, @v is not modified and relaxed ordering is provided.
24552466
*
24562467
* Safe to use in noinstr code; prefer atomic_inc_not_zero() elsewhere.
24572468
*
@@ -2472,6 +2483,7 @@ raw_atomic_inc_not_zero(atomic_t *v)
24722483
* @v: pointer to atomic_t
24732484
*
24742485
* If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
2486+
* Otherwise, @v is not modified and relaxed ordering is provided.
24752487
*
24762488
* Safe to use in noinstr code; prefer atomic_inc_unless_negative() elsewhere.
24772489
*
@@ -2499,6 +2511,7 @@ raw_atomic_inc_unless_negative(atomic_t *v)
24992511
* @v: pointer to atomic_t
25002512
*
25012513
* If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
2514+
* Otherwise, @v is not modified and relaxed ordering is provided.
25022515
*
25032516
* Safe to use in noinstr code; prefer atomic_dec_unless_positive() elsewhere.
25042517
*
@@ -2526,6 +2539,7 @@ raw_atomic_dec_unless_positive(atomic_t *v)
25262539
* @v: pointer to atomic_t
25272540
*
25282541
* If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
2542+
* Otherwise, @v is not modified and relaxed ordering is provided.
25292543
*
25302544
* Safe to use in noinstr code; prefer atomic_dec_if_positive() elsewhere.
25312545
*
@@ -4117,6 +4131,7 @@ raw_atomic64_xchg_relaxed(atomic64_t *v, s64 new)
41174131
* @new: s64 value to assign
41184132
*
41194133
* If (@v == @old), atomically updates @v to @new with full ordering.
4134+
* Otherwise, @v is not modified and relaxed ordering is provided.
41204135
*
41214136
* Safe to use in noinstr code; prefer atomic64_cmpxchg() elsewhere.
41224137
*
@@ -4145,6 +4160,7 @@ raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
41454160
* @new: s64 value to assign
41464161
*
41474162
* If (@v == @old), atomically updates @v to @new with acquire ordering.
4163+
* Otherwise, @v is not modified and relaxed ordering is provided.
41484164
*
41494165
* Safe to use in noinstr code; prefer atomic64_cmpxchg_acquire() elsewhere.
41504166
*
@@ -4173,6 +4189,7 @@ raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
41734189
* @new: s64 value to assign
41744190
*
41754191
* If (@v == @old), atomically updates @v to @new with release ordering.
4192+
* Otherwise, @v is not modified and relaxed ordering is provided.
41764193
*
41774194
* Safe to use in noinstr code; prefer atomic64_cmpxchg_release() elsewhere.
41784195
*
@@ -4200,6 +4217,7 @@ raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
42004217
* @new: s64 value to assign
42014218
*
42024219
* If (@v == @old), atomically updates @v to @new with relaxed ordering.
4220+
* Otherwise, @v is not modified and relaxed ordering is provided.
42034221
*
42044222
* Safe to use in noinstr code; prefer atomic64_cmpxchg_relaxed() elsewhere.
42054223
*
@@ -4224,7 +4242,8 @@ raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
42244242
* @new: s64 value to assign
42254243
*
42264244
* If (@v == @old), atomically updates @v to @new with full ordering.
4227-
* Otherwise, updates @old to the current value of @v.
4245+
* Otherwise, @v is not modified, @old is updated to the current value of @v,
4246+
* and relaxed ordering is provided.
42284247
*
42294248
* Safe to use in noinstr code; prefer atomic64_try_cmpxchg() elsewhere.
42304249
*
@@ -4257,7 +4276,8 @@ raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
42574276
* @new: s64 value to assign
42584277
*
42594278
* If (@v == @old), atomically updates @v to @new with acquire ordering.
4260-
* Otherwise, updates @old to the current value of @v.
4279+
* Otherwise, @v is not modified, @old is updated to the current value of @v,
4280+
* and relaxed ordering is provided.
42614281
*
42624282
* Safe to use in noinstr code; prefer atomic64_try_cmpxchg_acquire() elsewhere.
42634283
*
@@ -4290,7 +4310,8 @@ raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
42904310
* @new: s64 value to assign
42914311
*
42924312
* If (@v == @old), atomically updates @v to @new with release ordering.
4293-
* Otherwise, updates @old to the current value of @v.
4313+
* Otherwise, @v is not modified, @old is updated to the current value of @v,
4314+
* and relaxed ordering is provided.
42944315
*
42954316
* Safe to use in noinstr code; prefer atomic64_try_cmpxchg_release() elsewhere.
42964317
*
@@ -4322,7 +4343,8 @@ raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
43224343
* @new: s64 value to assign
43234344
*
43244345
* If (@v == @old), atomically updates @v to @new with relaxed ordering.
4325-
* Otherwise, updates @old to the current value of @v.
4346+
* Otherwise, @v is not modified, @old is updated to the current value of @v,
4347+
* and relaxed ordering is provided.
43264348
*
43274349
* Safe to use in noinstr code; prefer atomic64_try_cmpxchg_relaxed() elsewhere.
43284350
*
@@ -4515,6 +4537,7 @@ raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
45154537
* @u: s64 value to compare with
45164538
*
45174539
* If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
4540+
* Otherwise, @v is not modified and relaxed ordering is provided.
45184541
*
45194542
* Safe to use in noinstr code; prefer atomic64_fetch_add_unless() elsewhere.
45204543
*
@@ -4544,6 +4567,7 @@ raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
45444567
* @u: s64 value to compare with
45454568
*
45464569
* If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
4570+
* Otherwise, @v is not modified and relaxed ordering is provided.
45474571
*
45484572
* Safe to use in noinstr code; prefer atomic64_add_unless() elsewhere.
45494573
*
@@ -4564,6 +4588,7 @@ raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
45644588
* @v: pointer to atomic64_t
45654589
*
45664590
* If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
4591+
* Otherwise, @v is not modified and relaxed ordering is provided.
45674592
*
45684593
* Safe to use in noinstr code; prefer atomic64_inc_not_zero() elsewhere.
45694594
*
@@ -4584,6 +4609,7 @@ raw_atomic64_inc_not_zero(atomic64_t *v)
45844609
* @v: pointer to atomic64_t
45854610
*
45864611
* If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
4612+
* Otherwise, @v is not modified and relaxed ordering is provided.
45874613
*
45884614
* Safe to use in noinstr code; prefer atomic64_inc_unless_negative() elsewhere.
45894615
*
@@ -4611,6 +4637,7 @@ raw_atomic64_inc_unless_negative(atomic64_t *v)
46114637
* @v: pointer to atomic64_t
46124638
*
46134639
* If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
4640+
* Otherwise, @v is not modified and relaxed ordering is provided.
46144641
*
46154642
* Safe to use in noinstr code; prefer atomic64_dec_unless_positive() elsewhere.
46164643
*
@@ -4638,6 +4665,7 @@ raw_atomic64_dec_unless_positive(atomic64_t *v)
46384665
* @v: pointer to atomic64_t
46394666
*
46404667
* If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
4668+
* Otherwise, @v is not modified and relaxed ordering is provided.
46414669
*
46424670
* Safe to use in noinstr code; prefer atomic64_dec_if_positive() elsewhere.
46434671
*
@@ -4662,4 +4690,4 @@ raw_atomic64_dec_if_positive(atomic64_t *v)
46624690
}
46634691

46644692
#endif /* _LINUX_ATOMIC_FALLBACK_H */
4665-
// eec048affea735b8464f58e6d96992101f8f85f1
4693+
// 14850c0b0db20c62fdc78ccd1d42b98b88d76331

0 commit comments

Comments (0)