@@ -2005,6 +2005,7 @@ raw_atomic_xchg_relaxed(atomic_t *v, int new)
  * @new: int value to assign
  *
  * If (@v == @old), atomically updates @v to @new with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_cmpxchg() elsewhere.
  *
@@ -2033,6 +2034,7 @@ raw_atomic_cmpxchg(atomic_t *v, int old, int new)
  * @new: int value to assign
  *
  * If (@v == @old), atomically updates @v to @new with acquire ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_cmpxchg_acquire() elsewhere.
  *
@@ -2061,6 +2063,7 @@ raw_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
  * @new: int value to assign
  *
  * If (@v == @old), atomically updates @v to @new with release ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_cmpxchg_release() elsewhere.
  *
@@ -2088,6 +2091,7 @@ raw_atomic_cmpxchg_release(atomic_t *v, int old, int new)
  * @new: int value to assign
  *
  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_cmpxchg_relaxed() elsewhere.
  *
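The four hunks above all add the same sentence about the failure case of the plain cmpxchg() family. As a quick illustration of why that sentence matters (this snippet is editorial, not part of the patch, and the names in it are invented): atomic_cmpxchg() returns the value it observed in @v, and the documented full/acquire/release ordering only applies on the path where that value equals @old.

#include <linux/atomic.h>
#include <linux/types.h>

static atomic_t owner = ATOMIC_INIT(0);

/*
 * Claim the (hypothetical) resource for @me. atomic_cmpxchg() returns the
 * value it found in @owner: 0 means the swap happened and full ordering was
 * provided; any other value means @owner was left unmodified and, per the
 * clarified comment, only relaxed ordering applies on that path.
 */
static bool claim(int me)
{
        return atomic_cmpxchg(&owner, 0, me) == 0;
}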
@@ -2112,7 +2116,8 @@ raw_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
  * @new: int value to assign
  *
  * If (@v == @old), atomically updates @v to @new with full ordering.
- * Otherwise, updates @old to the current value of @v.
+ * Otherwise, @v is not modified, @old is updated to the current value of @v,
+ * and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_try_cmpxchg() elsewhere.
  *
@@ -2145,7 +2150,8 @@ raw_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
  * @new: int value to assign
  *
  * If (@v == @old), atomically updates @v to @new with acquire ordering.
- * Otherwise, updates @old to the current value of @v.
+ * Otherwise, @v is not modified, @old is updated to the current value of @v,
+ * and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_try_cmpxchg_acquire() elsewhere.
  *
@@ -2178,7 +2184,8 @@ raw_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
  * @new: int value to assign
  *
  * If (@v == @old), atomically updates @v to @new with release ordering.
- * Otherwise, updates @old to the current value of @v.
+ * Otherwise, @v is not modified, @old is updated to the current value of @v,
+ * and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_try_cmpxchg_release() elsewhere.
  *
@@ -2210,7 +2217,8 @@ raw_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
  * @new: int value to assign
  *
  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
- * Otherwise, updates @old to the current value of @v.
+ * Otherwise, @v is not modified, @old is updated to the current value of @v,
+ * and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_try_cmpxchg_relaxed() elsewhere.
  *
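The try_cmpxchg() hunks above additionally spell out that @old is rewritten on failure. That is what makes the usual retry loop work without an explicit re-read; a minimal sketch (invented names, assuming the generic <linux/atomic.h> API rather than anything from this patch):

#include <linux/atomic.h>
#include <linux/types.h>

static atomic_t counter = ATOMIC_INIT(0);

/*
 * Saturating increment (illustrative only). On a failed compare,
 * atomic_try_cmpxchg() has already refreshed @old from @counter -- with
 * relaxed ordering, as the comments above now state -- so the loop never
 * needs to re-read the counter itself.
 */
static void counter_inc_saturating(int limit)
{
        int old = atomic_read(&counter);

        do {
                if (old >= limit)
                        return;
        } while (!atomic_try_cmpxchg(&counter, &old, old + 1));
}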
@@ -2403,6 +2411,7 @@ raw_atomic_add_negative_relaxed(int i, atomic_t *v)
  * @u: int value to compare with
  *
  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_fetch_add_unless() elsewhere.
  *
@@ -2432,6 +2441,7 @@ raw_atomic_fetch_add_unless(atomic_t *v, int a, int u)
  * @u: int value to compare with
  *
  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_add_unless() elsewhere.
  *
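The two hunks above cover fetch_add_unless()/add_unless(), whose failure path likewise leaves @v untouched and is only relaxed. A hedged sketch of the usual sentinel pattern (the REF_DEAD name and functions are invented for this illustration):

#include <linux/atomic.h>
#include <linux/types.h>

#define REF_DEAD        0       /* sentinel invented for this sketch */

static atomic_t refs = ATOMIC_INIT(1);

/*
 * Take a reference only while the object is live. When @refs already equals
 * the sentinel, the add is skipped, @refs is untouched, and only relaxed
 * ordering is provided, exactly as the added sentences say.
 */
static bool get_ref(void)
{
        return atomic_add_unless(&refs, 1, REF_DEAD);
}

/* Same operation, but returning the previous count for diagnostics. */
static int get_ref_old_count(void)
{
        return atomic_fetch_add_unless(&refs, 1, REF_DEAD);
}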
@@ -2452,6 +2462,7 @@ raw_atomic_add_unless(atomic_t *v, int a, int u)
  * @v: pointer to atomic_t
  *
  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_inc_not_zero() elsewhere.
  *
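inc_not_zero() is the add_unless(@v, 1, 0) special case and the classic lookup-side "tryget". An illustrative sketch (struct and function names invented here, not taken from the patch):

#include <linux/atomic.h>
#include <linux/types.h>

struct obj {
        atomic_t refcount;
        /* payload elided */
};

/*
 * Lookup-side tryget: take a reference only if the count has not already
 * hit zero. When it has, @refcount is left as-is and only relaxed ordering
 * is provided, so the failed call must not be relied on to order anything
 * against the object's teardown.
 */
static bool obj_tryget(struct obj *o)
{
        return atomic_inc_not_zero(&o->refcount);
}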
@@ -2472,6 +2483,7 @@ raw_atomic_inc_not_zero(atomic_t *v)
  * @v: pointer to atomic_t
  *
  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_inc_unless_negative() elsewhere.
  *
@@ -2499,6 +2511,7 @@ raw_atomic_inc_unless_negative(atomic_t *v)
  * @v: pointer to atomic_t
  *
  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_dec_unless_positive() elsewhere.
  *
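inc_unless_negative()/dec_unless_positive(), clarified in the two hunks above, are typically used as a pair of one-sided gates. The toy sketch below (invented names; a shape, not a complete synchronisation primitive) shows where the newly documented failure semantics apply:

#include <linux/atomic.h>
#include <linux/types.h>

static atomic_t gate = ATOMIC_INIT(0);

/* Reader side: enter only while no writer has driven @gate negative. */
static bool reader_enter(void)
{
        return atomic_inc_unless_negative(&gate);
}

static void reader_exit(void)
{
        atomic_dec(&gate);
}

/*
 * Writer side: claim an idle gate by pushing it below zero. While readers
 * hold the count positive this fails, leaving @gate unmodified with only
 * relaxed ordering, as the comments above now make explicit.
 */
static bool writer_tryclaim(void)
{
        return atomic_dec_unless_positive(&gate);
}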
@@ -2526,6 +2539,7 @@ raw_atomic_dec_unless_positive(atomic_t *v)
  * @v: pointer to atomic_t
  *
  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic_dec_if_positive() elsewhere.
  *
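dec_if_positive() differs from the other conditional ops in returning a value rather than a bool: it returns the old value minus one whether or not the store happened, so a negative result identifies the untouched, relaxed-only failure path. A small credit-pool sketch with invented names:

#include <linux/atomic.h>
#include <linux/types.h>

static atomic_t credits = ATOMIC_INIT(8);       /* budget invented for the sketch */

/*
 * Consume one credit if any remain. atomic_dec_if_positive() returns the
 * old value minus one whether or not it stored, so a negative result means
 * the pool was empty, @credits was not modified, and only relaxed ordering
 * was provided.
 */
static bool take_credit(void)
{
        return atomic_dec_if_positive(&credits) >= 0;
}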
@@ -4117,6 +4131,7 @@ raw_atomic64_xchg_relaxed(atomic64_t *v, s64 new)
  * @new: s64 value to assign
  *
  * If (@v == @old), atomically updates @v to @new with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_cmpxchg() elsewhere.
  *
@@ -4145,6 +4160,7 @@ raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
  * @new: s64 value to assign
  *
  * If (@v == @old), atomically updates @v to @new with acquire ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_cmpxchg_acquire() elsewhere.
  *
@@ -4173,6 +4189,7 @@ raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
  * @new: s64 value to assign
  *
  * If (@v == @old), atomically updates @v to @new with release ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_cmpxchg_release() elsewhere.
  *
@@ -4200,6 +4217,7 @@ raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
  * @new: s64 value to assign
  *
  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_cmpxchg_relaxed() elsewhere.
  *
@@ -4224,7 +4242,8 @@ raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
  * @new: s64 value to assign
  *
  * If (@v == @old), atomically updates @v to @new with full ordering.
- * Otherwise, updates @old to the current value of @v.
+ * Otherwise, @v is not modified, @old is updated to the current value of @v,
+ * and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_try_cmpxchg() elsewhere.
  *
@@ -4257,7 +4276,8 @@ raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
  * @new: s64 value to assign
  *
  * If (@v == @old), atomically updates @v to @new with acquire ordering.
- * Otherwise, updates @old to the current value of @v.
+ * Otherwise, @v is not modified, @old is updated to the current value of @v,
+ * and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_try_cmpxchg_acquire() elsewhere.
  *
@@ -4290,7 +4310,8 @@ raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
  * @new: s64 value to assign
  *
  * If (@v == @old), atomically updates @v to @new with release ordering.
- * Otherwise, updates @old to the current value of @v.
+ * Otherwise, @v is not modified, @old is updated to the current value of @v,
+ * and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_try_cmpxchg_release() elsewhere.
  *
@@ -4322,7 +4343,8 @@ raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
  * @new: s64 value to assign
  *
  * If (@v == @old), atomically updates @v to @new with relaxed ordering.
- * Otherwise, updates @old to the current value of @v.
+ * Otherwise, @v is not modified, @old is updated to the current value of @v,
+ * and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_try_cmpxchg_relaxed() elsewhere.
  *
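The atomic64_*() cmpxchg/try_cmpxchg hunks above mirror the 32-bit ones. One place where the relaxed failure ordering is explicitly all that is needed is a pure statistics update such as a running maximum; a sketch with invented names, assuming the generic atomic64 API:

#include <linux/atomic.h>
#include <linux/types.h>

static atomic64_t max_seen = ATOMIC64_INIT(0);

/*
 * Record a running maximum of a 64-bit sample. Only the counter itself is
 * shared, so the relaxed variant is enough on both the success and the
 * (now explicitly relaxed) failure path; @old is refreshed automatically
 * when the compare fails.
 */
static void record_max(s64 sample)
{
        s64 old = atomic64_read(&max_seen);

        do {
                if (sample <= old)
                        return;
        } while (!atomic64_try_cmpxchg_relaxed(&max_seen, &old, sample));
}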
@@ -4515,6 +4537,7 @@ raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
  * @u: s64 value to compare with
  *
  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_fetch_add_unless() elsewhere.
  *
@@ -4544,6 +4567,7 @@ raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
  * @u: s64 value to compare with
  *
  * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_add_unless() elsewhere.
  *
@@ -4564,6 +4588,7 @@ raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
  * @v: pointer to atomic64_t
  *
  * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_inc_not_zero() elsewhere.
  *
@@ -4584,6 +4609,7 @@ raw_atomic64_inc_not_zero(atomic64_t *v)
  * @v: pointer to atomic64_t
  *
  * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_inc_unless_negative() elsewhere.
  *
@@ -4611,6 +4637,7 @@ raw_atomic64_inc_unless_negative(atomic64_t *v)
  * @v: pointer to atomic64_t
  *
  * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_dec_unless_positive() elsewhere.
  *
@@ -4638,6 +4665,7 @@ raw_atomic64_dec_unless_positive(atomic64_t *v)
  * @v: pointer to atomic64_t
  *
  * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
+ * Otherwise, @v is not modified and relaxed ordering is provided.
  *
  * Safe to use in noinstr code; prefer atomic64_dec_if_positive() elsewhere.
  *
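The remaining atomic64 hunks (fetch_add_unless, add_unless, inc_not_zero, inc/dec_unless_*, dec_if_positive) receive the same clarification as their 32-bit counterparts. As one last illustration (invented names, not from this patch), a shutdown gate built from atomic64_add_unless() plus atomic64_cmpxchg(), where both failure paths now explicitly leave @users unmodified with only relaxed ordering:

#include <linux/atomic.h>
#include <linux/types.h>

static atomic64_t users = ATOMIC64_INIT(0);     /* -1 acts as a "dying" sentinel */

/* A new user may enter only while the gate has not been poisoned to -1. */
static bool user_enter(void)
{
        return atomic64_add_unless(&users, 1, -1);
}

static void user_exit(void)
{
        atomic64_dec(&users);
}

/*
 * Shutdown: poison an idle gate so no further users can enter. Both a
 * failed add_unless() above and a failed cmpxchg() here leave @users
 * unmodified and imply only relaxed ordering.
 */
static bool users_try_shutdown(void)
{
        return atomic64_cmpxchg(&users, 0, -1) == 0;
}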
@@ -4662,4 +4690,4 @@ raw_atomic64_dec_if_positive(atomic64_t *v)
 }
 
 #endif /* _LINUX_ATOMIC_FALLBACK_H */
-// eec048affea735b8464f58e6d96992101f8f85f1
+// 14850c0b0db20c62fdc78ccd1d42b98b88d76331