Skip to content

Commit 0fbbf07

Browse files
compudj authored and shuahkh committed
selftests/rseq: Fix arm64 buggy load-acquire/store-release macros
The arm64 load-acquire/store-release macros from the Linux kernel rseq selftests are buggy. Remplace them by a working implementation. Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com> Cc: Catalin Marinas <catalin.marinas@arm.com> Cc: Will Deacon <will@kernel.org> Cc: Peter Zijlstra <peterz@infradead.org> Signed-off-by: Shuah Khan <skhan@linuxfoundation.org>
1 parent d6aaa23 commit 0fbbf07

File tree

1 file changed

+30
-28
lines changed

1 file changed

+30
-28
lines changed

tools/testing/selftests/rseq/rseq-arm64.h

Lines changed: 30 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -27,59 +27,61 @@
2727

2828
/*
 * rseq_smp_load_acquire(p): load *(p) with acquire ordering (arm64 LDAR).
 *
 * The previous implementation was buggy: the asm output operand was
 * "=r" (*(__u8 *)p) — i.e. the loaded value was written back through the
 * source pointer itself — and the declared temporary ____p1 was returned
 * without ever being initialized.
 *
 * Fixed version: the value is loaded into a union whose storage matches
 * sizeof(*(p)), then returned as the unqualified scalar type of *(p).
 * The union (rather than a pointer cast) keeps the type punning
 * well-defined. The "memory" clobber plus the LDAR instruction provide
 * the acquire ordering guarantee.
 *
 * NOTE(review): relies on rseq_unqual_scalar_typeof() and the kernel
 * __u8/__u16/__u32/__u64 types being defined by headers included before
 * this one — confirm against rseq.h.
 */
#define rseq_smp_load_acquire(p)					\
__extension__ ({							\
	union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u; \
	switch (sizeof(*(p))) {						\
	case 1:								\
		__asm__ __volatile__ ("ldarb %w0, %1"			\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*(p)) : "memory");			\
		break;							\
	case 2:								\
		__asm__ __volatile__ ("ldarh %w0, %1"			\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*(p)) : "memory");			\
		break;							\
	case 4:								\
		__asm__ __volatile__ ("ldar %w0, %1"			\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*(p)) : "memory");			\
		break;							\
	case 8:								\
		__asm__ __volatile__ ("ldar %0, %1"			\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*(p)) : "memory");			\
		break;							\
	}								\
	(rseq_unqual_scalar_typeof(*(p)))__u.__val;			\
})
5555

5656
/*
 * Upgrade a control dependency to acquire ordering; expands to
 * rseq_smp_rmb() (read memory barrier, defined elsewhere in this file).
 */
#define rseq_smp_acquire__after_ctrl_dep() rseq_smp_rmb()
5757

5858
#define rseq_smp_store_release(p, v) \
5959
do { \
60-
switch (sizeof(*p)) { \
60+
union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u = \
61+
{ .__val = (rseq_unqual_scalar_typeof(*(p))) (v) }; \
62+
switch (sizeof(*(p))) { \
6163
case 1: \
62-
asm volatile ("stlrb %w1, %0" \
63-
: "=Q" (*p) \
64-
: "r" ((__u8)v) \
64+
__asm__ __volatile__ ("stlrb %w1, %0" \
65+
: "=Q" (*(p)) \
66+
: "r" (*(__u8 *)__u.__c) \
6567
: "memory"); \
6668
break; \
6769
case 2: \
68-
asm volatile ("stlrh %w1, %0" \
69-
: "=Q" (*p) \
70-
: "r" ((__u16)v) \
70+
__asm__ __volatile__ ("stlrh %w1, %0" \
71+
: "=Q" (*(p)) \
72+
: "r" (*(__u16 *)__u.__c) \
7173
: "memory"); \
7274
break; \
7375
case 4: \
74-
asm volatile ("stlr %w1, %0" \
75-
: "=Q" (*p) \
76-
: "r" ((__u32)v) \
76+
__asm__ __volatile__ ("stlr %w1, %0" \
77+
: "=Q" (*(p)) \
78+
: "r" (*(__u32 *)__u.__c) \
7779
: "memory"); \
7880
break; \
7981
case 8: \
80-
asm volatile ("stlr %1, %0" \
81-
: "=Q" (*p) \
82-
: "r" ((__u64)v) \
82+
__asm__ __volatile__ ("stlr %1, %0" \
83+
: "=Q" (*(p)) \
84+
: "r" (*(__u64 *)__u.__c) \
8385
: "memory"); \
8486
break; \
8587
} \

0 commit comments

Comments
 (0)