selftests/rseq: Fix arm64 buggy load-acquire/store-release macros

The arm64 load-acquire/store-release macros from the Linux kernel rseq
selftests are buggy. Replace them with a working implementation.

Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: Will Deacon <will@kernel.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Signed-off-by: Shuah Khan <skhan@linuxfoundation.org>
This commit is contained in:
Mathieu Desnoyers 2023-06-27 11:29:22 -04:00 committed by Shuah Khan
parent d6aaa23a2b
commit 0fbbf07ce6

View File

@@ -27,59 +27,61 @@
/*
 * Load-acquire of a 1/2/4/8-byte scalar via the arm64 ldar{b,h,,} family.
 * The loaded bytes are written into a union of the unqualified scalar type
 * and a char array, then converted back to the scalar type on the way out,
 * so the asm output operand never stores through a cast of the (possibly
 * qualified) source pointer `p` as the previous version did.
 * NOTE(review): sizes other than 1/2/4/8 fall through the switch and yield
 * an uninitialized __u.__val — matches upstream kernel style; callers only
 * pass scalar types.
 */
#define rseq_smp_load_acquire(p)					\
__extension__ ({							\
	union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u; \
	switch (sizeof(*(p))) {						\
	case 1:								\
		__asm__ __volatile__ ("ldarb %w0, %1"			\
				      : "=r" (*(__u8 *)__u.__c)		\
				      : "Q" (*(p)) : "memory");		\
		break;							\
	case 2:								\
		__asm__ __volatile__ ("ldarh %w0, %1"			\
				      : "=r" (*(__u16 *)__u.__c)	\
				      : "Q" (*(p)) : "memory");		\
		break;							\
	case 4:								\
		__asm__ __volatile__ ("ldar %w0, %1"			\
				      : "=r" (*(__u32 *)__u.__c)	\
				      : "Q" (*(p)) : "memory");		\
		break;							\
	case 8:								\
		__asm__ __volatile__ ("ldar %0, %1"			\
				      : "=r" (*(__u64 *)__u.__c)	\
				      : "Q" (*(p)) : "memory");		\
		break;							\
	}								\
	(rseq_unqual_scalar_typeof(*(p)))__u.__val;			\
})
/* Promote a control dependency to acquire ordering via a read barrier. */
#define rseq_smp_acquire__after_ctrl_dep() rseq_smp_rmb()
/*
 * Store-release of a 1/2/4/8-byte scalar via the arm64 stlr{b,h,,} family.
 * The new value `v` is first converted to the unqualified scalar type of
 * *(p) and marshalled through a union's char array, so the asm input
 * operand reads a well-defined __uN object for each size instead of
 * casting `v` directly as the previous version did.
 * NOTE(review): sizes other than 1/2/4/8 silently perform no store —
 * matches upstream kernel style; callers only pass scalar types.
 */
#define rseq_smp_store_release(p, v)					\
do {									\
	union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u = \
		{ .__val = (rseq_unqual_scalar_typeof(*(p))) (v) };	\
	switch (sizeof(*(p))) {						\
	case 1:								\
		__asm__ __volatile__ ("stlrb %w1, %0"			\
				      : "=Q" (*(p))			\
				      : "r" (*(__u8 *)__u.__c)		\
				      : "memory");			\
		break;							\
	case 2:								\
		__asm__ __volatile__ ("stlrh %w1, %0"			\
				      : "=Q" (*(p))			\
				      : "r" (*(__u16 *)__u.__c)		\
				      : "memory");			\
		break;							\
	case 4:								\
		__asm__ __volatile__ ("stlr %w1, %0"			\
				      : "=Q" (*(p))			\
				      : "r" (*(__u32 *)__u.__c)		\
				      : "memory");			\
		break;							\
	case 8:								\
		__asm__ __volatile__ ("stlr %1, %0"			\
				      : "=Q" (*(p))			\
				      : "r" (*(__u64 *)__u.__c)		\
				      : "memory");			\
		break;							\
	}								\