author    | Heiko Carstens <hca@linux.ibm.com> | 2024-11-06 11:03:07 +0100
committer | Heiko Carstens <hca@linux.ibm.com> | 2024-11-12 14:01:28 +0100
commit    | 0b6de9aec285d24d49a918d52915ed231b2d9e87 (patch)
tree      | 25332fde377cb9a55b9c8486fe08d5eba1033fc0
parent    | c8603d692857a43e6f8b7b5eccab1d9aec48bd12 (diff)
s390/cmpxchg: Provide arch_try_cmpxchg()
Since gcc 14, flag output operands are also supported for s390.
Provide an arch_try_cmpxchg() implementation so that all existing
try_cmpxchg() variants generate slightly better code when compiled
with gcc 14 or newer.
Reviewed-by: Juergen Christ <jchrist@linux.ibm.com>
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
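For background (not part of the commit message): try_cmpxchg() returns a bool and, on failure, writes the value it actually found back into *oldp, so callers do not have to re-read the location before retrying. A minimal sketch of a typical caller follows; the helper name and the use of the generic atomic64 API are illustrative only and not taken from this patch:

```c
#include <linux/atomic.h>

/*
 * Illustrative retry loop built on atomic64_try_cmpxchg(), which ends up
 * in arch_try_cmpxchg() on s390. With flag output operands the compiler
 * can branch on the condition code set by CS/CSG directly instead of
 * comparing the returned old value against the expected one.
 */
static inline void example_counter_add(atomic64_t *counter, s64 delta)
{
	s64 old = atomic64_read(counter);

	/* On failure 'old' is updated in place, so the loop body is empty. */
	do {
	} while (!atomic64_try_cmpxchg(counter, &old, old + delta));
}
```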
-rw-r--r-- | arch/s390/include/asm/asm.h     | 12
-rw-r--r-- | arch/s390/include/asm/cmpxchg.h | 52
2 files changed, 64 insertions, 0 deletions
diff --git a/arch/s390/include/asm/asm.h b/arch/s390/include/asm/asm.h
new file mode 100644
index 000000000000..bea9dcf9e70e
--- /dev/null
+++ b/arch/s390/include/asm/asm.h
@@ -0,0 +1,12 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_S390_ASM_H
+#define _ASM_S390_ASM_H
+
+/* GCC versions before 14.2.0 may die with an ICE in some configurations. */
+#if defined(__GCC_ASM_FLAG_OUTPUTS__) && !(IS_ENABLED(CONFIG_CC_IS_GCC) && (GCC_VERSION < 140200))
+
+#define __HAVE_ASM_FLAG_OUTPUTS__
+
+#endif
+
+#endif /* _ASM_S390_ASM_H */
diff --git a/arch/s390/include/asm/cmpxchg.h b/arch/s390/include/asm/cmpxchg.h
index 587529673399..05f123d39bc3
--- a/arch/s390/include/asm/cmpxchg.h
+++ b/arch/s390/include/asm/cmpxchg.h
@@ -11,6 +11,7 @@
 #include <linux/mmdebug.h>
 #include <linux/types.h>
 #include <linux/bug.h>
+#include <asm/asm.h>
 
 void __xchg_called_with_bad_pointer(void);
 
@@ -174,6 +175,57 @@ static __always_inline u64 __arch_cmpxchg(u64 ptr, u64 old, u64 new, int size)
 #define arch_cmpxchg_local		arch_cmpxchg
 #define arch_cmpxchg64_local		arch_cmpxchg
 
+#ifdef __HAVE_ASM_FLAG_OUTPUTS__
+
+#define arch_try_cmpxchg(ptr, oldp, new)				\
+({									\
+	__typeof__(ptr) __oldp = (__typeof__(ptr))(oldp);		\
+	__typeof__(*(ptr)) __old = *__oldp;				\
+	__typeof__(*(ptr)) __new = (new);				\
+	__typeof__(*(ptr)) __prev;					\
+	int __cc;							\
+									\
+	switch (sizeof(*(ptr))) {					\
+	case 1:								\
+	case 2: {							\
+		__prev = arch_cmpxchg((ptr), (__old), (__new));		\
+		__cc = (__prev != __old);				\
+		if (unlikely(__cc))					\
+			*__oldp = __prev;				\
+		break;							\
+	}								\
+	case 4: {							\
+		asm volatile(						\
+			"	cs	%[__old],%[__new],%[__ptr]\n"	\
+			: [__old] "+d" (*__oldp),			\
+			  [__ptr] "+Q" (*(ptr)),			\
+			  "=@cc" (__cc)					\
+			: [__new] "d" (__new)				\
+			: "memory");					\
+		break;							\
+	}								\
+	case 8: {							\
+		asm volatile(						\
+			"	csg	%[__old],%[__new],%[__ptr]\n"	\
+			: [__old] "+d" (*__oldp),			\
+			  [__ptr] "+QS" (*(ptr)),			\
+			  "=@cc" (__cc)					\
+			: [__new] "d" (__new)				\
+			: "memory");					\
+		break;							\
+	}								\
+	default:							\
+		__cmpxchg_called_with_bad_pointer();			\
+	}								\
+	likely(__cc == 0);						\
+})
+
+#define arch_try_cmpxchg64		arch_try_cmpxchg
+#define arch_try_cmpxchg_local		arch_try_cmpxchg
+#define arch_try_cmpxchg64_local	arch_try_cmpxchg
+
+#endif /* __HAVE_ASM_FLAG_OUTPUTS__ */
+
 #define system_has_cmpxchg128()		1
 
 static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
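For readers unfamiliar with flag output operands: with "=@cc" the compiler receives the condition code left behind by the CS/CSG instruction directly, so no extra instructions are needed to derive success or failure from the returned value. Below is a hedged, self-contained user-space sketch of the 4-byte case, assuming an s390x compiler with flag-output support (gcc 14 or newer); the function name is hypothetical and not part of the kernel API:

```c
#include <stdbool.h>

/*
 * Hypothetical user-space mirror of the 4-byte arch_try_cmpxchg() case.
 * CS compares *ptr with *old; on a match it stores 'new' and sets
 * condition code 0, otherwise it loads the current value into *old and
 * sets condition code 1. "=@cc" hands that condition code to the
 * compiler as the variable 'cc'.
 */
static inline bool try_cmpxchg32(unsigned int *ptr, unsigned int *old,
				 unsigned int new)
{
	int cc;

	asm volatile(
		"	cs	%[__old],%[__new],%[__ptr]\n"
		: [__old] "+d" (*old),
		  [__ptr] "+Q" (*ptr),
		  "=@cc" (cc)
		: [__new] "d" (new)
		: "memory");
	return cc == 0;
}
```

Without flag outputs, a fallback has to compare the value returned by cmpxchg() against the expected value to decide success; skipping that comparison on gcc 14+ is the "slightly better code" the commit message refers to.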