Skip to content

Commit

Permalink
Merge tag 'locking-core-2023-05-05' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Browse files Browse the repository at this point in the history

Pull locking updates from Ingo Molnar:

 - Introduce local{,64}_try_cmpxchg() - a slightly more optimal
   primitive, which will be used in perf events ring-buffer code

 - Simplify/modify rwsems on PREEMPT_RT, to address writer starvation

 - Misc cleanups/fixes

* tag 'locking-core-2023-05-05' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  locking/atomic: Correct (cmp)xchg() instrumentation
  locking/x86: Define arch_try_cmpxchg_local()
  locking/arch: Wire up local_try_cmpxchg()
  locking/generic: Wire up local{,64}_try_cmpxchg()
  locking/atomic: Add generic try_cmpxchg{,64}_local() support
  locking/rwbase: Mitigate indefinite writer starvation
  locking/arch: Rename all internal __xchg() names to __arch_xchg()
  • Loading branch information
torvalds committed May 5, 2023
2 parents d5ed10b + ec57032 commit b115d85
Show file tree
Hide file tree
Showing 33 changed files with 209 additions and 111 deletions.
10 changes: 5 additions & 5 deletions arch/alpha/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,15 @@
* Atomic exchange routines.
*/

#define ____xchg(type, args...) __xchg ## type ## _local(args)
#define ____xchg(type, args...) __arch_xchg ## type ## _local(args)
#define ____cmpxchg(type, args...) __cmpxchg ## type ## _local(args)
#include <asm/xchg.h>

#define xchg_local(ptr, x) \
({ \
__typeof__(*(ptr)) _x_ = (x); \
(__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_, \
sizeof(*(ptr))); \
(__typeof__(*(ptr))) __arch_xchg_local((ptr), (unsigned long)_x_,\
sizeof(*(ptr))); \
})

#define arch_cmpxchg_local(ptr, o, n) \
Expand All @@ -34,7 +34,7 @@

#undef ____xchg
#undef ____cmpxchg
#define ____xchg(type, args...) __xchg ##type(args)
#define ____xchg(type, args...) __arch_xchg ##type(args)
#define ____cmpxchg(type, args...) __cmpxchg ##type(args)
#include <asm/xchg.h>

Expand All @@ -48,7 +48,7 @@
__typeof__(*(ptr)) _x_ = (x); \
smp_mb(); \
__ret = (__typeof__(*(ptr))) \
__xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
__arch_xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
smp_mb(); \
__ret; \
})
Expand Down
12 changes: 10 additions & 2 deletions arch/alpha/include/asm/local.h
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,16 @@ static __inline__ long local_sub_return(long i, local_t * l)
return result;
}

#define local_cmpxchg(l, o, n) \
(cmpxchg_local(&((l)->a.counter), (o), (n)))
/*
 * local_cmpxchg - CPU-local compare-and-exchange on a local_t counter.
 * @l:   the local_t to operate on
 * @old: expected current value
 * @new: value to store if the counter equals @old
 *
 * Returns the value that was in l->a.counter before the operation;
 * the store of @new happened iff that returned value equals @old.
 */
static __inline__ long local_cmpxchg(local_t *l, long old, long new)
{
return cmpxchg_local(&l->a.counter, old, new);
}

/*
 * local_try_cmpxchg - CPU-local try-style compare-and-exchange.
 * @l:   the local_t to operate on
 * @old: pointer to the expected value; updated to the observed value
 *       on failure
 * @new: value to store if the counter equals *@old
 *
 * Returns true if the exchange happened, false otherwise.
 *
 * NOTE(review): the (s64 *) cast relies on 'long' being 64-bit on
 * this architecture (true on alpha), so the generic
 * try_cmpxchg_local() can write the observed value back through @old.
 */
static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
{
return try_cmpxchg_local(&l->a.counter, (s64 *)old, new);
}

#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
Expand Down
4 changes: 2 additions & 2 deletions arch/arc/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@
*/
#ifdef CONFIG_ARC_HAS_LLSC

#define __xchg(ptr, val) \
#define __arch_xchg(ptr, val) \
({ \
__asm__ __volatile__( \
" ex %0, [%1] \n" /* set new value */ \
Expand All @@ -102,7 +102,7 @@
\
switch(sizeof(*(_p_))) { \
case 4: \
_val_ = __xchg(_p_, _val_); \
_val_ = __arch_xchg(_p_, _val_); \
break; \
default: \
BUILD_BUG(); \
Expand Down
7 changes: 4 additions & 3 deletions arch/arm/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,8 @@
#define swp_is_buggy
#endif

static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
static inline unsigned long
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
extern void __bad_xchg(volatile void *, int);
unsigned long ret;
Expand Down Expand Up @@ -115,8 +116,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
}

#define arch_xchg_relaxed(ptr, x) ({ \
(__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), \
sizeof(*(ptr))); \
(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x), (ptr), \
sizeof(*(ptr))); \
})

#include <asm-generic/cmpxchg-local.h>
Expand Down
7 changes: 3 additions & 4 deletions arch/arm64/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,8 @@ __XCHG_CASE( , , mb_, 64, dmb ish, nop, , a, l, "memory")
#undef __XCHG_CASE

#define __XCHG_GEN(sfx) \
static __always_inline unsigned long __xchg##sfx(unsigned long x, \
volatile void *ptr, \
int size) \
static __always_inline unsigned long \
__arch_xchg##sfx(unsigned long x, volatile void *ptr, int size) \
{ \
switch (size) { \
case 1: \
Expand Down Expand Up @@ -93,7 +92,7 @@ __XCHG_GEN(_mb)
({ \
__typeof__(*(ptr)) __ret; \
__ret = (__typeof__(*(ptr))) \
__xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
__arch_xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
__ret; \
})

Expand Down
10 changes: 5 additions & 5 deletions arch/hexagon/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
#define _ASM_CMPXCHG_H

/*
* __xchg - atomically exchange a register and a memory location
* __arch_xchg - atomically exchange a register and a memory location
* @x: value to swap
* @ptr: pointer to memory
* @size: size of the value
Expand All @@ -19,8 +19,8 @@
* Note: there was an errata for V2 about .new's and memw_locked.
*
*/
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
int size)
static inline unsigned long
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
unsigned long retval;

Expand All @@ -42,8 +42,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
* Atomically swap the contents of a register with memory. Should be atomic
* between multiple CPU's and within interrupts on the same CPU.
*/
#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), (ptr), \
sizeof(*(ptr))))
#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__arch_xchg((unsigned long)(v), (ptr), \
sizeof(*(ptr))))

/*
* see rt-mutex-design.txt; cmpxchg supposedly checks if *ptr == A and swaps.
Expand Down
2 changes: 1 addition & 1 deletion arch/ia64/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
#include <uapi/asm/cmpxchg.h>

#define arch_xchg(ptr, x) \
({(__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));})
({(__typeof__(*(ptr))) __arch_xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));})

#define arch_cmpxchg(ptr, o, n) cmpxchg_acq((ptr), (o), (n))
#define arch_cmpxchg64(ptr, o, n) cmpxchg_acq((ptr), (o), (n))
Expand Down
4 changes: 2 additions & 2 deletions arch/ia64/include/uapi/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
*/
extern void ia64_xchg_called_with_bad_pointer(void);

#define __xchg(x, ptr, size) \
#define __arch_xchg(x, ptr, size) \
({ \
unsigned long __xchg_result; \
\
Expand Down Expand Up @@ -51,7 +51,7 @@ extern void ia64_xchg_called_with_bad_pointer(void);

#ifndef __KERNEL__
#define xchg(ptr, x) \
({(__typeof__(*(ptr))) __xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));})
({(__typeof__(*(ptr))) __arch_xchg((unsigned long) (x), (ptr), sizeof(*(ptr)));})
#endif

/*
Expand Down
4 changes: 2 additions & 2 deletions arch/loongarch/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ static inline unsigned int __xchg_small(volatile void *ptr, unsigned int val,
}

static __always_inline unsigned long
__xchg(volatile void *ptr, unsigned long x, int size)
__arch_xchg(volatile void *ptr, unsigned long x, int size)
{
switch (size) {
case 1:
Expand All @@ -87,7 +87,7 @@ __xchg(volatile void *ptr, unsigned long x, int size)
__typeof__(*(ptr)) __res; \
\
__res = (__typeof__(*(ptr))) \
__xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
__arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
\
__res; \
})
Expand Down
13 changes: 11 additions & 2 deletions arch/loongarch/include/asm/local.h
Original file line number Diff line number Diff line change
Expand Up @@ -56,8 +56,17 @@ static inline long local_sub_return(long i, local_t *l)
return result;
}

#define local_cmpxchg(l, o, n) \
((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
/*
 * local_cmpxchg - CPU-local compare-and-exchange on a local_t counter.
 * @l:   the local_t to operate on
 * @old: expected current value
 * @new: value to store if the counter equals @old
 *
 * Returns the previous counter value; the store happened iff that
 * value equals @old.
 */
static inline long local_cmpxchg(local_t *l, long old, long new)
{
return cmpxchg_local(&l->a.counter, old, new);
}

/*
 * local_try_cmpxchg - CPU-local try-style compare-and-exchange.
 * @l:   the local_t to operate on
 * @old: pointer to the expected value; updated to the observed value
 *       on failure
 * @new: value to store if the counter equals *@old
 *
 * Returns true if the exchange happened, false otherwise.
 */
static inline bool local_try_cmpxchg(local_t *l, long *old, long new)
{
/* Cast via typeof() so @old matches the counter's declared type. */
typeof(l->a.counter) *__old = (typeof(l->a.counter) *) old;
return try_cmpxchg_local(&l->a.counter, __old, new);
}

#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))

/**
Expand Down
6 changes: 3 additions & 3 deletions arch/m68k/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

#ifndef CONFIG_RMW_INSNS
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size)
{
unsigned long flags, tmp;

Expand Down Expand Up @@ -40,7 +40,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
return x;
}
#else
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size)
{
switch (size) {
case 1:
Expand Down Expand Up @@ -75,7 +75,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
}
#endif

#define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));})
#define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));})

#include <asm-generic/cmpxchg-local.h>

Expand Down
4 changes: 2 additions & 2 deletions arch/mips/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
unsigned long __arch_xchg(volatile void *ptr, unsigned long x, int size)
{
switch (size) {
case 1:
Expand Down Expand Up @@ -102,7 +102,7 @@ unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
smp_mb__before_llsc(); \
\
__res = (__typeof__(*(ptr))) \
__xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
__arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
\
smp_llsc_mb(); \
\
Expand Down
13 changes: 11 additions & 2 deletions arch/mips/include/asm/local.h
Original file line number Diff line number Diff line change
Expand Up @@ -94,8 +94,17 @@ static __inline__ long local_sub_return(long i, local_t * l)
return result;
}

#define local_cmpxchg(l, o, n) \
((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
/*
 * local_cmpxchg - CPU-local compare-and-exchange on a local_t counter.
 * @l:   the local_t to operate on
 * @old: expected current value
 * @new: value to store if the counter equals @old
 *
 * Returns the previous counter value; the store happened iff that
 * value equals @old.
 */
static __inline__ long local_cmpxchg(local_t *l, long old, long new)
{
return cmpxchg_local(&l->a.counter, old, new);
}

/*
 * local_try_cmpxchg - CPU-local try-style compare-and-exchange.
 * @l:   the local_t to operate on
 * @old: pointer to the expected value; updated to the observed value
 *       on failure
 * @new: value to store if the counter equals *@old
 *
 * Returns true if the exchange happened, false otherwise.
 */
static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
{
/* Cast via typeof() so @old matches the counter's declared type. */
typeof(l->a.counter) *__old = (typeof(l->a.counter) *) old;
return try_cmpxchg_local(&l->a.counter, __old, new);
}

#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))

/**
Expand Down
10 changes: 5 additions & 5 deletions arch/openrisc/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -147,8 +147,8 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
extern unsigned long __xchg_called_with_bad_pointer(void)
__compiletime_error("Bad argument size for xchg");

static inline unsigned long __xchg(volatile void *ptr, unsigned long with,
int size)
static inline unsigned long
__arch_xchg(volatile void *ptr, unsigned long with, int size)
{
switch (size) {
case 1:
Expand All @@ -163,9 +163,9 @@ static inline unsigned long __xchg(volatile void *ptr, unsigned long with,

#define arch_xchg(ptr, with) \
({ \
(__typeof__(*(ptr))) __xchg((ptr), \
(unsigned long)(with), \
sizeof(*(ptr))); \
(__typeof__(*(ptr))) __arch_xchg((ptr), \
(unsigned long)(with), \
sizeof(*(ptr))); \
})

#endif /* __ASM_OPENRISC_CMPXCHG_H */
4 changes: 2 additions & 2 deletions arch/parisc/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ extern unsigned long __xchg64(unsigned long, volatile unsigned long *);

/* optimizer better get rid of switch since size is a constant */
static inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
switch (size) {
#ifdef CONFIG_64BIT
Expand All @@ -49,7 +49,7 @@ __xchg(unsigned long x, volatile void *ptr, int size)
__typeof__(*(ptr)) __ret; \
__typeof__(*(ptr)) _x_ = (x); \
__ret = (__typeof__(*(ptr))) \
__xchg((unsigned long)_x_, (ptr), sizeof(*(ptr))); \
__arch_xchg((unsigned long)_x_, (ptr), sizeof(*(ptr))); \
__ret; \
})

Expand Down
4 changes: 2 additions & 2 deletions arch/powerpc/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,7 @@ __xchg_local(void *ptr, unsigned long x, unsigned int size)
return __xchg_u64_local(ptr, x);
#endif
}
BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg");
BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local");
return x;
}

Expand All @@ -248,7 +248,7 @@ __xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
return __xchg_u64_relaxed(ptr, x);
#endif
}
BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local");
BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_relaxed");
return x;
}
#define arch_xchg_local(ptr,x) \
Expand Down
11 changes: 11 additions & 0 deletions arch/powerpc/include/asm/local.h
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,17 @@ static __inline__ long local_cmpxchg(local_t *l, long o, long n)
return t;
}

/*
 * local_try_cmpxchg - CPU-local try-style compare-and-exchange,
 * built on top of local_cmpxchg().
 * @l:  the local_t to operate on
 * @po: pointer to the expected value; updated to the observed value
 *      on failure
 * @n:  value to store if the counter equals *@po
 *
 * Returns true if the exchange happened, false otherwise.
 */
static __inline__ bool local_try_cmpxchg(local_t *l, long *po, long n)
{
	long expected = *po;
	long observed = local_cmpxchg(l, expected, n);

	/* Success is the expected fast path. */
	if (likely(observed == expected))
		return true;

	/* Failed: report the value actually seen back to the caller. */
	*po = observed;
	return false;
}

static __inline__ long local_xchg(local_t *l, long n)
{
long t;
Expand Down
2 changes: 1 addition & 1 deletion arch/riscv/include/asm/atomic.h
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,7 @@ c_t arch_atomic##prefix##_xchg_release(atomic##prefix##_t *v, c_t n) \
static __always_inline \
c_t arch_atomic##prefix##_xchg(atomic##prefix##_t *v, c_t n) \
{ \
return __xchg(&(v->counter), n, size); \
return __arch_xchg(&(v->counter), n, size); \
} \
static __always_inline \
c_t arch_atomic##prefix##_cmpxchg_relaxed(atomic##prefix##_t *v, \
Expand Down
4 changes: 2 additions & 2 deletions arch/riscv/include/asm/cmpxchg.h
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@
_x_, sizeof(*(ptr))); \
})

#define __xchg(ptr, new, size) \
#define __arch_xchg(ptr, new, size) \
({ \
__typeof__(ptr) __ptr = (ptr); \
__typeof__(new) __new = (new); \
Expand Down Expand Up @@ -143,7 +143,7 @@
#define arch_xchg(ptr, x) \
({ \
__typeof__(*(ptr)) _x_ = (x); \
(__typeof__(*(ptr))) __xchg((ptr), _x_, sizeof(*(ptr))); \
(__typeof__(*(ptr))) __arch_xchg((ptr), _x_, sizeof(*(ptr))); \
})

#define xchg32(ptr, x) \
Expand Down
Loading

0 comments on commit b115d85

Please sign in to comment.