x86/asm: Modernize sync_bitops.h
Add missing instruction suffixes and use rmwcc.h, much as was (more or less)
recently done for bitops.h; see:

  22636f8: x86/asm: Add instruction suffixes to bitops
  288e452: x86/asm: 'Simplify' GEN_*_RMWcc() macros

No change in functionality intended.

Signed-off-by: Jan Beulich <[email protected]>
Cc: Andy Lutomirski <[email protected]>
Cc: Boris Ostrovsky <[email protected]>
Cc: Borislav Petkov <[email protected]>
Cc: Brian Gerst <[email protected]>
Cc: Denys Vlasenko <[email protected]>
Cc: H. Peter Anvin <[email protected]>
Cc: Juergen Gross <[email protected]>
Cc: Linus Torvalds <[email protected]>
Cc: Peter Zijlstra <[email protected]>
Cc: Thomas Gleixner <[email protected]>
Link: http://lkml.kernel.org/r/[email protected]
[ Cleaned up the changelog a bit. ]
Signed-off-by: Ingo Molnar <[email protected]>
jbeulich authored and Ingo Molnar committed Apr 10, 2019
1 parent 28e3ace commit 547571b
Showing 1 changed file with 9 additions and 22 deletions.
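
For context (not part of the commit): __ASM_SIZE() from asm/asm.h appends an explicit operand-size suffix to the mnemonic, so "lock; " __ASM_SIZE(bts) " %1,%0" assembles as "lock; btsl" on 32-bit and "lock; btsq" on 64-bit kernels, removing the operand-size ambiguity gas would otherwise have to guess at when the bit number is an immediate and the other operand is in memory. GEN_BINARY_RMWcc() from asm/rmwcc.h emits the locked read-modify-write instruction and returns the resulting condition-code flag as the function's boolean result, via asm goto or compiler flag outputs where available, falling back to an explicit setcc otherwise. As a rough sketch only (hypothetical name, not the real macro expansion), the fallback path of the new sync_test_and_set_bit() on a 64-bit build amounts to something like:

  static inline bool sync_test_and_set_bit_sketch(long nr, volatile unsigned long *addr)
  {
          bool oldbit;

          /* Explicitly suffixed btsq; the carry flag holds the old bit value. */
          asm volatile("lock; btsq %2,%1\n\t"
                       "setc %0"
                       : "=qm" (oldbit), "+m" (*addr)
                       : "Ir" (nr)
                       : "memory");
          return oldbit;
  }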
arch/x86/include/asm/sync_bitops.h (9 additions, 22 deletions)
@@ -14,6 +14,8 @@
  * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
  */
 
+#include <asm/rmwcc.h>
+
 #define ADDR (*(volatile long *)addr)
 
 /**
@@ -29,7 +31,7 @@
  */
 static inline void sync_set_bit(long nr, volatile unsigned long *addr)
 {
-        asm volatile("lock; bts %1,%0"
+        asm volatile("lock; " __ASM_SIZE(bts) " %1,%0"
                      : "+m" (ADDR)
                      : "Ir" (nr)
                      : "memory");
@@ -47,7 +49,7 @@ static inline void sync_set_bit(long nr, volatile unsigned long *addr)
  */
 static inline void sync_clear_bit(long nr, volatile unsigned long *addr)
 {
-        asm volatile("lock; btr %1,%0"
+        asm volatile("lock; " __ASM_SIZE(btr) " %1,%0"
                      : "+m" (ADDR)
                      : "Ir" (nr)
                      : "memory");
@@ -64,7 +66,7 @@ static inline void sync_clear_bit(long nr, volatile unsigned long *addr)
  */
 static inline void sync_change_bit(long nr, volatile unsigned long *addr)
 {
-        asm volatile("lock; btc %1,%0"
+        asm volatile("lock; " __ASM_SIZE(btc) " %1,%0"
                      : "+m" (ADDR)
                      : "Ir" (nr)
                      : "memory");
@@ -78,14 +80,9 @@ static inline void sync_change_bit(long nr, volatile unsigned long *addr)
  * This operation is atomic and cannot be reordered.
  * It also implies a memory barrier.
  */
-static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
+static inline bool sync_test_and_set_bit(long nr, volatile unsigned long *addr)
 {
-        unsigned char oldbit;
-
-        asm volatile("lock; bts %2,%1\n\tsetc %0"
-                     : "=qm" (oldbit), "+m" (ADDR)
-                     : "Ir" (nr) : "memory");
-        return oldbit;
+        return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(bts), *addr, c, "Ir", nr);
 }
 
 /**
@@ -98,12 +95,7 @@ static inline int sync_test_and_set_bit(long nr, volatile unsigned long *addr)
  */
 static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
-        unsigned char oldbit;
-
-        asm volatile("lock; btr %2,%1\n\tsetc %0"
-                     : "=qm" (oldbit), "+m" (ADDR)
-                     : "Ir" (nr) : "memory");
-        return oldbit;
+        return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(btr), *addr, c, "Ir", nr);
 }
 
 /**
@@ -116,12 +108,7 @@ static inline int sync_test_and_clear_bit(long nr, volatile unsigned long *addr)
  */
 static inline int sync_test_and_change_bit(long nr, volatile unsigned long *addr)
 {
-        unsigned char oldbit;
-
-        asm volatile("lock; btc %2,%1\n\tsetc %0"
-                     : "=qm" (oldbit), "+m" (ADDR)
-                     : "Ir" (nr) : "memory");
-        return oldbit;
+        return GEN_BINARY_RMWcc("lock; " __ASM_SIZE(btc), *addr, c, "Ir", nr);
 }
 
 #define sync_test_bit(nr, addr) test_bit(nr, addr)
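
The sync_*() helpers always emit the lock prefix, even on UP builds, because they operate on memory shared with another agent such as a hypervisor. As a usage sketch with hypothetical names (not part of the commit), callers depend on the test_and_*() variants returning the previous bit value, which the GEN_BINARY_RMWcc() form preserves:

  /* Hypothetical caller: atomically claim slot 'evt' in a bitmap shared with
   * another agent; sync_test_and_set_bit() returns the old bit, so a false
   * result means this caller won the slot.
   */
  static bool claim_event_slot(volatile unsigned long *shared_pending, int evt)
  {
          return !sync_test_and_set_bit(evt, shared_pending);
  }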
