x86: spinlock ops are always-inlined
Signed-off-by: Ingo Molnar <[email protected]>
Ingo Molnar committed Apr 17, 2008
1 parent: d93c870 · commit: 7fda20f
Showing 1 changed file with 6 additions and 6 deletions.
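
The change is mechanical: six spinlock primitives go from "inline" to
"__always_inline". Plain "inline" is only a hint, and a size-optimizing
build (-Os, CONFIG_CC_OPTIMIZE_FOR_SIZE) may still emit these tiny lock
ops as out-of-line functions; __always_inline forces substitution at
every call site. Below is a minimal sketch of the GCC-style attribute
involved; the kernel defines it in its own compiler headers, so the
exact form here is an assumption, not code from this tree.

/*
 * Sketch of the attribute this commit switches to. The GCC form below
 * is an assumption; the kernel's definition lives in its compiler headers.
 */
#define __always_inline inline __attribute__((always_inline))

/*
 * With plain "inline" the compiler may still emit an out-of-line body
 * and call it; with __always_inline the body is substituted at every
 * call site even under -Os. example_add_one is hypothetical.
 */
static __always_inline int example_add_one(int x)
{
	return x + 1;
}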
diff --git a/include/asm-x86/spinlock.h b/include/asm-x86/spinlock.h
--- a/include/asm-x86/spinlock.h
+++ b/include/asm-x86/spinlock.h
@@ -78,7 +78,7 @@ static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
 	return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1;
 }
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
 	short inc = 0x0100;
 
@@ -99,7 +99,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
 	int tmp;
 	short new;
@@ -120,7 +120,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	return tmp;
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	asm volatile(UNLOCK_LOCK_PREFIX "incb %0"
 		     : "+m" (lock->slock)
@@ -142,7 +142,7 @@ static inline int __raw_spin_is_contended(raw_spinlock_t *lock)
 	return (((tmp >> 16) & 0xffff) - (tmp & 0xffff)) > 1;
 }
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
 	int inc = 0x00010000;
 	int tmp;
@@ -165,7 +165,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static __always_inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
 	int tmp;
 	int new;
@@ -187,7 +187,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	return tmp;
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static __always_inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	asm volatile(UNLOCK_LOCK_PREFIX "incw %0"
 		     : "+m" (lock->slock)
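
For context on the functions being annotated: these are x86 ticket
spinlocks. In the variant for fewer than 256 CPUs, the 16-bit slock
word keeps the ticket currently being served in its low byte and the
next free ticket in its high byte; __raw_spin_lock takes a ticket with
a locked "xaddw" of 0x0100 and spins until the owner byte matches, and
__raw_spin_unlock is a single "incb". Below is a minimal user-space
sketch of that layout using GCC __atomic builtins in place of the
kernel's raw asm; the names and builtin usage are illustrative
assumptions, not kernel code.

#include <stdint.h>

/* Byte-wide ticket lock sketch mirroring the <256-CPU variant above:
 * low byte of slock = ticket being served, high byte = next ticket.
 * Names and __atomic builtins are illustrative, not the kernel's. */
typedef struct { uint16_t slock; } ticket_lock_t;

static void ticket_lock(ticket_lock_t *lock)
{
	/* The "xaddw 0x0100" step: take a ticket, bump the next-ticket byte. */
	uint16_t t = __atomic_fetch_add(&lock->slock, 0x0100, __ATOMIC_ACQUIRE);
	uint8_t my_ticket = (uint8_t)(t >> 8);

	/* The spin: wait until the owner (low) byte reaches our ticket. */
	while ((uint8_t)__atomic_load_n(&lock->slock, __ATOMIC_ACQUIRE) != my_ticket)
		;	/* the kernel version executes "rep; nop" (pause) here */
}

static void ticket_unlock(ticket_lock_t *lock)
{
	/* The "incb %0" step: advance the owner byte (offset 0, since x86
	 * is little-endian) to admit the next waiter. */
	__atomic_fetch_add((uint8_t *)&lock->slock, 1, __ATOMIC_RELEASE);
}

static int ticket_is_contended(ticket_lock_t *lock)
{
	uint16_t tmp = __atomic_load_n(&lock->slock, __ATOMIC_RELAXED);
	/* More than one ticket beyond the owner means someone is waiting. */
	return (((tmp >> 8) & 0xff) - (tmp & 0xff)) > 1;
}

Each of these bodies compiles to a handful of instructions, so an
out-of-line call would add a cost comparable to the lock operation
itself; that is presumably why the commit forces inlining even in
size-optimized builds.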
