Diffstat (limited to 'include/linux/spinlock.h')
-rw-r--r-- | include/linux/spinlock.h | 30
1 file changed, 15 insertions, 15 deletions
diff --git a/include/linux/spinlock.h b/include/linux/spinlock.h
index 0063b24b4f36..ffcd053ca89a 100644
--- a/include/linux/spinlock.h
+++ b/include/linux/spinlock.h
@@ -296,7 +296,7 @@ static inline void do_raw_spin_unlock(raw_spinlock_t *lock) __releases(lock)
  * Map the spin_lock functions to the raw variants for PREEMPT_RT=n
  */
 
-static inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
+static __always_inline raw_spinlock_t *spinlock_check(spinlock_t *lock)
 {
 	return &lock->rlock;
 }
@@ -307,17 +307,17 @@ do { \
 	raw_spin_lock_init(&(_lock)->rlock);		\
 } while (0)
 
-static inline void spin_lock(spinlock_t *lock)
+static __always_inline void spin_lock(spinlock_t *lock)
 {
 	raw_spin_lock(&lock->rlock);
 }
 
-static inline void spin_lock_bh(spinlock_t *lock)
+static __always_inline void spin_lock_bh(spinlock_t *lock)
 {
 	raw_spin_lock_bh(&lock->rlock);
 }
 
-static inline int spin_trylock(spinlock_t *lock)
+static __always_inline int spin_trylock(spinlock_t *lock)
 {
 	return raw_spin_trylock(&lock->rlock);
 }
@@ -337,7 +337,7 @@ do { \
 	raw_spin_lock_nest_lock(spinlock_check(lock), nest_lock);	\
 } while (0)
 
-static inline void spin_lock_irq(spinlock_t *lock)
+static __always_inline void spin_lock_irq(spinlock_t *lock)
 {
 	raw_spin_lock_irq(&lock->rlock);
 }
@@ -352,32 +352,32 @@ do { \
 	raw_spin_lock_irqsave_nested(spinlock_check(lock), flags, subclass); \
 } while (0)
 
-static inline void spin_unlock(spinlock_t *lock)
+static __always_inline void spin_unlock(spinlock_t *lock)
 {
 	raw_spin_unlock(&lock->rlock);
 }
 
-static inline void spin_unlock_bh(spinlock_t *lock)
+static __always_inline void spin_unlock_bh(spinlock_t *lock)
 {
 	raw_spin_unlock_bh(&lock->rlock);
 }
 
-static inline void spin_unlock_irq(spinlock_t *lock)
+static __always_inline void spin_unlock_irq(spinlock_t *lock)
 {
 	raw_spin_unlock_irq(&lock->rlock);
 }
 
-static inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
+static __always_inline void spin_unlock_irqrestore(spinlock_t *lock, unsigned long flags)
 {
 	raw_spin_unlock_irqrestore(&lock->rlock, flags);
 }
 
-static inline int spin_trylock_bh(spinlock_t *lock)
+static __always_inline int spin_trylock_bh(spinlock_t *lock)
 {
 	return raw_spin_trylock_bh(&lock->rlock);
 }
 
-static inline int spin_trylock_irq(spinlock_t *lock)
+static __always_inline int spin_trylock_irq(spinlock_t *lock)
 {
 	return raw_spin_trylock_irq(&lock->rlock);
 }
@@ -387,22 +387,22 @@ static inline int spin_trylock_irq(spinlock_t *lock)
 	raw_spin_trylock_irqsave(spinlock_check(lock), flags); \
 })
 
-static inline void spin_unlock_wait(spinlock_t *lock)
+static __always_inline void spin_unlock_wait(spinlock_t *lock)
 {
 	raw_spin_unlock_wait(&lock->rlock);
 }
 
-static inline int spin_is_locked(spinlock_t *lock)
+static __always_inline int spin_is_locked(spinlock_t *lock)
 {
 	return raw_spin_is_locked(&lock->rlock);
 }
 
-static inline int spin_is_contended(spinlock_t *lock)
+static __always_inline int spin_is_contended(spinlock_t *lock)
 {
 	return raw_spin_is_contended(&lock->rlock);
 }
 
-static inline int spin_can_lock(spinlock_t *lock)
+static __always_inline int spin_can_lock(spinlock_t *lock)
 {
 	return raw_spin_can_lock(&lock->rlock);
 }
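For context, the functions touched above are the spin_lock wrappers most kernel code calls; the change only strengthens the inline hint from inline to __always_inline, so each wrapper is guaranteed to collapse into its raw_spin_* counterpart rather than being left to the compiler's inlining heuristics. A minimal illustrative caller is sketched below; it is not part of this commit, and example_lock, example_counter and example_update are hypothetical names used only for the sketch.

#include <linux/spinlock.h>

static DEFINE_SPINLOCK(example_lock);	/* hypothetical lock, for illustration only */
static unsigned long example_counter;

static void example_update(void)
{
	unsigned long flags;

	/*
	 * spin_lock_irqsave() is a macro; spin_unlock_irqrestore() is one of
	 * the wrappers marked __always_inline above, so it now always reduces
	 * to raw_spin_unlock_irqrestore(&example_lock.rlock, flags).
	 */
	spin_lock_irqsave(&example_lock, flags);
	example_counter++;
	spin_unlock_irqrestore(&example_lock, flags);
}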