Diffstat (limited to 'include')
 include/asm-generic/atomic-instrumented.h |  2 +-
 include/asm-generic/atomic64.h            |  3 ++-
 include/linux/atomic.h                    | 24 +++++++++++++-----------
 3 files changed, 16 insertions(+), 13 deletions(-)
diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
index b8b14cc2df6c..497faa4a05e3 100644
--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -205,7 +205,7 @@ static __always_inline s64 atomic64_dec_return(atomic64_t *v)
return arch_atomic64_dec_return(v);
}
-static __always_inline s64 atomic64_inc_not_zero(atomic64_t *v)
+static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
{
kasan_check_write(v, sizeof(*v));
return arch_atomic64_inc_not_zero(v);
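
Returning bool here makes the wrapper's type match its meaning: a caller of atomic64_inc_not_zero() only learns whether the increment happened, not a 64-bit value. A minimal sketch of the usual try-get idiom on the lookup side, with a hypothetical struct obj and refcount field that are not part of this patch:

struct obj {
	atomic64_t refcnt;
};

static bool obj_tryget(struct obj *obj)
{
	/* true: we pinned a reference; false: the final put already ran. */
	return atomic64_inc_not_zero(&obj->refcnt);
}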
diff --git a/include/asm-generic/atomic64.h b/include/asm-generic/atomic64.h
index 8d28eb010d0d..a951a721e1bb 100644
--- a/include/asm-generic/atomic64.h
+++ b/include/asm-generic/atomic64.h
@@ -11,6 +11,7 @@
*/
#ifndef _ASM_GENERIC_ATOMIC64_H
#define _ASM_GENERIC_ATOMIC64_H
+#include <linux/types.h>
typedef struct {
long long counter;
@@ -52,7 +53,7 @@ ATOMIC64_OPS(xor)
extern long long atomic64_dec_if_positive(atomic64_t *v);
extern long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n);
extern long long atomic64_xchg(atomic64_t *v, long long new);
-extern int atomic64_add_unless(atomic64_t *v, long long a, long long u);
+extern bool atomic64_add_unless(atomic64_t *v, long long a, long long u);
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v) atomic64_add(1LL, (v))
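
The new #include <linux/types.h> pulls in the kernel's bool typedef, which the changed declaration now needs. With that in place, atomic64_add_unless() reads as a predicate: add @a unless the counter currently holds @u, and report whether the add was done. A hedged sketch of one way a caller might use it, with a hypothetical sentinel value (names are illustrative, not from this patch):

#define COUNTER_POISON	(-1LL)

/* Charge one unit unless the counter has been poisoned. */
static bool counter_charge(atomic64_t *counter)
{
	return atomic64_add_unless(counter, 1LL, COUNTER_POISON);
}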
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 5c5620ae5a35..307a7f6d619a 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -2,6 +2,8 @@
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
+#include <linux/types.h>
+
#include <asm/atomic.h>
#include <asm/barrier.h>
@@ -525,10 +527,10 @@
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
- * Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Atomically adds @a to @v, if @v was not already @u.
+ * Returns true if the addition was done.
*/
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
return atomic_fetch_add_unless(v, a, u) != u;
}
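
The bool conversion falls out of atomic_fetch_add_unless(), which returns the value @v held before the operation: the add was performed exactly when that old value differed from @u. A portable C11 sketch of the same semantics (illustrative only, not the kernel implementation):

#include <stdatomic.h>
#include <stdbool.h>

static int fetch_add_unless(atomic_int *v, int a, int u)
{
	int old = atomic_load(v);

	/* Retry until we install old + a, or observe the excluded value. */
	while (old != u && !atomic_compare_exchange_weak(v, &old, old + a))
		;

	return old;
}

static bool add_unless(atomic_int *v, int a, int u)
{
	/* old != u <=> the addition was done, mirroring the kernel helper. */
	return fetch_add_unless(v, a, u) != u;
}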
@@ -537,8 +539,8 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
* atomic_inc_not_zero - increment unless the number is zero
* @v: pointer of type atomic_t
*
- * Atomically increments @v by 1, so long as @v is non-zero.
- * Returns non-zero if @v was non-zero, and zero otherwise.
+ * Atomically increments @v by 1, if @v is non-zero.
+ * Returns true if the increment was done.
*/
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
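
Expanding the macro, atomic_inc_not_zero(v) is atomic_add_unless(v, 1, 0): take a reference only while the count is still live. With the bool return the caller's check reads naturally; a hypothetical lookup path (struct item and its refs field are illustrative):

struct item {
	atomic_t refs;
};

static struct item *item_find(struct item *candidate)
{
	/* The final put may race with us; return only items we pinned. */
	if (candidate && !atomic_inc_not_zero(&candidate->refs))
		candidate = NULL;

	return candidate;
}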
@@ -572,28 +574,28 @@ static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
#endif
#ifndef atomic_inc_unless_negative
-static inline int atomic_inc_unless_negative(atomic_t *p)
+static inline bool atomic_inc_unless_negative(atomic_t *p)
{
int v, v1;
for (v = 0; v >= 0; v = v1) {
v1 = atomic_cmpxchg(p, v, v + 1);
if (likely(v1 == v))
- return 1;
+ return true;
}
- return 0;
+ return false;
}
#endif
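
The loop above is the classic optimistic cmpxchg pattern: guess a value (starting from 0, the common case), and atomic_cmpxchg() hands back what @p actually held. Equality with the guess means our increment was installed; anything else becomes the next guess, and a negative guess ends the for-condition as failure. A compact C11 rendering of the same loop, loading the initial guess instead of assuming 0 (illustrative, not kernel code):

#include <stdatomic.h>
#include <stdbool.h>

static bool inc_unless_negative(atomic_int *p)
{
	int v = atomic_load(p);

	while (v >= 0) {
		/* On failure, v is refreshed with the current value. */
		if (atomic_compare_exchange_weak(p, &v, v + 1))
			return true;
	}

	return false;
}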
#ifndef atomic_dec_unless_positive
-static inline int atomic_dec_unless_positive(atomic_t *p)
+static inline bool atomic_dec_unless_positive(atomic_t *p)
{
int v, v1;
for (v = 0; v <= 0; v = v1) {
v1 = atomic_cmpxchg(p, v, v - 1);
if (likely(v1 == v))
- return 1;
+ return true;
}
- return 0;
+ return false;
}
#endif
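
atomic_dec_unless_positive() is the mirror image: it decrements only while the value is zero or negative, using the same guess-and-retry structure with the comparison flipped. A hypothetical caller gating a down-only counter (the name and scenario are illustrative):

/* Step further into a drain phase unless an active count blocks it. */
static bool drain_step(atomic_t *active)
{
	return atomic_dec_unless_positive(active);
}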