Diffstat (limited to 'arch/tile/include')
-rw-r--r--  arch/tile/include/asm/atomic.h       | 14
-rw-r--r--  arch/tile/include/asm/atomic_32.h    | 14
-rw-r--r--  arch/tile/include/asm/atomic_64.h    |  6
-rw-r--r--  arch/tile/include/asm/bitops_32.h    |  2
-rw-r--r--  arch/tile/include/asm/bitops_64.h    |  7
-rw-r--r--  arch/tile/include/asm/ptrace.h       |  2
-rw-r--r--  arch/tile/include/asm/spinlock_32.h  |  2
7 files changed, 14 insertions(+), 33 deletions(-)
diff --git a/arch/tile/include/asm/atomic.h b/arch/tile/include/asm/atomic.h
index 739cfe0499d1..921dbeb8a70c 100644
--- a/arch/tile/include/asm/atomic.h
+++ b/arch/tile/include/asm/atomic.h
@@ -121,15 +121,6 @@ static inline int atomic_read(const atomic_t *v)
*/
#define atomic_add_negative(i, v) (atomic_add_return((i), (v)) < 0)
-/**
- * atomic_inc_not_zero - increment unless the number is zero
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1, so long as @v is non-zero.
- * Returns non-zero if @v was non-zero, and zero otherwise.
- */
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-
/* Nonexistent functions intended to cause link errors. */
extern unsigned long __xchg_called_with_bad_pointer(void);
extern unsigned long __cmpxchg_called_with_bad_pointer(void);
@@ -186,9 +177,4 @@ extern unsigned long __cmpxchg_called_with_bad_pointer(void);
#include <asm/atomic_64.h>
#endif
-/* Provide the appropriate atomic_long_t definitions. */
-#ifndef __ASSEMBLY__
-#include <asm-generic/atomic-long.h>
-#endif
-
#endif /* _ASM_TILE_ATOMIC_H */
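
The removal of atomic_inc_not_zero() and of the asm-generic/atomic-long.h include from this header relies on <linux/atomic.h> now providing them generically on top of the per-architecture __atomic_add_unless(). A minimal sketch of that assumed generic layer (illustrative, not necessarily the exact upstream code):

/* Generic wrapper assumed to live in <linux/atomic.h>: */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	/* __atomic_add_unless() returns the old value; success means it was not u. */
	return __atomic_add_unless(v, a, u) != u;
}

/* ...and atomic_inc_not_zero() falls out of it, unchanged for callers: */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
#endif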
diff --git a/arch/tile/include/asm/atomic_32.h b/arch/tile/include/asm/atomic_32.h
index 92a8bee32311..c03349e0ca9f 100644
--- a/arch/tile/include/asm/atomic_32.h
+++ b/arch/tile/include/asm/atomic_32.h
@@ -11,7 +11,7 @@
* NON INFRINGEMENT. See the GNU General Public License for
* more details.
*
- * Do not include directly; use <asm/atomic.h>.
+ * Do not include directly; use <linux/atomic.h>.
*/
#ifndef _ASM_TILE_ATOMIC_32_H
@@ -21,7 +21,7 @@
#ifndef __ASSEMBLY__
-/* Tile-specific routines to support <asm/atomic.h>. */
+/* Tile-specific routines to support <linux/atomic.h>. */
int _atomic_xchg(atomic_t *v, int n);
int _atomic_xchg_add(atomic_t *v, int i);
int _atomic_xchg_add_unless(atomic_t *v, int a, int u);
@@ -81,18 +81,18 @@ static inline int atomic_add_return(int i, atomic_t *v)
}
/**
- * atomic_add_unless - add unless the number is already a given value
+ * __atomic_add_unless - add unless the number is already a given value
* @v: pointer of type atomic_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
*/
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
smp_mb(); /* barrier for proper semantics */
- return _atomic_xchg_add_unless(v, a, u) != u;
+ return _atomic_xchg_add_unless(v, a, u);
}
/**
@@ -199,7 +199,7 @@ static inline u64 atomic64_add_return(u64 i, atomic64_t *v)
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
*/
static inline u64 atomic64_add_unless(atomic64_t *v, u64 a, u64 u)
{
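
Note the contract change that comes with the rename: the old atomic_add_unless() returned a boolean ("was not @u"), while __atomic_add_unless() returns the old value and leaves the comparison to the generic wrapper. A caller-side illustration, using a hypothetical refcount field:

/* Old boolean-style test: */
if (atomic_add_unless(&obj->refcnt, 1, 0)) {
	/* got a reference */
}

/* The same test written directly against the new primitive: */
if (__atomic_add_unless(&obj->refcnt, 1, 0) != 0) {
	/* got a reference: the old count was non-zero */
}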
diff --git a/arch/tile/include/asm/atomic_64.h b/arch/tile/include/asm/atomic_64.h
index 1c1e60d8ccb6..27fe667fddfe 100644
--- a/arch/tile/include/asm/atomic_64.h
+++ b/arch/tile/include/asm/atomic_64.h
@@ -11,7 +11,7 @@
* NON INFRINGEMENT. See the GNU General Public License for
* more details.
*
- * Do not include directly; use <asm/atomic.h>.
+ * Do not include directly; use <linux/atomic.h>.
*/
#ifndef _ASM_TILE_ATOMIC_64_H
@@ -64,7 +64,7 @@ static inline int atomic_add_return(int i, atomic_t *v)
return val;
}
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
int guess, oldval = v->counter;
do {
@@ -73,7 +73,7 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
guess = oldval;
oldval = atomic_cmpxchg(v, guess, guess + a);
} while (guess != oldval);
- return oldval != u;
+ return oldval;
}
/* Now the true 64-bit operations. */
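
The 64-bit implementation above uses a cmpxchg guess-and-retry loop: read the counter, bail out if it equals @u, otherwise try to install the incremented value and retry with the freshly observed value on failure. A self-contained C11 sketch of the same pattern outside the kernel (atomic_int and atomic_compare_exchange_weak() stand in for atomic_t and atomic_cmpxchg(); purely illustrative):

#include <stdatomic.h>

/* Add 'a' to *v unless it currently equals 'u'; return the value seen. */
static int add_unless(atomic_int *v, int a, int u)
{
	int old = atomic_load(v);

	do {
		if (old == u)
			break;		/* nothing to do; report the value we saw */
	} while (!atomic_compare_exchange_weak(v, &old, old + a));

	return old;
}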
diff --git a/arch/tile/include/asm/bitops_32.h b/arch/tile/include/asm/bitops_32.h
index d31ab905cfa7..571b118bfd9b 100644
--- a/arch/tile/include/asm/bitops_32.h
+++ b/arch/tile/include/asm/bitops_32.h
@@ -16,7 +16,7 @@
#define _ASM_TILE_BITOPS_32_H
#include <linux/compiler.h>
-#include <asm/atomic.h>
+#include <linux/atomic.h>
#include <asm/system.h>
/* Tile-specific routines to support <asm/bitops.h>. */
diff --git a/arch/tile/include/asm/bitops_64.h b/arch/tile/include/asm/bitops_64.h
index 99615e8d2d8b..e9c8e381ee0e 100644
--- a/arch/tile/include/asm/bitops_64.h
+++ b/arch/tile/include/asm/bitops_64.h
@@ -16,7 +16,7 @@
#define _ASM_TILE_BITOPS_64_H
#include <linux/compiler.h>
-#include <asm/atomic.h>
+#include <linux/atomic.h>
#include <asm/system.h>
/* See <asm/bitops.h> for API comments. */
@@ -97,9 +97,6 @@ static inline int test_and_change_bit(unsigned nr,
return (oldval & mask) != 0;
}
-#define ext2_set_bit_atomic(lock, nr, addr) \
- test_and_set_bit((nr), (unsigned long *)(addr))
-#define ext2_clear_bit_atomic(lock, nr, addr) \
- test_and_clear_bit((nr), (unsigned long *)(addr))
+#include <asm-generic/bitops/ext2-atomic-setbit.h>
#endif /* _ASM_TILE_BITOPS_64_H */
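
The removed ext2 wrappers are replaced by the asm-generic helper; that header is expected to expand to the same test_and_set_bit()/test_and_clear_bit() calls while ignoring the lock argument (a sketch of its assumed shape, not necessarily the exact upstream text):

/* Assumed shape of <asm-generic/bitops/ext2-atomic-setbit.h>: */
#define ext2_set_bit_atomic(lock, nr, addr)	test_and_set_bit((nr), (addr))
#define ext2_clear_bit_atomic(lock, nr, addr)	test_and_clear_bit((nr), (addr))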
diff --git a/arch/tile/include/asm/ptrace.h b/arch/tile/include/asm/ptrace.h
index 6be2246e015c..c6cddd7e8d51 100644
--- a/arch/tile/include/asm/ptrace.h
+++ b/arch/tile/include/asm/ptrace.h
@@ -112,8 +112,6 @@ struct pt_regs *get_pt_regs(struct pt_regs *);
/* Trace the current syscall. */
extern void do_syscall_trace(void);
-extern void show_regs(struct pt_regs *);
-
#define arch_has_single_step() (1)
/*
diff --git a/arch/tile/include/asm/spinlock_32.h b/arch/tile/include/asm/spinlock_32.h
index a8f2c6e31a87..a5e4208d34f9 100644
--- a/arch/tile/include/asm/spinlock_32.h
+++ b/arch/tile/include/asm/spinlock_32.h
@@ -17,7 +17,7 @@
#ifndef _ASM_TILE_SPINLOCK_32_H
#define _ASM_TILE_SPINLOCK_32_H
-#include <asm/atomic.h>
+#include <linux/atomic.h>
#include <asm/page.h>
#include <asm/system.h>
#include <linux/compiler.h>