Diffstat (limited to 'include/asm-generic')
-rw-r--r--  include/asm-generic/atomic-instrumented.h  | 1337
-rw-r--r--  include/asm-generic/atomic-long.h           | 1014
-rw-r--r--  include/asm-generic/bitops/atomic.h         |   32
-rw-r--r--  include/asm-generic/bitops/builtin-ffs.h    |    2
-rw-r--r--  include/asm-generic/bitops/ffs.h            |    2
-rw-r--r--  include/asm-generic/bitops/lock.h           |   39
-rw-r--r--  include/asm-generic/bitops/non-atomic.h     |   39
-rw-r--r--  include/asm-generic/compat.h                |   17
-rw-r--r--  include/asm-generic/uaccess.h               |   53
-rw-r--r--  include/asm-generic/vmlinux.lds.h           |    1
10 files changed, 96 insertions(+), 2440 deletions(-)
diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
deleted file mode 100644
index bc45af52c93b..000000000000
--- a/include/asm-generic/atomic-instrumented.h
+++ /dev/null
@@ -1,1337 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0
-
-// Generated by scripts/atomic/gen-atomic-instrumented.sh
-// DO NOT MODIFY THIS FILE DIRECTLY
-
-/*
- * This file provides wrappers with KASAN instrumentation for atomic operations.
- * To use this functionality an arch's atomic.h file needs to define all
- * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
- * this file at the end. This file provides atomic_read() that forwards to
- * arch_atomic_read() for actual atomic operation.
- * Note: if an arch atomic operation is implemented by means of other atomic
- * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
- * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
- * double instrumentation.
- */
-#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
-#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
-
-#include <linux/build_bug.h>
-#include <linux/compiler.h>
-#include <linux/instrumented.h>
-
-static __always_inline int
-atomic_read(const atomic_t *v)
-{
- instrument_atomic_read(v, sizeof(*v));
- return arch_atomic_read(v);
-}
-
-static __always_inline int
-atomic_read_acquire(const atomic_t *v)
-{
- instrument_atomic_read(v, sizeof(*v));
- return arch_atomic_read_acquire(v);
-}
-
-static __always_inline void
-atomic_set(atomic_t *v, int i)
-{
- instrument_atomic_write(v, sizeof(*v));
- arch_atomic_set(v, i);
-}
-
-static __always_inline void
-atomic_set_release(atomic_t *v, int i)
-{
- instrument_atomic_write(v, sizeof(*v));
- arch_atomic_set_release(v, i);
-}
-
-static __always_inline void
-atomic_add(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic_add(i, v);
-}
-
-static __always_inline int
-atomic_add_return(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_add_return(i, v);
-}
-
-static __always_inline int
-atomic_add_return_acquire(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_add_return_acquire(i, v);
-}
-
-static __always_inline int
-atomic_add_return_release(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_add_return_release(i, v);
-}
-
-static __always_inline int
-atomic_add_return_relaxed(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_add_return_relaxed(i, v);
-}
-
-static __always_inline int
-atomic_fetch_add(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_add(i, v);
-}
-
-static __always_inline int
-atomic_fetch_add_acquire(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_add_acquire(i, v);
-}
-
-static __always_inline int
-atomic_fetch_add_release(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_add_release(i, v);
-}
-
-static __always_inline int
-atomic_fetch_add_relaxed(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_add_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_sub(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic_sub(i, v);
-}
-
-static __always_inline int
-atomic_sub_return(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_sub_return(i, v);
-}
-
-static __always_inline int
-atomic_sub_return_acquire(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_sub_return_acquire(i, v);
-}
-
-static __always_inline int
-atomic_sub_return_release(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_sub_return_release(i, v);
-}
-
-static __always_inline int
-atomic_sub_return_relaxed(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_sub_return_relaxed(i, v);
-}
-
-static __always_inline int
-atomic_fetch_sub(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_sub(i, v);
-}
-
-static __always_inline int
-atomic_fetch_sub_acquire(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_sub_acquire(i, v);
-}
-
-static __always_inline int
-atomic_fetch_sub_release(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_sub_release(i, v);
-}
-
-static __always_inline int
-atomic_fetch_sub_relaxed(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_sub_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_inc(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic_inc(v);
-}
-
-static __always_inline int
-atomic_inc_return(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_inc_return(v);
-}
-
-static __always_inline int
-atomic_inc_return_acquire(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_inc_return_acquire(v);
-}
-
-static __always_inline int
-atomic_inc_return_release(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_inc_return_release(v);
-}
-
-static __always_inline int
-atomic_inc_return_relaxed(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_inc_return_relaxed(v);
-}
-
-static __always_inline int
-atomic_fetch_inc(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_inc(v);
-}
-
-static __always_inline int
-atomic_fetch_inc_acquire(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_inc_acquire(v);
-}
-
-static __always_inline int
-atomic_fetch_inc_release(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_inc_release(v);
-}
-
-static __always_inline int
-atomic_fetch_inc_relaxed(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_inc_relaxed(v);
-}
-
-static __always_inline void
-atomic_dec(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic_dec(v);
-}
-
-static __always_inline int
-atomic_dec_return(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_dec_return(v);
-}
-
-static __always_inline int
-atomic_dec_return_acquire(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_dec_return_acquire(v);
-}
-
-static __always_inline int
-atomic_dec_return_release(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_dec_return_release(v);
-}
-
-static __always_inline int
-atomic_dec_return_relaxed(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_dec_return_relaxed(v);
-}
-
-static __always_inline int
-atomic_fetch_dec(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_dec(v);
-}
-
-static __always_inline int
-atomic_fetch_dec_acquire(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_dec_acquire(v);
-}
-
-static __always_inline int
-atomic_fetch_dec_release(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_dec_release(v);
-}
-
-static __always_inline int
-atomic_fetch_dec_relaxed(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_dec_relaxed(v);
-}
-
-static __always_inline void
-atomic_and(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic_and(i, v);
-}
-
-static __always_inline int
-atomic_fetch_and(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_and(i, v);
-}
-
-static __always_inline int
-atomic_fetch_and_acquire(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_and_acquire(i, v);
-}
-
-static __always_inline int
-atomic_fetch_and_release(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_and_release(i, v);
-}
-
-static __always_inline int
-atomic_fetch_and_relaxed(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_and_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_andnot(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic_andnot(i, v);
-}
-
-static __always_inline int
-atomic_fetch_andnot(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_andnot(i, v);
-}
-
-static __always_inline int
-atomic_fetch_andnot_acquire(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_andnot_acquire(i, v);
-}
-
-static __always_inline int
-atomic_fetch_andnot_release(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_andnot_release(i, v);
-}
-
-static __always_inline int
-atomic_fetch_andnot_relaxed(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_andnot_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_or(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic_or(i, v);
-}
-
-static __always_inline int
-atomic_fetch_or(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_or(i, v);
-}
-
-static __always_inline int
-atomic_fetch_or_acquire(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_or_acquire(i, v);
-}
-
-static __always_inline int
-atomic_fetch_or_release(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_or_release(i, v);
-}
-
-static __always_inline int
-atomic_fetch_or_relaxed(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_or_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_xor(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic_xor(i, v);
-}
-
-static __always_inline int
-atomic_fetch_xor(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_xor(i, v);
-}
-
-static __always_inline int
-atomic_fetch_xor_acquire(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_xor_acquire(i, v);
-}
-
-static __always_inline int
-atomic_fetch_xor_release(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_xor_release(i, v);
-}
-
-static __always_inline int
-atomic_fetch_xor_relaxed(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_xor_relaxed(i, v);
-}
-
-static __always_inline int
-atomic_xchg(atomic_t *v, int i)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_xchg(v, i);
-}
-
-static __always_inline int
-atomic_xchg_acquire(atomic_t *v, int i)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_xchg_acquire(v, i);
-}
-
-static __always_inline int
-atomic_xchg_release(atomic_t *v, int i)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_xchg_release(v, i);
-}
-
-static __always_inline int
-atomic_xchg_relaxed(atomic_t *v, int i)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_xchg_relaxed(v, i);
-}
-
-static __always_inline int
-atomic_cmpxchg(atomic_t *v, int old, int new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_cmpxchg(v, old, new);
-}
-
-static __always_inline int
-atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_cmpxchg_acquire(v, old, new);
-}
-
-static __always_inline int
-atomic_cmpxchg_release(atomic_t *v, int old, int new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_cmpxchg_release(v, old, new);
-}
-
-static __always_inline int
-atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_cmpxchg_relaxed(v, old, new);
-}
-
-static __always_inline bool
-atomic_try_cmpxchg(atomic_t *v, int *old, int new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- instrument_atomic_read_write(old, sizeof(*old));
- return arch_atomic_try_cmpxchg(v, old, new);
-}
-
-static __always_inline bool
-atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- instrument_atomic_read_write(old, sizeof(*old));
- return arch_atomic_try_cmpxchg_acquire(v, old, new);
-}
-
-static __always_inline bool
-atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- instrument_atomic_read_write(old, sizeof(*old));
- return arch_atomic_try_cmpxchg_release(v, old, new);
-}
-
-static __always_inline bool
-atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- instrument_atomic_read_write(old, sizeof(*old));
- return arch_atomic_try_cmpxchg_relaxed(v, old, new);
-}
-
-static __always_inline bool
-atomic_sub_and_test(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_sub_and_test(i, v);
-}
-
-static __always_inline bool
-atomic_dec_and_test(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_dec_and_test(v);
-}
-
-static __always_inline bool
-atomic_inc_and_test(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_inc_and_test(v);
-}
-
-static __always_inline bool
-atomic_add_negative(int i, atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_add_negative(i, v);
-}
-
-static __always_inline int
-atomic_fetch_add_unless(atomic_t *v, int a, int u)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_fetch_add_unless(v, a, u);
-}
-
-static __always_inline bool
-atomic_add_unless(atomic_t *v, int a, int u)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_add_unless(v, a, u);
-}
-
-static __always_inline bool
-atomic_inc_not_zero(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_inc_not_zero(v);
-}
-
-static __always_inline bool
-atomic_inc_unless_negative(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_inc_unless_negative(v);
-}
-
-static __always_inline bool
-atomic_dec_unless_positive(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_dec_unless_positive(v);
-}
-
-static __always_inline int
-atomic_dec_if_positive(atomic_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic_dec_if_positive(v);
-}
-
-static __always_inline s64
-atomic64_read(const atomic64_t *v)
-{
- instrument_atomic_read(v, sizeof(*v));
- return arch_atomic64_read(v);
-}
-
-static __always_inline s64
-atomic64_read_acquire(const atomic64_t *v)
-{
- instrument_atomic_read(v, sizeof(*v));
- return arch_atomic64_read_acquire(v);
-}
-
-static __always_inline void
-atomic64_set(atomic64_t *v, s64 i)
-{
- instrument_atomic_write(v, sizeof(*v));
- arch_atomic64_set(v, i);
-}
-
-static __always_inline void
-atomic64_set_release(atomic64_t *v, s64 i)
-{
- instrument_atomic_write(v, sizeof(*v));
- arch_atomic64_set_release(v, i);
-}
-
-static __always_inline void
-atomic64_add(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic64_add(i, v);
-}
-
-static __always_inline s64
-atomic64_add_return(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_add_return(i, v);
-}
-
-static __always_inline s64
-atomic64_add_return_acquire(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_add_return_acquire(i, v);
-}
-
-static __always_inline s64
-atomic64_add_return_release(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_add_return_release(i, v);
-}
-
-static __always_inline s64
-atomic64_add_return_relaxed(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_add_return_relaxed(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_add(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_add(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_add_acquire(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_add_release(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_add_release(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_add_relaxed(i, v);
-}
-
-static __always_inline void
-atomic64_sub(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic64_sub(i, v);
-}
-
-static __always_inline s64
-atomic64_sub_return(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_sub_return(i, v);
-}
-
-static __always_inline s64
-atomic64_sub_return_acquire(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_sub_return_acquire(i, v);
-}
-
-static __always_inline s64
-atomic64_sub_return_release(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_sub_return_release(i, v);
-}
-
-static __always_inline s64
-atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_sub_return_relaxed(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_sub(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_sub(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_sub_acquire(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_sub_release(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_sub_release(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_sub_relaxed(i, v);
-}
-
-static __always_inline void
-atomic64_inc(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic64_inc(v);
-}
-
-static __always_inline s64
-atomic64_inc_return(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_inc_return(v);
-}
-
-static __always_inline s64
-atomic64_inc_return_acquire(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_inc_return_acquire(v);
-}
-
-static __always_inline s64
-atomic64_inc_return_release(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_inc_return_release(v);
-}
-
-static __always_inline s64
-atomic64_inc_return_relaxed(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_inc_return_relaxed(v);
-}
-
-static __always_inline s64
-atomic64_fetch_inc(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_inc(v);
-}
-
-static __always_inline s64
-atomic64_fetch_inc_acquire(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_inc_acquire(v);
-}
-
-static __always_inline s64
-atomic64_fetch_inc_release(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_inc_release(v);
-}
-
-static __always_inline s64
-atomic64_fetch_inc_relaxed(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_inc_relaxed(v);
-}
-
-static __always_inline void
-atomic64_dec(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic64_dec(v);
-}
-
-static __always_inline s64
-atomic64_dec_return(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_dec_return(v);
-}
-
-static __always_inline s64
-atomic64_dec_return_acquire(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_dec_return_acquire(v);
-}
-
-static __always_inline s64
-atomic64_dec_return_release(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_dec_return_release(v);
-}
-
-static __always_inline s64
-atomic64_dec_return_relaxed(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_dec_return_relaxed(v);
-}
-
-static __always_inline s64
-atomic64_fetch_dec(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_dec(v);
-}
-
-static __always_inline s64
-atomic64_fetch_dec_acquire(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_dec_acquire(v);
-}
-
-static __always_inline s64
-atomic64_fetch_dec_release(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_dec_release(v);
-}
-
-static __always_inline s64
-atomic64_fetch_dec_relaxed(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_dec_relaxed(v);
-}
-
-static __always_inline void
-atomic64_and(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic64_and(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_and(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_and(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_and_acquire(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_and_release(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_and_release(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_and_relaxed(i, v);
-}
-
-static __always_inline void
-atomic64_andnot(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic64_andnot(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_andnot(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_andnot(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_andnot_acquire(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_andnot_release(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_andnot_relaxed(i, v);
-}
-
-static __always_inline void
-atomic64_or(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic64_or(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_or(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_or(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_or_acquire(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_or_release(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_or_release(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_or_relaxed(i, v);
-}
-
-static __always_inline void
-atomic64_xor(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- arch_atomic64_xor(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_xor(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_xor(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_xor_acquire(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_xor_release(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_xor_release(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_xor_relaxed(i, v);
-}
-
-static __always_inline s64
-atomic64_xchg(atomic64_t *v, s64 i)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_xchg(v, i);
-}
-
-static __always_inline s64
-atomic64_xchg_acquire(atomic64_t *v, s64 i)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_xchg_acquire(v, i);
-}
-
-static __always_inline s64
-atomic64_xchg_release(atomic64_t *v, s64 i)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_xchg_release(v, i);
-}
-
-static __always_inline s64
-atomic64_xchg_relaxed(atomic64_t *v, s64 i)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_xchg_relaxed(v, i);
-}
-
-static __always_inline s64
-atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_cmpxchg(v, old, new);
-}
-
-static __always_inline s64
-atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_cmpxchg_acquire(v, old, new);
-}
-
-static __always_inline s64
-atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_cmpxchg_release(v, old, new);
-}
-
-static __always_inline s64
-atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_cmpxchg_relaxed(v, old, new);
-}
-
-static __always_inline bool
-atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- instrument_atomic_read_write(old, sizeof(*old));
- return arch_atomic64_try_cmpxchg(v, old, new);
-}
-
-static __always_inline bool
-atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- instrument_atomic_read_write(old, sizeof(*old));
- return arch_atomic64_try_cmpxchg_acquire(v, old, new);
-}
-
-static __always_inline bool
-atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- instrument_atomic_read_write(old, sizeof(*old));
- return arch_atomic64_try_cmpxchg_release(v, old, new);
-}
-
-static __always_inline bool
-atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- instrument_atomic_read_write(old, sizeof(*old));
- return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
-}
-
-static __always_inline bool
-atomic64_sub_and_test(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_sub_and_test(i, v);
-}
-
-static __always_inline bool
-atomic64_dec_and_test(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_dec_and_test(v);
-}
-
-static __always_inline bool
-atomic64_inc_and_test(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_inc_and_test(v);
-}
-
-static __always_inline bool
-atomic64_add_negative(s64 i, atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_add_negative(i, v);
-}
-
-static __always_inline s64
-atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_fetch_add_unless(v, a, u);
-}
-
-static __always_inline bool
-atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_add_unless(v, a, u);
-}
-
-static __always_inline bool
-atomic64_inc_not_zero(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_inc_not_zero(v);
-}
-
-static __always_inline bool
-atomic64_inc_unless_negative(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_inc_unless_negative(v);
-}
-
-static __always_inline bool
-atomic64_dec_unless_positive(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_dec_unless_positive(v);
-}
-
-static __always_inline s64
-atomic64_dec_if_positive(atomic64_t *v)
-{
- instrument_atomic_read_write(v, sizeof(*v));
- return arch_atomic64_dec_if_positive(v);
-}
-
-#define xchg(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_xchg(__ai_ptr, __VA_ARGS__); \
-})
-
-#define xchg_acquire(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_xchg_acquire(__ai_ptr, __VA_ARGS__); \
-})
-
-#define xchg_release(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_xchg_release(__ai_ptr, __VA_ARGS__); \
-})
-
-#define xchg_relaxed(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_xchg_relaxed(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg_acquire(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg_release(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg_relaxed(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg64(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg64(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg64_acquire(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg64_release(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg64_relaxed(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
-})
-
-#define try_cmpxchg(ptr, oldp, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- typeof(oldp) __ai_oldp = (oldp); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
- arch_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__); \
-})
-
-#define try_cmpxchg_acquire(ptr, oldp, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- typeof(oldp) __ai_oldp = (oldp); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
- arch_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
-})
-
-#define try_cmpxchg_release(ptr, oldp, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- typeof(oldp) __ai_oldp = (oldp); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
- arch_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
-})
-
-#define try_cmpxchg_relaxed(ptr, oldp, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- typeof(oldp) __ai_oldp = (oldp); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
- arch_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
-})
-
-#define cmpxchg_local(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg64_local(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
-})
-
-#define sync_cmpxchg(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
- arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
-})
-
-#define cmpxchg_double(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
- arch_cmpxchg_double(__ai_ptr, __VA_ARGS__); \
-})
-
-
-#define cmpxchg_double_local(ptr, ...) \
-({ \
- typeof(ptr) __ai_ptr = (ptr); \
- instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
- arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__); \
-})
-
-#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
-// 1d7c3a25aca5c7fb031c307be4c3d24c7b48fcd5
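[Editor's note: the deleted header's comment describes the intended usage pattern — an arch defines arch_-prefixed operations and includes this file last. Roughly, that looks like the sketch below; the arch "foo" and the READ_ONCE/WRITE_ONCE bodies are illustrative assumptions, not taken from any real port.]

    /* Hypothetical arch/foo/include/asm/atomic.h -- illustrative only. */
    #ifndef _ASM_FOO_ATOMIC_H
    #define _ASM_FOO_ATOMIC_H

    #include <linux/compiler.h>
    #include <linux/types.h>

    /* On this imaginary arch, aligned word loads/stores are atomic. */
    static __always_inline int arch_atomic_read(const atomic_t *v)
    {
            return READ_ONCE(v->counter);
    }

    static __always_inline void arch_atomic_set(atomic_t *v, int i)
    {
            WRITE_ONCE(v->counter, i);
    }

    /* ... all remaining arch_atomic_*() operations ... */

    /* Included last, so atomic_read() etc. gain the sanitizer checks. */
    #include <asm-generic/atomic-instrumented.h>

    #endif /* _ASM_FOO_ATOMIC_H */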
diff --git a/include/asm-generic/atomic-long.h b/include/asm-generic/atomic-long.h
deleted file mode 100644
index 073cf40f431b..000000000000
--- a/include/asm-generic/atomic-long.h
+++ /dev/null
@@ -1,1014 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0
-
-// Generated by scripts/atomic/gen-atomic-long.sh
-// DO NOT MODIFY THIS FILE DIRECTLY
-
-#ifndef _ASM_GENERIC_ATOMIC_LONG_H
-#define _ASM_GENERIC_ATOMIC_LONG_H
-
-#include <linux/compiler.h>
-#include <asm/types.h>
-
-#ifdef CONFIG_64BIT
-typedef atomic64_t atomic_long_t;
-#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
-#define atomic_long_cond_read_acquire atomic64_cond_read_acquire
-#define atomic_long_cond_read_relaxed atomic64_cond_read_relaxed
-#else
-typedef atomic_t atomic_long_t;
-#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
-#define atomic_long_cond_read_acquire atomic_cond_read_acquire
-#define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
-#endif
-
-#ifdef CONFIG_64BIT
-
-static __always_inline long
-atomic_long_read(const atomic_long_t *v)
-{
- return atomic64_read(v);
-}
-
-static __always_inline long
-atomic_long_read_acquire(const atomic_long_t *v)
-{
- return atomic64_read_acquire(v);
-}
-
-static __always_inline void
-atomic_long_set(atomic_long_t *v, long i)
-{
- atomic64_set(v, i);
-}
-
-static __always_inline void
-atomic_long_set_release(atomic_long_t *v, long i)
-{
- atomic64_set_release(v, i);
-}
-
-static __always_inline void
-atomic_long_add(long i, atomic_long_t *v)
-{
- atomic64_add(i, v);
-}
-
-static __always_inline long
-atomic_long_add_return(long i, atomic_long_t *v)
-{
- return atomic64_add_return(i, v);
-}
-
-static __always_inline long
-atomic_long_add_return_acquire(long i, atomic_long_t *v)
-{
- return atomic64_add_return_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_add_return_release(long i, atomic_long_t *v)
-{
- return atomic64_add_return_release(i, v);
-}
-
-static __always_inline long
-atomic_long_add_return_relaxed(long i, atomic_long_t *v)
-{
- return atomic64_add_return_relaxed(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add(long i, atomic_long_t *v)
-{
- return atomic64_fetch_add(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
-{
- return atomic64_fetch_add_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add_release(long i, atomic_long_t *v)
-{
- return atomic64_fetch_add_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
-{
- return atomic64_fetch_add_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_sub(long i, atomic_long_t *v)
-{
- atomic64_sub(i, v);
-}
-
-static __always_inline long
-atomic_long_sub_return(long i, atomic_long_t *v)
-{
- return atomic64_sub_return(i, v);
-}
-
-static __always_inline long
-atomic_long_sub_return_acquire(long i, atomic_long_t *v)
-{
- return atomic64_sub_return_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_sub_return_release(long i, atomic_long_t *v)
-{
- return atomic64_sub_return_release(i, v);
-}
-
-static __always_inline long
-atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
-{
- return atomic64_sub_return_relaxed(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_sub(long i, atomic_long_t *v)
-{
- return atomic64_fetch_sub(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
-{
- return atomic64_fetch_sub_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_sub_release(long i, atomic_long_t *v)
-{
- return atomic64_fetch_sub_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
-{
- return atomic64_fetch_sub_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_inc(atomic_long_t *v)
-{
- atomic64_inc(v);
-}
-
-static __always_inline long
-atomic_long_inc_return(atomic_long_t *v)
-{
- return atomic64_inc_return(v);
-}
-
-static __always_inline long
-atomic_long_inc_return_acquire(atomic_long_t *v)
-{
- return atomic64_inc_return_acquire(v);
-}
-
-static __always_inline long
-atomic_long_inc_return_release(atomic_long_t *v)
-{
- return atomic64_inc_return_release(v);
-}
-
-static __always_inline long
-atomic_long_inc_return_relaxed(atomic_long_t *v)
-{
- return atomic64_inc_return_relaxed(v);
-}
-
-static __always_inline long
-atomic_long_fetch_inc(atomic_long_t *v)
-{
- return atomic64_fetch_inc(v);
-}
-
-static __always_inline long
-atomic_long_fetch_inc_acquire(atomic_long_t *v)
-{
- return atomic64_fetch_inc_acquire(v);
-}
-
-static __always_inline long
-atomic_long_fetch_inc_release(atomic_long_t *v)
-{
- return atomic64_fetch_inc_release(v);
-}
-
-static __always_inline long
-atomic_long_fetch_inc_relaxed(atomic_long_t *v)
-{
- return atomic64_fetch_inc_relaxed(v);
-}
-
-static __always_inline void
-atomic_long_dec(atomic_long_t *v)
-{
- atomic64_dec(v);
-}
-
-static __always_inline long
-atomic_long_dec_return(atomic_long_t *v)
-{
- return atomic64_dec_return(v);
-}
-
-static __always_inline long
-atomic_long_dec_return_acquire(atomic_long_t *v)
-{
- return atomic64_dec_return_acquire(v);
-}
-
-static __always_inline long
-atomic_long_dec_return_release(atomic_long_t *v)
-{
- return atomic64_dec_return_release(v);
-}
-
-static __always_inline long
-atomic_long_dec_return_relaxed(atomic_long_t *v)
-{
- return atomic64_dec_return_relaxed(v);
-}
-
-static __always_inline long
-atomic_long_fetch_dec(atomic_long_t *v)
-{
- return atomic64_fetch_dec(v);
-}
-
-static __always_inline long
-atomic_long_fetch_dec_acquire(atomic_long_t *v)
-{
- return atomic64_fetch_dec_acquire(v);
-}
-
-static __always_inline long
-atomic_long_fetch_dec_release(atomic_long_t *v)
-{
- return atomic64_fetch_dec_release(v);
-}
-
-static __always_inline long
-atomic_long_fetch_dec_relaxed(atomic_long_t *v)
-{
- return atomic64_fetch_dec_relaxed(v);
-}
-
-static __always_inline void
-atomic_long_and(long i, atomic_long_t *v)
-{
- atomic64_and(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_and(long i, atomic_long_t *v)
-{
- return atomic64_fetch_and(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
-{
- return atomic64_fetch_and_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_and_release(long i, atomic_long_t *v)
-{
- return atomic64_fetch_and_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
-{
- return atomic64_fetch_and_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_andnot(long i, atomic_long_t *v)
-{
- atomic64_andnot(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_andnot(long i, atomic_long_t *v)
-{
- return atomic64_fetch_andnot(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
-{
- return atomic64_fetch_andnot_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
-{
- return atomic64_fetch_andnot_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
-{
- return atomic64_fetch_andnot_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_or(long i, atomic_long_t *v)
-{
- atomic64_or(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_or(long i, atomic_long_t *v)
-{
- return atomic64_fetch_or(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
-{
- return atomic64_fetch_or_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_or_release(long i, atomic_long_t *v)
-{
- return atomic64_fetch_or_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
-{
- return atomic64_fetch_or_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_xor(long i, atomic_long_t *v)
-{
- atomic64_xor(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_xor(long i, atomic_long_t *v)
-{
- return atomic64_fetch_xor(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
-{
- return atomic64_fetch_xor_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_xor_release(long i, atomic_long_t *v)
-{
- return atomic64_fetch_xor_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
-{
- return atomic64_fetch_xor_relaxed(i, v);
-}
-
-static __always_inline long
-atomic_long_xchg(atomic_long_t *v, long i)
-{
- return atomic64_xchg(v, i);
-}
-
-static __always_inline long
-atomic_long_xchg_acquire(atomic_long_t *v, long i)
-{
- return atomic64_xchg_acquire(v, i);
-}
-
-static __always_inline long
-atomic_long_xchg_release(atomic_long_t *v, long i)
-{
- return atomic64_xchg_release(v, i);
-}
-
-static __always_inline long
-atomic_long_xchg_relaxed(atomic_long_t *v, long i)
-{
- return atomic64_xchg_relaxed(v, i);
-}
-
-static __always_inline long
-atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
-{
- return atomic64_cmpxchg(v, old, new);
-}
-
-static __always_inline long
-atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
-{
- return atomic64_cmpxchg_acquire(v, old, new);
-}
-
-static __always_inline long
-atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
-{
- return atomic64_cmpxchg_release(v, old, new);
-}
-
-static __always_inline long
-atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
-{
- return atomic64_cmpxchg_relaxed(v, old, new);
-}
-
-static __always_inline bool
-atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
-{
- return atomic64_try_cmpxchg(v, (s64 *)old, new);
-}
-
-static __always_inline bool
-atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
-{
- return atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
-}
-
-static __always_inline bool
-atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
-{
- return atomic64_try_cmpxchg_release(v, (s64 *)old, new);
-}
-
-static __always_inline bool
-atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
-{
- return atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
-}
-
-static __always_inline bool
-atomic_long_sub_and_test(long i, atomic_long_t *v)
-{
- return atomic64_sub_and_test(i, v);
-}
-
-static __always_inline bool
-atomic_long_dec_and_test(atomic_long_t *v)
-{
- return atomic64_dec_and_test(v);
-}
-
-static __always_inline bool
-atomic_long_inc_and_test(atomic_long_t *v)
-{
- return atomic64_inc_and_test(v);
-}
-
-static __always_inline bool
-atomic_long_add_negative(long i, atomic_long_t *v)
-{
- return atomic64_add_negative(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
-{
- return atomic64_fetch_add_unless(v, a, u);
-}
-
-static __always_inline bool
-atomic_long_add_unless(atomic_long_t *v, long a, long u)
-{
- return atomic64_add_unless(v, a, u);
-}
-
-static __always_inline bool
-atomic_long_inc_not_zero(atomic_long_t *v)
-{
- return atomic64_inc_not_zero(v);
-}
-
-static __always_inline bool
-atomic_long_inc_unless_negative(atomic_long_t *v)
-{
- return atomic64_inc_unless_negative(v);
-}
-
-static __always_inline bool
-atomic_long_dec_unless_positive(atomic_long_t *v)
-{
- return atomic64_dec_unless_positive(v);
-}
-
-static __always_inline long
-atomic_long_dec_if_positive(atomic_long_t *v)
-{
- return atomic64_dec_if_positive(v);
-}
-
-#else /* CONFIG_64BIT */
-
-static __always_inline long
-atomic_long_read(const atomic_long_t *v)
-{
- return atomic_read(v);
-}
-
-static __always_inline long
-atomic_long_read_acquire(const atomic_long_t *v)
-{
- return atomic_read_acquire(v);
-}
-
-static __always_inline void
-atomic_long_set(atomic_long_t *v, long i)
-{
- atomic_set(v, i);
-}
-
-static __always_inline void
-atomic_long_set_release(atomic_long_t *v, long i)
-{
- atomic_set_release(v, i);
-}
-
-static __always_inline void
-atomic_long_add(long i, atomic_long_t *v)
-{
- atomic_add(i, v);
-}
-
-static __always_inline long
-atomic_long_add_return(long i, atomic_long_t *v)
-{
- return atomic_add_return(i, v);
-}
-
-static __always_inline long
-atomic_long_add_return_acquire(long i, atomic_long_t *v)
-{
- return atomic_add_return_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_add_return_release(long i, atomic_long_t *v)
-{
- return atomic_add_return_release(i, v);
-}
-
-static __always_inline long
-atomic_long_add_return_relaxed(long i, atomic_long_t *v)
-{
- return atomic_add_return_relaxed(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add(long i, atomic_long_t *v)
-{
- return atomic_fetch_add(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
-{
- return atomic_fetch_add_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add_release(long i, atomic_long_t *v)
-{
- return atomic_fetch_add_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
-{
- return atomic_fetch_add_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_sub(long i, atomic_long_t *v)
-{
- atomic_sub(i, v);
-}
-
-static __always_inline long
-atomic_long_sub_return(long i, atomic_long_t *v)
-{
- return atomic_sub_return(i, v);
-}
-
-static __always_inline long
-atomic_long_sub_return_acquire(long i, atomic_long_t *v)
-{
- return atomic_sub_return_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_sub_return_release(long i, atomic_long_t *v)
-{
- return atomic_sub_return_release(i, v);
-}
-
-static __always_inline long
-atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
-{
- return atomic_sub_return_relaxed(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_sub(long i, atomic_long_t *v)
-{
- return atomic_fetch_sub(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
-{
- return atomic_fetch_sub_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_sub_release(long i, atomic_long_t *v)
-{
- return atomic_fetch_sub_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
-{
- return atomic_fetch_sub_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_inc(atomic_long_t *v)
-{
- atomic_inc(v);
-}
-
-static __always_inline long
-atomic_long_inc_return(atomic_long_t *v)
-{
- return atomic_inc_return(v);
-}
-
-static __always_inline long
-atomic_long_inc_return_acquire(atomic_long_t *v)
-{
- return atomic_inc_return_acquire(v);
-}
-
-static __always_inline long
-atomic_long_inc_return_release(atomic_long_t *v)
-{
- return atomic_inc_return_release(v);
-}
-
-static __always_inline long
-atomic_long_inc_return_relaxed(atomic_long_t *v)
-{
- return atomic_inc_return_relaxed(v);
-}
-
-static __always_inline long
-atomic_long_fetch_inc(atomic_long_t *v)
-{
- return atomic_fetch_inc(v);
-}
-
-static __always_inline long
-atomic_long_fetch_inc_acquire(atomic_long_t *v)
-{
- return atomic_fetch_inc_acquire(v);
-}
-
-static __always_inline long
-atomic_long_fetch_inc_release(atomic_long_t *v)
-{
- return atomic_fetch_inc_release(v);
-}
-
-static __always_inline long
-atomic_long_fetch_inc_relaxed(atomic_long_t *v)
-{
- return atomic_fetch_inc_relaxed(v);
-}
-
-static __always_inline void
-atomic_long_dec(atomic_long_t *v)
-{
- atomic_dec(v);
-}
-
-static __always_inline long
-atomic_long_dec_return(atomic_long_t *v)
-{
- return atomic_dec_return(v);
-}
-
-static __always_inline long
-atomic_long_dec_return_acquire(atomic_long_t *v)
-{
- return atomic_dec_return_acquire(v);
-}
-
-static __always_inline long
-atomic_long_dec_return_release(atomic_long_t *v)
-{
- return atomic_dec_return_release(v);
-}
-
-static __always_inline long
-atomic_long_dec_return_relaxed(atomic_long_t *v)
-{
- return atomic_dec_return_relaxed(v);
-}
-
-static __always_inline long
-atomic_long_fetch_dec(atomic_long_t *v)
-{
- return atomic_fetch_dec(v);
-}
-
-static __always_inline long
-atomic_long_fetch_dec_acquire(atomic_long_t *v)
-{
- return atomic_fetch_dec_acquire(v);
-}
-
-static __always_inline long
-atomic_long_fetch_dec_release(atomic_long_t *v)
-{
- return atomic_fetch_dec_release(v);
-}
-
-static __always_inline long
-atomic_long_fetch_dec_relaxed(atomic_long_t *v)
-{
- return atomic_fetch_dec_relaxed(v);
-}
-
-static __always_inline void
-atomic_long_and(long i, atomic_long_t *v)
-{
- atomic_and(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_and(long i, atomic_long_t *v)
-{
- return atomic_fetch_and(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
-{
- return atomic_fetch_and_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_and_release(long i, atomic_long_t *v)
-{
- return atomic_fetch_and_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
-{
- return atomic_fetch_and_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_andnot(long i, atomic_long_t *v)
-{
- atomic_andnot(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_andnot(long i, atomic_long_t *v)
-{
- return atomic_fetch_andnot(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
-{
- return atomic_fetch_andnot_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
-{
- return atomic_fetch_andnot_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
-{
- return atomic_fetch_andnot_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_or(long i, atomic_long_t *v)
-{
- atomic_or(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_or(long i, atomic_long_t *v)
-{
- return atomic_fetch_or(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
-{
- return atomic_fetch_or_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_or_release(long i, atomic_long_t *v)
-{
- return atomic_fetch_or_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
-{
- return atomic_fetch_or_relaxed(i, v);
-}
-
-static __always_inline void
-atomic_long_xor(long i, atomic_long_t *v)
-{
- atomic_xor(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_xor(long i, atomic_long_t *v)
-{
- return atomic_fetch_xor(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
-{
- return atomic_fetch_xor_acquire(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_xor_release(long i, atomic_long_t *v)
-{
- return atomic_fetch_xor_release(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
-{
- return atomic_fetch_xor_relaxed(i, v);
-}
-
-static __always_inline long
-atomic_long_xchg(atomic_long_t *v, long i)
-{
- return atomic_xchg(v, i);
-}
-
-static __always_inline long
-atomic_long_xchg_acquire(atomic_long_t *v, long i)
-{
- return atomic_xchg_acquire(v, i);
-}
-
-static __always_inline long
-atomic_long_xchg_release(atomic_long_t *v, long i)
-{
- return atomic_xchg_release(v, i);
-}
-
-static __always_inline long
-atomic_long_xchg_relaxed(atomic_long_t *v, long i)
-{
- return atomic_xchg_relaxed(v, i);
-}
-
-static __always_inline long
-atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
-{
- return atomic_cmpxchg(v, old, new);
-}
-
-static __always_inline long
-atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
-{
- return atomic_cmpxchg_acquire(v, old, new);
-}
-
-static __always_inline long
-atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
-{
- return atomic_cmpxchg_release(v, old, new);
-}
-
-static __always_inline long
-atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
-{
- return atomic_cmpxchg_relaxed(v, old, new);
-}
-
-static __always_inline bool
-atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
-{
- return atomic_try_cmpxchg(v, (int *)old, new);
-}
-
-static __always_inline bool
-atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
-{
- return atomic_try_cmpxchg_acquire(v, (int *)old, new);
-}
-
-static __always_inline bool
-atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
-{
- return atomic_try_cmpxchg_release(v, (int *)old, new);
-}
-
-static __always_inline bool
-atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
-{
- return atomic_try_cmpxchg_relaxed(v, (int *)old, new);
-}
-
-static __always_inline bool
-atomic_long_sub_and_test(long i, atomic_long_t *v)
-{
- return atomic_sub_and_test(i, v);
-}
-
-static __always_inline bool
-atomic_long_dec_and_test(atomic_long_t *v)
-{
- return atomic_dec_and_test(v);
-}
-
-static __always_inline bool
-atomic_long_inc_and_test(atomic_long_t *v)
-{
- return atomic_inc_and_test(v);
-}
-
-static __always_inline bool
-atomic_long_add_negative(long i, atomic_long_t *v)
-{
- return atomic_add_negative(i, v);
-}
-
-static __always_inline long
-atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
-{
- return atomic_fetch_add_unless(v, a, u);
-}
-
-static __always_inline bool
-atomic_long_add_unless(atomic_long_t *v, long a, long u)
-{
- return atomic_add_unless(v, a, u);
-}
-
-static __always_inline bool
-atomic_long_inc_not_zero(atomic_long_t *v)
-{
- return atomic_inc_not_zero(v);
-}
-
-static __always_inline bool
-atomic_long_inc_unless_negative(atomic_long_t *v)
-{
- return atomic_inc_unless_negative(v);
-}
-
-static __always_inline bool
-atomic_long_dec_unless_positive(atomic_long_t *v)
-{
- return atomic_dec_unless_positive(v);
-}
-
-static __always_inline long
-atomic_long_dec_if_positive(atomic_long_t *v)
-{
- return atomic_dec_if_positive(v);
-}
-
-#endif /* CONFIG_64BIT */
-#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
-// a624200981f552b2c6be4f32fe44da8289f30d87
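[Editor's note: the dispatch in the deleted atomic-long.h is purely a typedef choice — on CONFIG_64BIT kernels atomic_long_t is atomic64_t, otherwise atomic_t — so one source form yields the native-word-size operation. A minimal illustration; the counter name is made up:]

    /* One source form, two widths: this compiles down to
     * atomic64_inc_return() on 64-bit kernels and
     * atomic_inc_return() on 32-bit ones. */
    static atomic_long_t nr_events = ATOMIC_LONG_INIT(0);

    static inline long record_event(void)
    {
            return atomic_long_inc_return(&nr_events);
    }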
diff --git a/include/asm-generic/bitops/atomic.h b/include/asm-generic/bitops/atomic.h
index 0e7316a86240..3096f086b5a3 100644
--- a/include/asm-generic/bitops/atomic.h
+++ b/include/asm-generic/bitops/atomic.h
@@ -11,25 +11,29 @@
* See Documentation/atomic_bitops.txt for details.
*/
-static __always_inline void set_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline void
+arch_set_bit(unsigned int nr, volatile unsigned long *p)
{
p += BIT_WORD(nr);
- atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
+ arch_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
}
-static __always_inline void clear_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline void
+arch_clear_bit(unsigned int nr, volatile unsigned long *p)
{
p += BIT_WORD(nr);
- atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
+ arch_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
}
-static __always_inline void change_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline void
+arch_change_bit(unsigned int nr, volatile unsigned long *p)
{
p += BIT_WORD(nr);
- atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
+ arch_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
}
-static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline int
+arch_test_and_set_bit(unsigned int nr, volatile unsigned long *p)
{
long old;
unsigned long mask = BIT_MASK(nr);
@@ -38,11 +42,12 @@ static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
if (READ_ONCE(*p) & mask)
return 1;
- old = atomic_long_fetch_or(mask, (atomic_long_t *)p);
+ old = arch_atomic_long_fetch_or(mask, (atomic_long_t *)p);
return !!(old & mask);
}
-static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline int
+arch_test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
{
long old;
unsigned long mask = BIT_MASK(nr);
@@ -51,18 +56,21 @@ static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
if (!(READ_ONCE(*p) & mask))
return 0;
- old = atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
+ old = arch_atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
return !!(old & mask);
}
-static inline int test_and_change_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline int
+arch_test_and_change_bit(unsigned int nr, volatile unsigned long *p)
{
long old;
unsigned long mask = BIT_MASK(nr);
p += BIT_WORD(nr);
- old = atomic_long_fetch_xor(mask, (atomic_long_t *)p);
+ old = arch_atomic_long_fetch_xor(mask, (atomic_long_t *)p);
return !!(old & mask);
}
+#include <asm-generic/bitops/instrumented-atomic.h>
+
#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H */
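[Editor's note] With the arch_ prefix in place, <asm-generic/bitops/instrumented-atomic.h> can supply the unprefixed entry points, mirroring the atomic-instrumented.h pattern shown earlier in this diff: run the instrumentation hook, then forward to the arch_ implementation. A simplified sketch of the wrapper shape (see the real header for the full set of operations):

	static __always_inline void set_bit(long nr, volatile unsigned long *addr)
	{
		/* Tell KASAN/KCSAN about the atomic RMW access ... */
		instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
		/* ... then do the real work in the arch_ implementation. */
		arch_set_bit(nr, addr);
	}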
diff --git a/include/asm-generic/bitops/builtin-ffs.h b/include/asm-generic/bitops/builtin-ffs.h
index 1dacfdb4247e..7b129329046b 100644
--- a/include/asm-generic/bitops/builtin-ffs.h
+++ b/include/asm-generic/bitops/builtin-ffs.h
@@ -8,7 +8,7 @@
*
* This is defined the same way as
* the libc and compiler builtin ffs routines, therefore
- * differs in spirit from the above ffz (man ffs).
+ * differs in spirit from ffz (man ffs).
*/
#define ffs(x) __builtin_ffs(x)
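[Editor's note] For clarity, the ffs() contract implied by the comment — a 1-based index of the least-significant set bit, with 0 meaning "no bit set" — can be checked with a small userspace program (illustrative only):

	#include <stdio.h>

	int main(void)
	{
		printf("%d\n", __builtin_ffs(0));    /* 0: no bit set */
		printf("%d\n", __builtin_ffs(1));    /* 1: bit 0      */
		printf("%d\n", __builtin_ffs(0x10)); /* 5: bit 4      */
		/* ffz()-style helpers are 0-based, hence "differs in spirit". */
		return 0;
	}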
diff --git a/include/asm-generic/bitops/ffs.h b/include/asm-generic/bitops/ffs.h
index e81868b2c0f0..323fd5d6ae26 100644
--- a/include/asm-generic/bitops/ffs.h
+++ b/include/asm-generic/bitops/ffs.h
@@ -8,7 +8,7 @@
*
* This is defined the same way as
* the libc and compiler builtin ffs routines, therefore
- * differs in spirit from the above ffz (man ffs).
+ * differs in spirit from ffz (man ffs).
*/
static inline int ffs(int x)
{
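[Editor's note] The body of the generic C fallback is not shown in this hunk. Such fallbacks conventionally narrow the search by halving the width each step; a sketch of that binary-search shape (ffs_sketch is an illustrative name, and the details may differ from the kernel's actual body):

	#include <stdio.h>

	static int ffs_sketch(int x)
	{
		int r = 1;

		if (!x)
			return 0;
		if (!(x & 0xffff)) { x >>= 16; r += 16; }
		if (!(x & 0xff))   { x >>= 8;  r += 8;  }
		if (!(x & 0xf))    { x >>= 4;  r += 4;  }
		if (!(x & 3))      { x >>= 2;  r += 2;  }
		if (!(x & 1))      { r += 1; }
		return r;
	}

	int main(void)
	{
		printf("%d %d %d\n", ffs_sketch(0), ffs_sketch(1),
		       ffs_sketch(0x10)); /* 0 1 5 */
		return 0;
	}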
diff --git a/include/asm-generic/bitops/lock.h b/include/asm-generic/bitops/lock.h
index 3ae021368f48..630f2f6b9595 100644
--- a/include/asm-generic/bitops/lock.h
+++ b/include/asm-generic/bitops/lock.h
@@ -7,7 +7,7 @@
#include <asm/barrier.h>
/**
- * test_and_set_bit_lock - Set a bit and return its old value, for lock
+ * arch_test_and_set_bit_lock - Set a bit and return its old value, for lock
* @nr: Bit to set
* @addr: Address to count from
*
@@ -15,8 +15,8 @@
* the returned value is 0.
* It can be used to implement bit locks.
*/
-static inline int test_and_set_bit_lock(unsigned int nr,
- volatile unsigned long *p)
+static __always_inline int
+arch_test_and_set_bit_lock(unsigned int nr, volatile unsigned long *p)
{
long old;
unsigned long mask = BIT_MASK(nr);
@@ -25,26 +25,27 @@ static inline int test_and_set_bit_lock(unsigned int nr,
if (READ_ONCE(*p) & mask)
return 1;
- old = atomic_long_fetch_or_acquire(mask, (atomic_long_t *)p);
+ old = arch_atomic_long_fetch_or_acquire(mask, (atomic_long_t *)p);
return !!(old & mask);
}
/**
- * clear_bit_unlock - Clear a bit in memory, for unlock
+ * arch_clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
* @addr: the address to start counting from
*
* This operation is atomic and provides release barrier semantics.
*/
-static inline void clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
+static __always_inline void
+arch_clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
{
p += BIT_WORD(nr);
- atomic_long_fetch_andnot_release(BIT_MASK(nr), (atomic_long_t *)p);
+ arch_atomic_long_fetch_andnot_release(BIT_MASK(nr), (atomic_long_t *)p);
}
/**
- * __clear_bit_unlock - Clear a bit in memory, for unlock
+ * arch___clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
* @addr: the address to start counting from
*
@@ -54,38 +55,40 @@ static inline void clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
*
* See for example x86's implementation.
*/
-static inline void __clear_bit_unlock(unsigned int nr,
- volatile unsigned long *p)
+static inline void
+arch___clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
{
unsigned long old;
p += BIT_WORD(nr);
old = READ_ONCE(*p);
old &= ~BIT_MASK(nr);
- atomic_long_set_release((atomic_long_t *)p, old);
+ arch_atomic_long_set_release((atomic_long_t *)p, old);
}
/**
- * clear_bit_unlock_is_negative_byte - Clear a bit in memory and test if bottom
- * byte is negative, for unlock.
+ * arch_clear_bit_unlock_is_negative_byte - Clear a bit in memory and test if bottom
+ * byte is negative, for unlock.
* @nr: the bit to clear
* @addr: the address to start counting from
*
 * This is a bit of a one-trick pony for the filemap code, which clears
 * PG_locked and tests PG_waiters.
*/
-#ifndef clear_bit_unlock_is_negative_byte
-static inline bool clear_bit_unlock_is_negative_byte(unsigned int nr,
- volatile unsigned long *p)
+#ifndef arch_clear_bit_unlock_is_negative_byte
+static inline bool arch_clear_bit_unlock_is_negative_byte(unsigned int nr,
+ volatile unsigned long *p)
{
long old;
unsigned long mask = BIT_MASK(nr);
p += BIT_WORD(nr);
- old = atomic_long_fetch_andnot_release(mask, (atomic_long_t *)p);
+ old = arch_atomic_long_fetch_andnot_release(mask, (atomic_long_t *)p);
return !!(old & BIT(7));
}
-#define clear_bit_unlock_is_negative_byte clear_bit_unlock_is_negative_byte
+#define arch_clear_bit_unlock_is_negative_byte arch_clear_bit_unlock_is_negative_byte
#endif
+#include <asm-generic/bitops/instrumented-lock.h>
+
#endif /* _ASM_GENERIC_BITOPS_LOCK_H_ */
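[Editor's note] The acquire/release pairing above is exactly what a bit-granular lock needs; <linux/bit_spinlock.h> builds on these primitives in roughly this way (simplified sketch, with the my_* names being illustrative):

	#define MY_LOCK_BIT 0	/* illustrative choice of bit */

	static inline void my_bit_lock(unsigned long *word)
	{
		/* Acquire semantics: accesses in the critical section
		 * cannot be reordered before the successful set. */
		while (test_and_set_bit_lock(MY_LOCK_BIT, word))
			cpu_relax();
	}

	static inline void my_bit_unlock(unsigned long *word)
	{
		/* Release semantics: prior stores are visible before the
		 * bit is observed clear by the next locker. */
		clear_bit_unlock(MY_LOCK_BIT, word);
	}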
diff --git a/include/asm-generic/bitops/non-atomic.h b/include/asm-generic/bitops/non-atomic.h
index 7e10c4b50c5d..078cc68be2f1 100644
--- a/include/asm-generic/bitops/non-atomic.h
+++ b/include/asm-generic/bitops/non-atomic.h
@@ -5,7 +5,7 @@
#include <asm/types.h>
/**
- * __set_bit - Set a bit in memory
+ * arch___set_bit - Set a bit in memory
* @nr: the bit to set
* @addr: the address to start counting from
*
@@ -13,24 +13,28 @@
* If it's called on the same region of memory simultaneously, the effect
* may be that only one operation succeeds.
*/
-static inline void __set_bit(int nr, volatile unsigned long *addr)
+static __always_inline void
+arch___set_bit(unsigned int nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
*p |= mask;
}
+#define __set_bit arch___set_bit
-static inline void __clear_bit(int nr, volatile unsigned long *addr)
+static __always_inline void
+arch___clear_bit(unsigned int nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
*p &= ~mask;
}
+#define __clear_bit arch___clear_bit
/**
- * __change_bit - Toggle a bit in memory
+ * arch___change_bit - Toggle a bit in memory
* @nr: the bit to change
* @addr: the address to start counting from
*
@@ -38,16 +42,18 @@ static inline void __clear_bit(int nr, volatile unsigned long *addr)
* If it's called on the same region of memory simultaneously, the effect
* may be that only one operation succeeds.
*/
-static inline void __change_bit(int nr, volatile unsigned long *addr)
+static __always_inline void
+arch___change_bit(unsigned int nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
*p ^= mask;
}
+#define __change_bit arch___change_bit
/**
- * __test_and_set_bit - Set a bit and return its old value
+ * arch___test_and_set_bit - Set a bit and return its old value
* @nr: Bit to set
* @addr: Address to count from
*
@@ -55,7 +61,8 @@ static inline void __change_bit(int nr, volatile unsigned long *addr)
 * If two instances of this operation race, one can appear to succeed
* but actually fail. You must protect multiple accesses with a lock.
*/
-static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
+static __always_inline int
+arch___test_and_set_bit(unsigned int nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
@@ -64,9 +71,10 @@ static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
*p = old | mask;
return (old & mask) != 0;
}
+#define __test_and_set_bit arch___test_and_set_bit
/**
- * __test_and_clear_bit - Clear a bit and return its old value
+ * arch___test_and_clear_bit - Clear a bit and return its old value
* @nr: Bit to clear
* @addr: Address to count from
*
@@ -74,7 +82,8 @@ static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
 * If two instances of this operation race, one can appear to succeed
* but actually fail. You must protect multiple accesses with a lock.
*/
-static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
+static __always_inline int
+arch___test_and_clear_bit(unsigned int nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
@@ -83,10 +92,11 @@ static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
*p = old & ~mask;
return (old & mask) != 0;
}
+#define __test_and_clear_bit arch___test_and_clear_bit
/* WARNING: non-atomic and can be reordered! */
-static inline int __test_and_change_bit(int nr,
- volatile unsigned long *addr)
+static __always_inline int
+arch___test_and_change_bit(unsigned int nr, volatile unsigned long *addr)
{
unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
@@ -95,15 +105,18 @@ static inline int __test_and_change_bit(int nr,
*p = old ^ mask;
return (old & mask) != 0;
}
+#define __test_and_change_bit arch___test_and_change_bit
/**
- * test_bit - Determine whether a bit is set
+ * arch_test_bit - Determine whether a bit is set
* @nr: bit number to test
* @addr: Address to start counting from
*/
-static inline int test_bit(int nr, const volatile unsigned long *addr)
+static __always_inline int
+arch_test_bit(unsigned int nr, const volatile unsigned long *addr)
{
return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
}
+#define test_bit arch_test_bit
#endif /* _ASM_GENERIC_BITOPS_NON_ATOMIC_H_ */
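[Editor's note] The non-atomic __*_bit() variants trade safety for cost: they compile to a plain load/modify/store, so they are only correct when the bitmap has a single writer or is guarded externally. A hypothetical usage sketch (map_lock, slot_map and mark_slot are illustrative names):

	static DEFINE_SPINLOCK(map_lock);
	static unsigned long slot_map[BITS_TO_LONGS(128)];

	static void mark_slot(unsigned int slot)
	{
		spin_lock(&map_lock);
		__set_bit(slot, slot_map);	/* plain RMW; the lock makes it safe */
		spin_unlock(&map_lock);
	}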
diff --git a/include/asm-generic/compat.h b/include/asm-generic/compat.h
index 30f7b18a36f9..d46c0201cc34 100644
--- a/include/asm-generic/compat.h
+++ b/include/asm-generic/compat.h
@@ -20,7 +20,18 @@ typedef u16 compat_ushort_t;
typedef u32 compat_uint_t;
typedef u32 compat_ulong_t;
typedef u32 compat_uptr_t;
+typedef u32 compat_caddr_t;
typedef u32 compat_aio_context_t;
+typedef u32 compat_old_sigset_t;
+
+#ifndef __compat_uid32_t
+typedef u32 __compat_uid32_t;
+typedef u32 __compat_gid32_t;
+#endif
+
+#ifndef compat_mode_t
+typedef u32 compat_mode_t;
+#endif
#ifdef CONFIG_COMPAT_FOR_U64_ALIGNMENT
typedef s64 __attribute__((aligned(4))) compat_s64;
@@ -30,4 +41,10 @@ typedef s64 compat_s64;
typedef u64 compat_u64;
#endif
+#ifndef _COMPAT_NSIG
+typedef u32 compat_sigset_word;
+#define _COMPAT_NSIG _NSIG
+#define _COMPAT_NSIG_BPW 32
+#endif
+
#endif
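[Editor's note] These additions let more architectures share the generic compat header. The types exist so a 64-bit kernel can describe the 32-bit user ABI precisely; a hedged sketch of typical use (struct foo_args32, do_foo and compat_do_foo are hypothetical names):

	/* Hypothetical native handler. */
	long do_foo(void __user *buf, unsigned long len, compat_mode_t mode);

	struct foo_args32 {
		compat_uptr_t	buf;	/* 32-bit user pointer, widened below */
		compat_ulong_t	len;	/* 32-bit 'unsigned long' */
		compat_mode_t	mode;	/* now available generically as u32 */
	};

	static long compat_do_foo(struct foo_args32 __user *uargs)
	{
		struct foo_args32 a;

		if (copy_from_user(&a, uargs, sizeof(a)))
			return -EFAULT;
		/* compat_ptr() widens the 32-bit pointer representation. */
		return do_foo(compat_ptr(a.buf), (unsigned long)a.len, a.mode);
	}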
diff --git a/include/asm-generic/uaccess.h b/include/asm-generic/uaccess.h
index a0b2f270dddc..10ffa8b5c117 100644
--- a/include/asm-generic/uaccess.h
+++ b/include/asm-generic/uaccess.h
@@ -119,6 +119,11 @@ static inline void set_fs(mm_segment_t fs)
#ifndef uaccess_kernel
#define uaccess_kernel() (get_fs().seg == KERNEL_DS.seg)
#endif
+
+#ifndef user_addr_max
+#define user_addr_max() (uaccess_kernel() ? ~0UL : TASK_SIZE)
+#endif
+
#endif /* CONFIG_SET_FS */
#define access_ok(addr, size) __access_ok((unsigned long)(addr),(size))
@@ -244,50 +249,6 @@ static inline int __get_user_fn(size_t size, const void __user *ptr, void *x)
extern int __get_user_bad(void) __attribute__((noreturn));
/*
- * Copy a null terminated string from userspace.
- */
-#ifndef __strncpy_from_user
-static inline long
-__strncpy_from_user(char *dst, const char __user *src, long count)
-{
- char *tmp;
- strncpy(dst, (const char __force *)src, count);
- for (tmp = dst; *tmp && count > 0; tmp++, count--)
- ;
- return (tmp - dst);
-}
-#endif
-
-static inline long
-strncpy_from_user(char *dst, const char __user *src, long count)
-{
- if (!access_ok(src, 1))
- return -EFAULT;
- return __strncpy_from_user(dst, src, count);
-}
-
-/*
- * Return the size of a string (including the ending 0)
- *
- * Return 0 on exception, a value greater than N if too long
- */
-#ifndef __strnlen_user
-#define __strnlen_user(s, n) (strnlen((s), (n)) + 1)
-#endif
-
-/*
- * Unlike strnlen, strnlen_user includes the nul terminator in
- * its returned count. Callers should check for a returned value
- * greater than N as an indication the string is too long.
- */
-static inline long strnlen_user(const char __user *src, long n)
-{
- if (!access_ok(src, 1))
- return 0;
- return __strnlen_user(src, n);
-}
-
-/*
* Zero Userspace
*/
#ifndef __clear_user
@@ -311,4 +272,8 @@ clear_user(void __user *to, unsigned long n)
#include <asm/extable.h>
+__must_check long strncpy_from_user(char *dst, const char __user *src,
+ long count);
+__must_check long strnlen_user(const char __user *src, long n);
+
#endif /* __ASM_GENERIC_UACCESS_H */
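[Editor's note] With the inline versions gone, callers link against the common implementations (e.g. lib/strncpy_from_user.c). The return-value contract is worth restating; a hypothetical caller (fetch_name is an illustrative name):

	static long fetch_name(const char __user *uname, char *buf, size_t size)
	{
		long len = strncpy_from_user(buf, uname, size);

		if (len < 0)
			return len;		/* -EFAULT on a faulting access */
		if (len == size)
			return -ENAMETOOLONG;	/* no NUL found within 'size' bytes */
		return len;			/* string length, excluding the NUL */
	}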
diff --git a/include/asm-generic/vmlinux.lds.h b/include/asm-generic/vmlinux.lds.h
index 17325416e2de..62669b36a772 100644
--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -586,6 +586,7 @@
NOINSTR_TEXT \
*(.text..refcount) \
*(.ref.text) \
+ *(.text.asan.* .text.tsan.*) \
TEXT_CFI_JT \
MEM_KEEP(init.text*) \
MEM_KEEP(exit.text*) \