author     Marco Elver <elver@google.com>          2019-11-26 17:04:05 +0300
committer  Thomas Gleixner <tglx@linutronix.de>    2020-06-11 09:03:24 +0300
commit     765dcd209947e7b3666c08fb109ab8b879f7a471 (patch)
tree       0dbe7fe72d9bd74804abfb90453138f8a6e997c1 /scripts/atomic/fallbacks
parent     b29482fde649c72441d5478a4ea2c52c56d97a5e (diff)
download   linux-765dcd209947e7b3666c08fb109ab8b879f7a471.tar.xz
asm-generic/atomic: Use __always_inline for fallback wrappers
Use __always_inline for atomic fallback wrappers. When building for size
(CC_OPTIMIZE_FOR_SIZE), some compilers appear to be less inclined to
inline even relatively small static inline functions that are assumed to
be inlinable, such as atomic ops. This can cause problems, for example in
UACCESS regions.

While the fallback wrappers aren't pure wrappers, they are trivial
nonetheless, and the function they wrap should determine the final
inlining policy.

For x86 tinyconfig we observe:
- vmlinux baseline:   1315988
- vmlinux with patch: 1315928 (-60 bytes)

[ tglx: Cherry-picked from KCSAN ]

Suggested-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Marco Elver <elver@google.com>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Paul E. McKenney <paulmck@kernel.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
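For context, a minimal sketch of what one generated fallback looks like
after this change, assuming the 'inc' template below is expanded for plain
atomic_t with empty prefix, suffix and ordering variables (the real header
is emitted by the generator scripts under scripts/atomic/):

/* Sketch of the generated fallback; body mirrors fallbacks/inc. */
static __always_inline void
atomic_inc(atomic_t *v)
{
	atomic_add(1, v);
}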
Diffstat (limited to 'scripts/atomic/fallbacks')
-rwxr-xr-x  scripts/atomic/fallbacks/acquire              | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/add_negative         | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/add_unless           | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/andnot               | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/dec                  | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/dec_and_test         | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/dec_if_positive      | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/dec_unless_positive  | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/fence                | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/fetch_add_unless     | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/inc                  | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/inc_and_test         | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/inc_not_zero         | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/inc_unless_negative  | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/read_acquire         | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/release              | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/set_release          | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/sub_and_test         | 2 +-
-rwxr-xr-x  scripts/atomic/fallbacks/try_cmpxchg          | 2 +-
19 files changed, 19 insertions(+), 19 deletions(-)
diff --git a/scripts/atomic/fallbacks/acquire b/scripts/atomic/fallbacks/acquire
index e38871e64db6..ea489acc285e 100755
--- a/scripts/atomic/fallbacks/acquire
+++ b/scripts/atomic/fallbacks/acquire
@@ -1,5 +1,5 @@
cat <<EOF
-static inline ${ret}
+static __always_inline ${ret}
${atomic}_${pfx}${name}${sfx}_acquire(${params})
{
${ret} ret = ${atomic}_${pfx}${name}${sfx}_relaxed(${args});
diff --git a/scripts/atomic/fallbacks/add_negative b/scripts/atomic/fallbacks/add_negative
index e6f4815637de..03cc2e07fac5 100755
--- a/scripts/atomic/fallbacks/add_negative
+++ b/scripts/atomic/fallbacks/add_negative
@@ -8,7 +8,7 @@ cat <<EOF
* if the result is negative, or false when
* result is greater than or equal to zero.
*/
-static inline bool
+static __always_inline bool
${atomic}_add_negative(${int} i, ${atomic}_t *v)
{
return ${atomic}_add_return(i, v) < 0;
diff --git a/scripts/atomic/fallbacks/add_unless b/scripts/atomic/fallbacks/add_unless
index 792533885fbf..daf87a04c850 100755
--- a/scripts/atomic/fallbacks/add_unless
+++ b/scripts/atomic/fallbacks/add_unless
@@ -8,7 +8,7 @@ cat << EOF
* Atomically adds @a to @v, if @v was not already @u.
* Returns true if the addition was done.
*/
-static inline bool
+static __always_inline bool
${atomic}_add_unless(${atomic}_t *v, ${int} a, ${int} u)
{
return ${atomic}_fetch_add_unless(v, a, u) != u;
diff --git a/scripts/atomic/fallbacks/andnot b/scripts/atomic/fallbacks/andnot
index 9f3a3216b5e3..14efce01225a 100755
--- a/scripts/atomic/fallbacks/andnot
+++ b/scripts/atomic/fallbacks/andnot
@@ -1,5 +1,5 @@
cat <<EOF
-static inline ${ret}
+static __always_inline ${ret}
${atomic}_${pfx}andnot${sfx}${order}(${int} i, ${atomic}_t *v)
{
${retstmt}${atomic}_${pfx}and${sfx}${order}(~i, v);
diff --git a/scripts/atomic/fallbacks/dec b/scripts/atomic/fallbacks/dec
index 10bbc82be31d..118282f3a5a3 100755
--- a/scripts/atomic/fallbacks/dec
+++ b/scripts/atomic/fallbacks/dec
@@ -1,5 +1,5 @@
cat <<EOF
-static inline ${ret}
+static __always_inline ${ret}
${atomic}_${pfx}dec${sfx}${order}(${atomic}_t *v)
{
${retstmt}${atomic}_${pfx}sub${sfx}${order}(1, v);
diff --git a/scripts/atomic/fallbacks/dec_and_test b/scripts/atomic/fallbacks/dec_and_test
index 0ce7103b3df2..f8967a891117 100755
--- a/scripts/atomic/fallbacks/dec_and_test
+++ b/scripts/atomic/fallbacks/dec_and_test
@@ -7,7 +7,7 @@ cat <<EOF
* returns true if the result is 0, or false for all other
* cases.
*/
-static inline bool
+static __always_inline bool
${atomic}_dec_and_test(${atomic}_t *v)
{
return ${atomic}_dec_return(v) == 0;
diff --git a/scripts/atomic/fallbacks/dec_if_positive b/scripts/atomic/fallbacks/dec_if_positive
index c52eacec43c8..cfb380bd2da6 100755
--- a/scripts/atomic/fallbacks/dec_if_positive
+++ b/scripts/atomic/fallbacks/dec_if_positive
@@ -1,5 +1,5 @@
cat <<EOF
-static inline ${ret}
+static __always_inline ${ret}
${atomic}_dec_if_positive(${atomic}_t *v)
{
${int} dec, c = ${atomic}_read(v);
diff --git a/scripts/atomic/fallbacks/dec_unless_positive b/scripts/atomic/fallbacks/dec_unless_positive
index 8a2578f14268..69cb7aa01f9c 100755
--- a/scripts/atomic/fallbacks/dec_unless_positive
+++ b/scripts/atomic/fallbacks/dec_unless_positive
@@ -1,5 +1,5 @@
cat <<EOF
-static inline bool
+static __always_inline bool
${atomic}_dec_unless_positive(${atomic}_t *v)
{
${int} c = ${atomic}_read(v);
diff --git a/scripts/atomic/fallbacks/fence b/scripts/atomic/fallbacks/fence
index 82f68fa6931a..92a3a4691bab 100755
--- a/scripts/atomic/fallbacks/fence
+++ b/scripts/atomic/fallbacks/fence
@@ -1,5 +1,5 @@
cat <<EOF
-static inline ${ret}
+static __always_inline ${ret}
${atomic}_${pfx}${name}${sfx}(${params})
{
${ret} ret;
diff --git a/scripts/atomic/fallbacks/fetch_add_unless b/scripts/atomic/fallbacks/fetch_add_unless
index d2c091db7eae..fffbc0d16fdf 100755
--- a/scripts/atomic/fallbacks/fetch_add_unless
+++ b/scripts/atomic/fallbacks/fetch_add_unless
@@ -8,7 +8,7 @@ cat << EOF
* Atomically adds @a to @v, so long as @v was not already @u.
* Returns original value of @v
*/
-static inline ${int}
+static __always_inline ${int}
${atomic}_fetch_add_unless(${atomic}_t *v, ${int} a, ${int} u)
{
${int} c = ${atomic}_read(v);
diff --git a/scripts/atomic/fallbacks/inc b/scripts/atomic/fallbacks/inc
index f866b3ad2353..10751cd62829 100755
--- a/scripts/atomic/fallbacks/inc
+++ b/scripts/atomic/fallbacks/inc
@@ -1,5 +1,5 @@
cat <<EOF
-static inline ${ret}
+static __always_inline ${ret}
${atomic}_${pfx}inc${sfx}${order}(${atomic}_t *v)
{
${retstmt}${atomic}_${pfx}add${sfx}${order}(1, v);
diff --git a/scripts/atomic/fallbacks/inc_and_test b/scripts/atomic/fallbacks/inc_and_test
index 4e2068869f7e..4acea9c93604 100755
--- a/scripts/atomic/fallbacks/inc_and_test
+++ b/scripts/atomic/fallbacks/inc_and_test
@@ -7,7 +7,7 @@ cat <<EOF
* and returns true if the result is zero, or false for all
* other cases.
*/
-static inline bool
+static __always_inline bool
${atomic}_inc_and_test(${atomic}_t *v)
{
return ${atomic}_inc_return(v) == 0;
diff --git a/scripts/atomic/fallbacks/inc_not_zero b/scripts/atomic/fallbacks/inc_not_zero
index a7c45c8d107c..d9f7b97aab42 100755
--- a/scripts/atomic/fallbacks/inc_not_zero
+++ b/scripts/atomic/fallbacks/inc_not_zero
@@ -6,7 +6,7 @@ cat <<EOF
* Atomically increments @v by 1, if @v is non-zero.
* Returns true if the increment was done.
*/
-static inline bool
+static __always_inline bool
${atomic}_inc_not_zero(${atomic}_t *v)
{
return ${atomic}_add_unless(v, 1, 0);
diff --git a/scripts/atomic/fallbacks/inc_unless_negative b/scripts/atomic/fallbacks/inc_unless_negative
index 0c266e71dbd4..177a7cb51eda 100755
--- a/scripts/atomic/fallbacks/inc_unless_negative
+++ b/scripts/atomic/fallbacks/inc_unless_negative
@@ -1,5 +1,5 @@
cat <<EOF
-static inline bool
+static __always_inline bool
${atomic}_inc_unless_negative(${atomic}_t *v)
{
${int} c = ${atomic}_read(v);
diff --git a/scripts/atomic/fallbacks/read_acquire b/scripts/atomic/fallbacks/read_acquire
index 75863b5203f7..12fa83cb3a6d 100755
--- a/scripts/atomic/fallbacks/read_acquire
+++ b/scripts/atomic/fallbacks/read_acquire
@@ -1,5 +1,5 @@
cat <<EOF
-static inline ${ret}
+static __always_inline ${ret}
${atomic}_read_acquire(const ${atomic}_t *v)
{
return smp_load_acquire(&(v)->counter);
diff --git a/scripts/atomic/fallbacks/release b/scripts/atomic/fallbacks/release
index 3f628a3802d9..730d2a6d3e07 100755
--- a/scripts/atomic/fallbacks/release
+++ b/scripts/atomic/fallbacks/release
@@ -1,5 +1,5 @@
cat <<EOF
-static inline ${ret}
+static __always_inline ${ret}
${atomic}_${pfx}${name}${sfx}_release(${params})
{
__atomic_release_fence();
diff --git a/scripts/atomic/fallbacks/set_release b/scripts/atomic/fallbacks/set_release
index 45bb5e0cfc08..e5d72c717434 100755
--- a/scripts/atomic/fallbacks/set_release
+++ b/scripts/atomic/fallbacks/set_release
@@ -1,5 +1,5 @@
cat <<EOF
-static inline void
+static __always_inline void
${atomic}_set_release(${atomic}_t *v, ${int} i)
{
smp_store_release(&(v)->counter, i);
diff --git a/scripts/atomic/fallbacks/sub_and_test b/scripts/atomic/fallbacks/sub_and_test
index 289ef17a2d7a..6cfe4ed49746 100755
--- a/scripts/atomic/fallbacks/sub_and_test
+++ b/scripts/atomic/fallbacks/sub_and_test
@@ -8,7 +8,7 @@ cat <<EOF
* true if the result is zero, or false for all
* other cases.
*/
-static inline bool
+static __always_inline bool
${atomic}_sub_and_test(${int} i, ${atomic}_t *v)
{
return ${atomic}_sub_return(i, v) == 0;
diff --git a/scripts/atomic/fallbacks/try_cmpxchg b/scripts/atomic/fallbacks/try_cmpxchg
index 4ed85e2f5378..c7a26213b978 100755
--- a/scripts/atomic/fallbacks/try_cmpxchg
+++ b/scripts/atomic/fallbacks/try_cmpxchg
@@ -1,5 +1,5 @@
cat <<EOF
-static inline bool
+static __always_inline bool
${atomic}_try_cmpxchg${order}(${atomic}_t *v, ${int} *old, ${int} new)
{
${int} r, o = *old;