author | Ingo Molnar <mingo@kernel.org> | 2020-02-24 13:36:09 +0300
committer | Ingo Molnar <mingo@kernel.org> | 2020-02-24 13:36:09 +0300
commit | 546121b65f47384e11ec1fa2e55449fc9f4846b2 (patch)
tree | 8f18470ec7c0c77b0f48eb1b2338e591b0b0aaff /arch/mips/lib/memset.S
parent | 000619680c3714020ce9db17eef6a4a7ce2dc28b (diff)
parent | f8788d86ab28f61f7b46eb6be375f8a726783636 (diff)
download | linux-546121b65f47384e11ec1fa2e55449fc9f4846b2.tar.xz
Merge tag 'v5.6-rc3' into sched/core, to pick up fixes and dependent patches
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'arch/mips/lib/memset.S')
-rw-r--r-- | arch/mips/lib/memset.S | 16
1 file changed, 8 insertions, 8 deletions
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index 418611ef13cf..d5449e8a3dfc 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -115,7 +115,7 @@
#endif
	.set		reorder

-#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
@@ -125,7 +125,7 @@
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

-#else /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
@@ -150,7 +150,7 @@
	ori		a0, STORMASK
	xori		a0, STORMASK
	PTR_ADDIU	a0, STORSIZE
-#endif /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
@@ -185,7 +185,7 @@
	.set		noreorder
	beqz		a2, 1f
-#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
@@ -194,7 +194,7 @@
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
-#else
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero		/* No remaining longs */
@@ -211,7 +211,7 @@
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
-#endif
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero
	jr		ra
@@ -234,7 +234,7 @@
	.hidden __memset
	.endif

-#ifndef CONFIG_CPU_HAS_LOAD_STORE_LR
+#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
@@ -243,7 +243,7 @@
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
-#endif /* !CONFIG_CPU_HAS_LOAD_STORE_LR */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
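The change above only flips the polarity of the configuration guard: every `#ifdef CONFIG_CPU_HAS_LOAD_STORE_LR` becomes `#ifndef CONFIG_CPU_NO_LOAD_STORE_LR` (and the matching `#else`/`#endif` comments follow suit), so the assembled code is the same for both kinds of CPU. Below is a minimal C-preprocessor sketch of that equivalence; `NO_LR` is a hypothetical stand-in for the real Kconfig symbol, not something defined by the kernel.

/*
 * Sketch only: NO_LR stands in for CONFIG_CPU_NO_LOAD_STORE_LR
 * (the old spelling was the positive CONFIG_CPU_HAS_LOAD_STORE_LR).
 * Only one spelling exists for a given CPU, so the old "#ifdef HAS"
 * and the new "#ifndef NO" select the same branch.
 */
#include <stdio.h>

/* #define NO_LR 1 */	/* define to model a CPU without swl/swr, sdl/sdr */

int main(void)
{
#ifndef NO_LR		/* CPU has the unaligned partial-store instructions */
	puts("use LONG_S_L/LONG_S_R to handle the unaligned head and tail");
#else			/* NO_LR, i.e. CONFIG_CPU_NO_LOAD_STORE_LR */
	puts("fall back to STORE_BYTE(): one sb per unaligned byte");
#endif
	return 0;
}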