author    Matt Redfearn <matt.redfearn@mips.com>  2018-03-29 12:28:23 +0300
committer James Hogan <jhogan@kernel.org>         2018-04-16 23:31:26 +0300
commit    8a8158c85e1e774a44fbe81106fa41138580dfd1 (patch)
tree      140e1d39f575b2572edb6d385ccb5dc81b41b8b7
parent    2c2bf522ed8cbfaac666f7dc65cfd38de2b89f0f (diff)
MIPS: memset.S: EVA & fault support for small_memset
The MIPS kernel memset / bzero implementation includes a small_memset branch
which is used when the region to be set is smaller than a long (4 bytes on
32bit, 8 bytes on 64bit). The current small_memset implementation uses a
simple store byte loop to write the destination. There are 2 issues with this
implementation:

1. When EVA mode is active, user and kernel address spaces may overlap.
   Currently the use of the sb instruction means kernel mode addressing is
   always used and an intended write to userspace may actually overwrite some
   critical kernel data.

2. If the write triggers a page fault, for example by calling
   __clear_user(NULL, 2), instead of gracefully handling the fault, an OOPS
   is triggered.

Fix these issues by replacing the sb instruction with the EX() macro, which
will emit EVA compatible instructions as required. Additionally implement a
fault fixup for small_memset which sets a2 to the number of bytes that could
not be cleared (as defined by __clear_user).

Reported-by: Chuanhua Lei <chuanhua.lei@intel.com>
Signed-off-by: Matt Redfearn <matt.redfearn@mips.com>
Cc: Ralf Baechle <ralf@linux-mips.org>
Cc: linux-mips@linux-mips.org
Cc: stable@vger.kernel.org
Patchwork: https://patchwork.linux-mips.org/patch/18975/
Signed-off-by: James Hogan <jhogan@kernel.org>
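For reference, below is a minimal userspace C sketch (not part of the patch)
of the arithmetic the new .Lsmall_fixup\@ handler performs; the helper name
small_fixup and the a0_at_fault/t1 variable names are hypothetical and simply
mirror the registers used in memset.S.

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    /*
     * Model of the small_memset fault fixup (illustrative only, not kernel
     * code). In memset.S: a0 = destination pointer, a2 = byte count, and
     * t1 = a0 + a2 (one past the last byte). The bytewise loop increments
     * a0 first and stores to -1(a0) in the branch delay slot, so when a
     * store faults the failing byte is at a0 - 1 and the bytes left
     * uncleared are t1 - a0 + 1, which is what .Lsmall_fixup\@ computes
     * into a2 (PTR_SUBU a2, t1, a0 followed by PTR_ADDIU a2, 1).
     */
    static size_t small_fixup(uintptr_t a0_at_fault, uintptr_t t1)
    {
            return t1 - a0_at_fault + 1;
    }

    int main(void)
    {
            /* __clear_user(NULL, 2): a0 = 0, a2 = 2, so t1 = 2. */
            uintptr_t t1 = 0 + 2;
            /* First iteration bumps a0 to 1, then the store to byte 0 faults. */
            uintptr_t a0_at_fault = 1;

            /* Both bytes are reported as not cleared, as __clear_user requires. */
            assert(small_fixup(a0_at_fault, t1) == 2);
            return 0;
    }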
-rw-r--r--  arch/mips/lib/memset.S | 7
 1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index a1456664d6c2..90bcdf1224ee 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -219,7 +219,7 @@
 1:	PTR_ADDIU	a0, 1			/* fill bytewise */
 	R10KCBARRIER(0(ra))
 	bne		t1, a0, 1b
-	 sb		a1, -1(a0)
+	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
 
 2:	jr		ra			/* done */
 	 move		a2, zero
@@ -260,6 +260,11 @@
 	jr		ra
 	andi		v1, a2, STORMASK
 
+.Lsmall_fixup\@:
+	PTR_SUBU	a2, t1, a0
+	jr		ra
+	 PTR_ADDIU	a2, 1
+
 .endm
 
 /*