author     Linus Torvalds <torvalds@linux-foundation.org>  2018-12-07 03:39:44 +0300
committer  Linus Torvalds <torvalds@linux-foundation.org>  2018-12-07 03:39:44 +0300
commit     7e40b56c776f75a35838360bf6360e2aa9311f92 (patch)
tree       8e057012a093f9bda070d00b476085ec97e14f4f /arch
parent     abb8d6ecbd8f7801c048f6543f79d22d24cead7b (diff)
parent     c2a3831df6dc164af66d8d86cf356a90c021b86f (diff)
download   linux-7e40b56c776f75a35838360bf6360e2aa9311f92.tar.xz
Merge branch 'fixes' of git://git.armlinux.org.uk/~rmk/linux-arm
Pull ARM fixes from Russell King:
 "Some small fixes that have been accumulated:

   - Chris Cole noticed that in an SMP environment, the DMA cache
     coherence handling can produce undesirable results in a corner case

   - Propagate that fix for ARMv7M as well

   - Fix a false positive with source fortification

   - Fix an uninitialised return that Nathan Jones spotted"

* 'fixes' of git://git.armlinux.org.uk/~rmk/linux-arm:
  ARM: 8816/1: dma-mapping: fix potential uninitialized return
  ARM: 8815/1: V7M: align v7m_dma_inv_range() with v7 counterpart
  ARM: 8814/1: mm: improve/fix ARM v7_dma_inv_range() unaligned address handling
  ARM: 8806/1: kprobes: Fix false positive with FORTIFY_SOURCE
Diffstat (limited to 'arch')
-rw-r--r--  arch/arm/mm/cache-v7.S             |  8
-rw-r--r--  arch/arm/mm/cache-v7m.S            | 14
-rw-r--r--  arch/arm/mm/dma-mapping.c          |  2
-rw-r--r--  arch/arm/probes/kprobes/opt-arm.c  |  2
4 files changed, 16 insertions, 10 deletions
diff --git a/arch/arm/mm/cache-v7.S b/arch/arm/mm/cache-v7.S
index 215df435bfb9..2149b47a0c5a 100644
--- a/arch/arm/mm/cache-v7.S
+++ b/arch/arm/mm/cache-v7.S
@@ -360,14 +360,16 @@ v7_dma_inv_range:
ALT_UP(W(nop))
#endif
mcrne p15, 0, r0, c7, c14, 1 @ clean & invalidate D / U line
+ addne r0, r0, r2
tst r1, r3
bic r1, r1, r3
mcrne p15, 0, r1, c7, c14, 1 @ clean & invalidate D / U line
-1:
- mcr p15, 0, r0, c7, c6, 1 @ invalidate D / U line
- add r0, r0, r2
cmp r0, r1
+1:
+ mcrlo p15, 0, r0, c7, c6, 1 @ invalidate D / U line
+ addlo r0, r0, r2
+ cmplo r0, r1
blo 1b
dsb st
ret lr
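The hunk above restructures v7_dma_inv_range() so that a partially covered first or last cache line is cleaned and invalidated and then skipped, and the plain-invalidate loop runs only over whole lines inside the buffer; that stops the loop from throwing away data that merely shares the first or last line with the buffer. A minimal C sketch of the corrected flow follows, assuming a 64-byte line; clean_and_inv_dline() and inv_dline() are hypothetical stand-ins for the "mcr ... c7, c14, 1" and "mcr ... c7, c6, 1" operations, not kernel APIs, and barriers (dmb/dsb) are omitted.

/*
 * Sketch only: models the loop structure of the fixed v7_dma_inv_range(),
 * not the kernel implementation itself.
 */
#include <stdio.h>

#define LINE_SZ 64UL	/* assumed cache line size for illustration */

static void clean_and_inv_dline(unsigned long a) { printf("c+i %#lx\n", a); }
static void inv_dline(unsigned long a)           { printf("inv %#lx\n", a); }

static void dma_inv_range_sketch(unsigned long start, unsigned long end)
{
	unsigned long mask = LINE_SZ - 1;

	if (start & mask) {
		/* Partial first line: clean+invalidate it, then step past it
		 * so the plain-invalidate loop can never discard data that
		 * shares the line (the added "addne r0, r0, r2" above). */
		clean_and_inv_dline(start & ~mask);
		start = (start & ~mask) + LINE_SZ;
	}
	if (end & mask)
		/* Partial last line: clean+invalidate as before. */
		clean_and_inv_dline(end & ~mask);
	end &= ~mask;

	/* Only whole lines strictly inside the buffer are invalidated without
	 * a clean; if nothing remains, the body never runs (cmp + mcrlo). */
	while (start < end) {
		inv_dline(start);
		start += LINE_SZ;
	}
}

int main(void)
{
	dma_inv_range_sketch(0x1010, 0x10f0);	/* both ends unaligned */
	return 0;
}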
diff --git a/arch/arm/mm/cache-v7m.S b/arch/arm/mm/cache-v7m.S
index 788486e830d3..32aa2a2aa260 100644
--- a/arch/arm/mm/cache-v7m.S
+++ b/arch/arm/mm/cache-v7m.S
@@ -73,9 +73,11 @@
/*
* dcimvac: Invalidate data cache line by MVA to PoC
*/
-.macro dcimvac, rt, tmp
- v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC
+.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
+.macro dcimvac\c, rt, tmp
+ v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC, \c
.endm
+.endr
/*
* dccmvau: Clean data cache line by MVA to PoU
@@ -369,14 +371,16 @@ v7m_dma_inv_range:
tst r0, r3
bic r0, r0, r3
dccimvacne r0, r3
+ addne r0, r0, r2
subne r3, r2, #1 @ restore r3, corrupted by v7m's dccimvac
tst r1, r3
bic r1, r1, r3
dccimvacne r1, r3
-1:
- dcimvac r0, r3
- add r0, r0, r2
cmp r0, r1
+1:
+ dcimvaclo r0, r3
+ addlo r0, r0, r2
+ cmplo r0, r1
blo 1b
dsb st
ret lr
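The first hunk wraps the dcimvac definition in an .irp loop over the condition suffixes, so conditional variants such as the dcimvaclo used in the rewritten loop exist, mirroring how the v7 version uses mcrlo; the second hunk is the same loop restructuring as in cache-v7.S. A rough C analogue of stamping out such a family from one definition is sketched below; the names and the logging bodies are invented for the illustration, the real macros emit a cache-operation store.

/* Hedged C illustration of the .irp idea: one expansion per condition
 * suffix defines a dcimvac<cond> variant. */
#include <stdio.h>

/* Subset of the condition suffixes the .irp loop iterates over. */
#define FOR_EACH_COND(X) X() X(eq) X(ne) X(hs) X(lo)

#define DEFINE_DCIMVAC(c)				\
	static void dcimvac##c(unsigned long mva)	\
	{						\
		printf("DCIMVAC%s %#lx\n", #c, mva);	\
	}

FOR_EACH_COND(DEFINE_DCIMVAC)	/* defines dcimvac, dcimvaceq, ..., dcimvaclo */

int main(void)
{
	dcimvac(0x1000);	/* unconditional variant */
	dcimvaclo(0x1040);	/* "lo" variant used by the loop above */
	return 0;
}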
diff --git a/arch/arm/mm/dma-mapping.c b/arch/arm/mm/dma-mapping.c
index 661fe48ab78d..78de138aa66d 100644
--- a/arch/arm/mm/dma-mapping.c
+++ b/arch/arm/mm/dma-mapping.c
@@ -829,7 +829,7 @@ static int __arm_dma_mmap(struct device *dev, struct vm_area_struct *vma,
void *cpu_addr, dma_addr_t dma_addr, size_t size,
unsigned long attrs)
{
- int ret;
+ int ret = -ENXIO;
unsigned long nr_vma_pages = vma_pages(vma);
unsigned long nr_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
unsigned long pfn = dma_to_pfn(dev, dma_addr);
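The one-line change above seeds ret with -ENXIO, so the path where the requested vma does not fit inside the buffer and the remap step is skipped returns a defined error instead of an uninitialised stack value (the issue Nathan Jones spotted). Below is a hedged, simplified model of that bug class, not the kernel's __arm_dma_mmap(); the helper and the "map" step are stand-ins.

#include <errno.h>
#include <stdbool.h>
#include <stdio.h>

static bool range_fits(unsigned long off, unsigned long vma_pages,
		       unsigned long buf_pages)
{
	return off < buf_pages && vma_pages <= buf_pages - off;
}

static int mmap_sketch(unsigned long off, unsigned long vma_pages,
		       unsigned long buf_pages)
{
	int ret = -ENXIO;	/* the fix: a defined result for the skip path */

	if (range_fits(off, vma_pages, buf_pages))
		ret = 0;	/* stands in for a successful remap */

	return ret;		/* was uninitialised whenever the check failed */
}

int main(void)
{
	printf("fits:   %d\n", mmap_sketch(0, 4, 8));	/* 0 */
	printf("no fit: %d\n", mmap_sketch(6, 4, 8));	/* -ENXIO */
	return 0;
}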
diff --git a/arch/arm/probes/kprobes/opt-arm.c b/arch/arm/probes/kprobes/opt-arm.c
index b2aa9b32bff2..2c118a6ab358 100644
--- a/arch/arm/probes/kprobes/opt-arm.c
+++ b/arch/arm/probes/kprobes/opt-arm.c
@@ -247,7 +247,7 @@ int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *or
}
/* Copy arch-dep-instance from template. */
- memcpy(code, &optprobe_template_entry,
+ memcpy(code, (unsigned char *)optprobe_template_entry,
TMPL_END_IDX * sizeof(kprobe_opcode_t));
/* Adjust buffer according to instruction. */
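The cast works around a FORTIFY_SOURCE false positive: optprobe_template_entry is a label defined in inline assembly, and the size its C declaration makes visible is far smaller than the TMPL_END_IDX * sizeof(kprobe_opcode_t) bytes the template actually occupies, so the fortified memcpy() flagged a read overflow even though the bytes exist. The userspace toy below (an assumption-level model for GCC/Clang, not the kernel code) shows the underlying mismatch: __builtin_object_size(), which kernel fortification consults, reports the small C-visible size rather than what the assembler really placed behind the symbol.

/* Toy only: the assembler provides 64 bytes behind template_blob, but C
 * declares it as a single 4-byte object, so size-checking sees "4". */
#include <stdio.h>

__asm__(".pushsection .rodata\n"
	".globl template_blob\n"
	"template_blob:\n"
	".space 64\n"
	".popsection\n");

extern const unsigned int template_blob;	/* C-visible view: 4 bytes */

int main(void)
{
	/* A fortified copy compares its length against this value for the
	 * source argument; copying the full 64-byte template would be
	 * reported as a read overflow despite being safe at runtime. */
	printf("apparent size of template_blob: %zu\n",
	       (size_t)__builtin_object_size(&template_blob, 0));
	return 0;
}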