author     Anton Blanchard <anton@samba.org>                  2012-08-07 21:51:41 +0400
committer  Benjamin Herrenschmidt <benh@kernel.crashing.org>  2012-08-24 14:26:09 +0400
commit     2fae7cdb60240e2e2d9b378afbf6d9fcce8a3890
tree       b866b45dcd53a21646ab1bc10148988c158b21f9
parent     dad477ccd65f05bf3b5a874e0118bf4156a1fcbb
powerpc: Fix VMX in interrupt check in POWER7 copy loops
The enhanced prefetch hint patches corrupt the condition register
that was used to check if we are in interrupt. Fix this by using cr1.
Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
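
For context on why the explicit CR field matters: the simplified mnemonics cmpwi, cmpldi and beq all default to cr0, so any compare performed by the enhanced prefetch hint setup between the enter_vmx_* call and the branch silently overwrites the earlier "are we in interrupt?" test. The following is a minimal illustrative sketch only, not the kernel code; the 0x3FF compare stands in for the kind of stream-length capping the prefetch setup does, and .Lfallback is a placeholder label:

	/* Broken pattern: both compares implicitly target cr0 */
	cmpwi	r3,0			/* really "cmpwi cr0,r3,0" */
	cmpldi	r7,0x3FF		/* also writes cr0, clobbering the r3 result */
	beq	.Lfallback		/* tests cr0: branches on the r7 compare */

	/* Pattern used by this fix: keep the result in cr1 */
	cmpwi	cr1,r3,0		/* park the "in interrupt?" result in cr1 */
	cmpldi	r7,0x3FF		/* still clobbers cr0, cr1 is untouched */
	beq	cr1,.Lfallback		/* tests cr1: branches on the r3 compare */

cr1 is safe here because the intervening prefetch setup only ever uses the default cr0.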
 arch/powerpc/lib/copyuser_power7.S | 4 ++--
 arch/powerpc/lib/memcpy_power7.S   | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/powerpc/lib/copyuser_power7.S b/arch/powerpc/lib/copyuser_power7.S
index 4a4a46fbad0e..0d24ff15f5f6 100644
--- a/arch/powerpc/lib/copyuser_power7.S
+++ b/arch/powerpc/lib/copyuser_power7.S
@@ -288,7 +288,7 @@ err1;	stb	r0,0(r3)
 	std	r0,16(r1)
 	stdu	r1,-STACKFRAMESIZE(r1)
 	bl	.enter_vmx_usercopy
-	cmpwi	r3,0
+	cmpwi	cr1,r3,0
 	ld	r0,STACKFRAMESIZE+16(r1)
 	ld	r3,STACKFRAMESIZE+48(r1)
 	ld	r4,STACKFRAMESIZE+56(r1)
@@ -326,7 +326,7 @@ err1;	stb	r0,0(r3)
 	dcbt	r0,r8,0b01010	/* GO */
 	.machine pop
 
-	beq	.Lunwind_stack_nonvmx_copy
+	beq	cr1,.Lunwind_stack_nonvmx_copy
 
 	/*
 	 * If source and destination are not relatively aligned we use a
diff --git a/arch/powerpc/lib/memcpy_power7.S b/arch/powerpc/lib/memcpy_power7.S
index 0efdc51bc716..7ba6c96de778 100644
--- a/arch/powerpc/lib/memcpy_power7.S
+++ b/arch/powerpc/lib/memcpy_power7.S
@@ -222,7 +222,7 @@ _GLOBAL(memcpy_power7)
 	std	r0,16(r1)
 	stdu	r1,-STACKFRAMESIZE(r1)
 	bl	.enter_vmx_copy
-	cmpwi	r3,0
+	cmpwi	cr1,r3,0
 	ld	r0,STACKFRAMESIZE+16(r1)
 	ld	r3,STACKFRAMESIZE+48(r1)
 	ld	r4,STACKFRAMESIZE+56(r1)
@@ -260,7 +260,7 @@ _GLOBAL(memcpy_power7)
 	dcbt	r0,r8,0b01010	/* GO */
 	.machine pop
 
-	beq	.Lunwind_stack_nonvmx_copy
+	beq	cr1,.Lunwind_stack_nonvmx_copy
 
 	/*
 	 * If source and destination are not relatively aligned we use a