author:    Anton Blanchard <anton@samba.org>	2015-02-10 01:51:22 +0300
committer: Michael Ellerman <mpe@ellerman.id.au>	2015-03-16 10:32:11 +0300
commit:    c2ce6f9f3dc00daca5714ef070a9a2d4e78eb336
tree:      c008f72ced83ffd950f0920566c378b4809780cf /arch/powerpc/lib/crtsavres.S
parent:    06e5801b8cb3fc057d88cb4dc03c0b64b2744cda
powerpc: Change vrX register defines to vX to match gcc and glibc
As our various loops (copy, string, crypto, etc.) get more complicated, we want to share implementations between userspace (e.g. glibc) and the kernel. We also want to write userspace test harnesses to put in tools/testing/selftest.

One gratuitous difference between userspace and the kernel is the VMX register definitions: the kernel uses vrX whereas both gcc and glibc use vX. Change the kernel to match userspace.

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
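For context, a minimal sketch of what the rename amounts to at the definition site. The header location (arch/powerpc/include/asm/ppc_asm.h) and the surrounding layout are assumptions for illustration; this particular diff only shows the users of the defines, not their definitions.

	/* Old kernel-only spelling (header assumed for illustration,
	 * e.g. arch/powerpc/include/asm/ppc_asm.h): */
	#define vr20	20
	#define vr21	21
	/* ... */

	/* New spelling, matching the names gcc and glibc already use: */
	#define v20	20
	#define v21	21
	/* ... */

Either spelling expands to the same register number, so stvx v20,r11,r0 assembles to the identical instruction as the old stvx vr20,r11,r0; only the symbolic name changes.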
Diffstat (limited to 'arch/powerpc/lib/crtsavres.S')
 -rw-r--r--  arch/powerpc/lib/crtsavres.S  96
 1 file changed, 48 insertions(+), 48 deletions(-)
diff --git a/arch/powerpc/lib/crtsavres.S b/arch/powerpc/lib/crtsavres.S
index a5b30c71a8d3..18af0b3d3eb2 100644
--- a/arch/powerpc/lib/crtsavres.S
+++ b/arch/powerpc/lib/crtsavres.S
@@ -236,78 +236,78 @@ _GLOBAL(_rest32gpr_31_x)
_GLOBAL(_savevr_20)
li r11,-192
- stvx vr20,r11,r0
+ stvx v20,r11,r0
_GLOBAL(_savevr_21)
li r11,-176
- stvx vr21,r11,r0
+ stvx v21,r11,r0
_GLOBAL(_savevr_22)
li r11,-160
- stvx vr22,r11,r0
+ stvx v22,r11,r0
_GLOBAL(_savevr_23)
li r11,-144
- stvx vr23,r11,r0
+ stvx v23,r11,r0
_GLOBAL(_savevr_24)
li r11,-128
- stvx vr24,r11,r0
+ stvx v24,r11,r0
_GLOBAL(_savevr_25)
li r11,-112
- stvx vr25,r11,r0
+ stvx v25,r11,r0
_GLOBAL(_savevr_26)
li r11,-96
- stvx vr26,r11,r0
+ stvx v26,r11,r0
_GLOBAL(_savevr_27)
li r11,-80
- stvx vr27,r11,r0
+ stvx v27,r11,r0
_GLOBAL(_savevr_28)
li r11,-64
- stvx vr28,r11,r0
+ stvx v28,r11,r0
_GLOBAL(_savevr_29)
li r11,-48
- stvx vr29,r11,r0
+ stvx v29,r11,r0
_GLOBAL(_savevr_30)
li r11,-32
- stvx vr30,r11,r0
+ stvx v30,r11,r0
_GLOBAL(_savevr_31)
li r11,-16
- stvx vr31,r11,r0
+ stvx v31,r11,r0
blr
_GLOBAL(_restvr_20)
li r11,-192
- lvx vr20,r11,r0
+ lvx v20,r11,r0
_GLOBAL(_restvr_21)
li r11,-176
- lvx vr21,r11,r0
+ lvx v21,r11,r0
_GLOBAL(_restvr_22)
li r11,-160
- lvx vr22,r11,r0
+ lvx v22,r11,r0
_GLOBAL(_restvr_23)
li r11,-144
- lvx vr23,r11,r0
+ lvx v23,r11,r0
_GLOBAL(_restvr_24)
li r11,-128
- lvx vr24,r11,r0
+ lvx v24,r11,r0
_GLOBAL(_restvr_25)
li r11,-112
- lvx vr25,r11,r0
+ lvx v25,r11,r0
_GLOBAL(_restvr_26)
li r11,-96
- lvx vr26,r11,r0
+ lvx v26,r11,r0
_GLOBAL(_restvr_27)
li r11,-80
- lvx vr27,r11,r0
+ lvx v27,r11,r0
_GLOBAL(_restvr_28)
li r11,-64
- lvx vr28,r11,r0
+ lvx v28,r11,r0
_GLOBAL(_restvr_29)
li r11,-48
- lvx vr29,r11,r0
+ lvx v29,r11,r0
_GLOBAL(_restvr_30)
li r11,-32
- lvx vr30,r11,r0
+ lvx v30,r11,r0
_GLOBAL(_restvr_31)
li r11,-16
- lvx vr31,r11,r0
+ lvx v31,r11,r0
blr
#endif /* CONFIG_ALTIVEC */
@@ -443,101 +443,101 @@ _restgpr0_31:
.globl _savevr_20
_savevr_20:
li r12,-192
- stvx vr20,r12,r0
+ stvx v20,r12,r0
.globl _savevr_21
_savevr_21:
li r12,-176
- stvx vr21,r12,r0
+ stvx v21,r12,r0
.globl _savevr_22
_savevr_22:
li r12,-160
- stvx vr22,r12,r0
+ stvx v22,r12,r0
.globl _savevr_23
_savevr_23:
li r12,-144
- stvx vr23,r12,r0
+ stvx v23,r12,r0
.globl _savevr_24
_savevr_24:
li r12,-128
- stvx vr24,r12,r0
+ stvx v24,r12,r0
.globl _savevr_25
_savevr_25:
li r12,-112
- stvx vr25,r12,r0
+ stvx v25,r12,r0
.globl _savevr_26
_savevr_26:
li r12,-96
- stvx vr26,r12,r0
+ stvx v26,r12,r0
.globl _savevr_27
_savevr_27:
li r12,-80
- stvx vr27,r12,r0
+ stvx v27,r12,r0
.globl _savevr_28
_savevr_28:
li r12,-64
- stvx vr28,r12,r0
+ stvx v28,r12,r0
.globl _savevr_29
_savevr_29:
li r12,-48
- stvx vr29,r12,r0
+ stvx v29,r12,r0
.globl _savevr_30
_savevr_30:
li r12,-32
- stvx vr30,r12,r0
+ stvx v30,r12,r0
.globl _savevr_31
_savevr_31:
li r12,-16
- stvx vr31,r12,r0
+ stvx v31,r12,r0
blr
.globl _restvr_20
_restvr_20:
li r12,-192
- lvx vr20,r12,r0
+ lvx v20,r12,r0
.globl _restvr_21
_restvr_21:
li r12,-176
- lvx vr21,r12,r0
+ lvx v21,r12,r0
.globl _restvr_22
_restvr_22:
li r12,-160
- lvx vr22,r12,r0
+ lvx v22,r12,r0
.globl _restvr_23
_restvr_23:
li r12,-144
- lvx vr23,r12,r0
+ lvx v23,r12,r0
.globl _restvr_24
_restvr_24:
li r12,-128
- lvx vr24,r12,r0
+ lvx v24,r12,r0
.globl _restvr_25
_restvr_25:
li r12,-112
- lvx vr25,r12,r0
+ lvx v25,r12,r0
.globl _restvr_26
_restvr_26:
li r12,-96
- lvx vr26,r12,r0
+ lvx v26,r12,r0
.globl _restvr_27
_restvr_27:
li r12,-80
- lvx vr27,r12,r0
+ lvx v27,r12,r0
.globl _restvr_28
_restvr_28:
li r12,-64
- lvx vr28,r12,r0
+ lvx v28,r12,r0
.globl _restvr_29
_restvr_29:
li r12,-48
- lvx vr29,r12,r0
+ lvx v29,r12,r0
.globl _restvr_30
_restvr_30:
li r12,-32
- lvx vr30,r12,r0
+ lvx v30,r12,r0
.globl _restvr_31
_restvr_31:
li r12,-16
- lvx vr31,r12,r0
+ lvx v31,r12,r0
blr
#endif /* CONFIG_ALTIVEC */
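A short note on how these routines are used, inferred from the code above: each _savevr_N entry point loads a fixed negative offset into r11 (r12 in the second, position-independent copy) and stores vN relative to the base address in r0, then falls through to _savevr_N+1, so a single call to _savevr_20 spills v20 through v31 into the 192 bytes below r0; the _restvr_N chain mirrors this with lvx. The caller sketch below is an illustrative assumption about how a compiler-generated prologue and epilogue might invoke them (GCC emits such out-of-line save/restore calls when optimising for size); FRAME_END is a hypothetical constant, not something defined in this file.

	# Illustrative caller (assumption, not part of this file): the prologue
	# points r0 just past the VMX save area and calls the routine, which
	# saves v29..v31 by falling through; the epilogue restores them.
	addi	r0,r1,FRAME_END		# FRAME_END: hypothetical frame offset
	bl	_savevr_29		# stores v29, v30, v31 below r0
	# ... function body may clobber v29-v31 ...
	addi	r0,r1,FRAME_END
	bl	_restvr_29		# reloads v29, v30, v31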