@@ -61,7 +61,7 @@
ld r15,STK_REG(r15)(r1)
ld r14,STK_REG(r14)(r1)
.Ldo_err3:
- bl .exit_vmx_copy
+ bl .exit_vmx_usercopy
ld r0,STACKFRAMESIZE+16(r1)
mtlr r0
b .Lexit
@@ -290,7 +290,7 @@ err1; stb r0,0(r3)
mflr r0
std r0,16(r1)
stdu r1,-STACKFRAMESIZE(r1)
- bl .enter_vmx_copy
+ bl .enter_vmx_usercopy
cmpwi r3,0
ld r0,STACKFRAMESIZE+16(r1)
ld r3,STACKFRAMESIZE+48(r1)
@@ -507,7 +507,7 @@ err3; lbz r0,0(r4)
err3; stb r0,0(r3)

15: addi r1,r1,STACKFRAMESIZE
- b .exit_vmx_copy /* tail call optimise */
+ b .exit_vmx_usercopy /* tail call optimise */

.Lvmx_unaligned_copy:
/* Get the destination 16B aligned */
@@ -710,5 +710,5 @@ err3; lbz r0,0(r4)
err3; stb r0,0(r3)

15: addi r1,r1,STACKFRAMESIZE
- b .exit_vmx_copy /* tail call optimise */
+ b .exit_vmx_usercopy /* tail call optimise */
#endif /* CONFIG_ALTIVEC */
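For reference only, and not part of this diff: the enter/exit helpers the assembly calls are C functions, and the sketch below is an assumption about their shape rather than the kernel's exact code. The idea it illustrates is that enter_vmx_usercopy() is expected to return nonzero in r3 when the VMX path may be used, which is what the `cmpwi r3,0` after the `bl .enter_vmx_usercopy` above tests, and exit_vmx_usercopy() undoes that state on the way out.

/*
 * Hedged sketch only -- assumed shape of the renamed usercopy helpers.
 * Header locations vary between kernel versions; the calls used here
 * (in_interrupt, preempt_disable, pagefault_disable,
 * enable_kernel_altivec) are standard kernel APIs.
 */
#include <linux/preempt.h>
#include <linux/hardirq.h>
#include <linux/uaccess.h>
#include <asm/switch_to.h>

int enter_vmx_usercopy(void)
{
	if (in_interrupt())
		return 0;	/* r3 == 0: asm caller falls back to the non-VMX copy */

	preempt_disable();
	/*
	 * A page fault during the copy could schedule and lose the VMX
	 * context, so disable page faults and let a faulting access fail,
	 * which sends the caller back to the ordinary copy loop.
	 */
	pagefault_disable();

	enable_kernel_altivec();	/* make the VMX registers usable */

	return 1;	/* r3 != 0: VMX copy path is safe */
}

int exit_vmx_usercopy(void)
{
	pagefault_enable();
	preempt_enable();
	return 0;
}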