@@ -126,26 +126,26 @@ END_FW_FTR_SECTION_IFSET(FW_FEATURE_SPLPAR)
 #define REST_32VRS(n,b,base) REST_16VRS(n,b,base); REST_16VRS(n+16,b,base)
 
 /* Save the lower 32 VSRs in the thread VSR region */
-#define SAVE_VSR(n,b,base) li b,THREAD_VSR0+(16*(n)); STXVD2X(n,base,b)
+#define SAVE_VSR(n,b,base) li b,THREAD_VSR0+(16*(n)); STXVD2X(n,R##base,R##b)
 #define SAVE_2VSRS(n,b,base) SAVE_VSR(n,b,base); SAVE_VSR(n+1,b,base)
 #define SAVE_4VSRS(n,b,base) SAVE_2VSRS(n,b,base); SAVE_2VSRS(n+2,b,base)
 #define SAVE_8VSRS(n,b,base) SAVE_4VSRS(n,b,base); SAVE_4VSRS(n+4,b,base)
 #define SAVE_16VSRS(n,b,base) SAVE_8VSRS(n,b,base); SAVE_8VSRS(n+8,b,base)
 #define SAVE_32VSRS(n,b,base) SAVE_16VSRS(n,b,base); SAVE_16VSRS(n+16,b,base)
-#define REST_VSR(n,b,base) li b,THREAD_VSR0+(16*(n)); LXVD2X(n,base,b)
+#define REST_VSR(n,b,base) li b,THREAD_VSR0+(16*(n)); LXVD2X(n,R##base,R##b)
 #define REST_2VSRS(n,b,base) REST_VSR(n,b,base); REST_VSR(n+1,b,base)
 #define REST_4VSRS(n,b,base) REST_2VSRS(n,b,base); REST_2VSRS(n+2,b,base)
 #define REST_8VSRS(n,b,base) REST_4VSRS(n,b,base); REST_4VSRS(n+4,b,base)
 #define REST_16VSRS(n,b,base) REST_8VSRS(n,b,base); REST_8VSRS(n+8,b,base)
 #define REST_32VSRS(n,b,base) REST_16VSRS(n,b,base); REST_16VSRS(n+16,b,base)
 /* Save the upper 32 VSRs (32-63) in the thread VSX region (0-31) */
-#define SAVE_VSRU(n,b,base) li b,THREAD_VR0+(16*(n)); STXVD2X(n+32,base,b)
+#define SAVE_VSRU(n,b,base) li b,THREAD_VR0+(16*(n)); STXVD2X(n+32,R##base,R##b)
 #define SAVE_2VSRSU(n,b,base) SAVE_VSRU(n,b,base); SAVE_VSRU(n+1,b,base)
 #define SAVE_4VSRSU(n,b,base) SAVE_2VSRSU(n,b,base); SAVE_2VSRSU(n+2,b,base)
 #define SAVE_8VSRSU(n,b,base) SAVE_4VSRSU(n,b,base); SAVE_4VSRSU(n+4,b,base)
 #define SAVE_16VSRSU(n,b,base) SAVE_8VSRSU(n,b,base); SAVE_8VSRSU(n+8,b,base)
 #define SAVE_32VSRSU(n,b,base) SAVE_16VSRSU(n,b,base); SAVE_16VSRSU(n+16,b,base)
-#define REST_VSRU(n,b,base) li b,THREAD_VR0+(16*(n)); LXVD2X(n+32,base,b)
+#define REST_VSRU(n,b,base) li b,THREAD_VR0+(16*(n)); LXVD2X(n+32,R##base,R##b)
 #define REST_2VSRSU(n,b,base) REST_VSRU(n,b,base); REST_VSRU(n+1,b,base)
 #define REST_4VSRSU(n,b,base) REST_2VSRSU(n,b,base); REST_2VSRSU(n+2,b,base)
 #define REST_8VSRSU(n,b,base) REST_4VSRSU(n,b,base); REST_4VSRSU(n+4,b,base)
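The STXVD2X/LXVD2X used above are the ppc-opcode.h macros that build the VSX instruction word by hand, so their register operands must reduce to plain integers at preprocessing time. Pasting R onto the macro's base and b parameters routes them through the __REG_* numeric defines rather than relying on rN itself expanding to a bare number. A minimal sketch of that indirection, assuming the __PPC_RA and __REG_* definitions from ppc-opcode.h (only R10 shown; not part of the patch):

	/* Illustrative sketch of the ppc-opcode.h side this paste relies on. */
	#define __REG_R10	10			/* each GPR has a __REG_Rn define */
	#define ___PPC_RA(a)	(((a) & 0x1f) << 16)	/* raw RA field of the instruction word */
	#define __PPC_RA(a)	___PPC_RA(__REG_##a)	/* accepts the name R10, not the number 10 */

Whatever a caller passes for base or b, prefixing it with R has to name one of the __REG_* constants, so the hand encoder still receives a plain register number even once the rN defines stop expanding to one.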
@@ -183,15 +183,18 @@ END_FW_FTR_SECTION_IFSET(FW_FEATURE_SPLPAR)
 #else
 #define ULONG_SIZE 4
 #endif
-#define VCPU_GPR(n) (VCPU_GPRS + (n * ULONG_SIZE))
+#define __VCPU_GPR(n) (VCPU_GPRS + (n * ULONG_SIZE))
+#define VCPU_GPR(n) __VCPU_GPR(__REG_##n)
 
 #ifdef __KERNEL__
 #ifdef CONFIG_PPC64
 
 #define STACKFRAMESIZE 256
-#define STK_REG(i) (112 + ((i)-14)*8)
+#define __STK_REG(i) (112 + ((i)-14)*8)
+#define STK_REG(i) __STK_REG(__REG_##i)
 
-#define STK_PARAM(i) (48 + ((i)-3)*8)
+#define __STK_PARAM(i) (48 + ((i)-3)*8)
+#define STK_PARAM(i) __STK_PARAM(__REG_##i)
 
 #define XGLUE(a,b) a##b
 #define GLUE(a,b) XGLUE(a,b)
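The VCPU_GPR, STK_REG and STK_PARAM wrappers apply the same idea to offset arithmetic: the visible macro now takes an R-form name, __REG_## turns it back into an integer, and the double-underscore helper keeps doing arithmetic that would no longer preprocess if the argument were an assembler register name such as %r14. A rough expansion with a hypothetical caller, assuming __REG_R14 is defined as 14 (as in ppc-opcode.h):

	std	r14,STK_REG(R14)(r1)
		/* STK_REG(R14) -> __STK_REG(__REG_R14) -> __STK_REG(14)
		 *             -> (112 + ((14)-14)*8) = 112 */

This implies callers spell the argument R14 rather than r14 from now on; the old lowercase form only worked because r14 happened to expand to a bare number.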