@@ -212,11 +212,19 @@ static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea,
 {
 	int err;
 	unsigned long x, b, c;
+#ifdef __LITTLE_ENDIAN__
+	int len = nb; /* save a copy of the length for byte reversal */
+#endif
 
 	/* unaligned, do this in pieces */
 	x = 0;
 	for (; nb > 0; nb -= c) {
+#ifdef __LITTLE_ENDIAN__
+		c = 1;
+#endif
+#ifdef __BIG_ENDIAN__
 		c = max_align(ea);
+#endif
 		if (c > nb)
 			c = max_align(nb);
 		err = read_mem_aligned(&b, ea, c);
@@ -225,7 +233,24 @@ static int __kprobes read_mem_unaligned(unsigned long *dest, unsigned long ea,
 		x = (x << (8 * c)) + b;
 		ea += c;
 	}
+#ifdef __LITTLE_ENDIAN__
+	switch (len) {
+	case 2:
+		*dest = byterev_2(x);
+		break;
+	case 4:
+		*dest = byterev_4(x);
+		break;
+#ifdef __powerpc64__
+	case 8:
+		*dest = byterev_8(x);
+		break;
+#endif
+	}
+#endif
+#ifdef __BIG_ENDIAN__
 	*dest = x;
+#endif
 	return 0;
 }
 
@@ -273,9 +298,29 @@ static int __kprobes write_mem_unaligned(unsigned long val, unsigned long ea,
 	int err;
 	unsigned long c;
 
+#ifdef __LITTLE_ENDIAN__
+	switch (nb) {
+	case 2:
+		val = byterev_2(val);
+		break;
+	case 4:
+		val = byterev_4(val);
+		break;
+#ifdef __powerpc64__
+	case 8:
+		val = byterev_8(val);
+		break;
+#endif
+	}
+#endif
 	/* unaligned or little-endian, do this in pieces */
 	for (; nb > 0; nb -= c) {
+#ifdef __LITTLE_ENDIAN__
+		c = 1;
+#endif
+#ifdef __BIG_ENDIAN__
 		c = max_align(ea);
+#endif
 		if (c > nb)
 			c = max_align(nb);
 		err = write_mem_aligned(val >> (nb - c) * 8, ea, c);
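
For reference, a minimal user-space sketch (not part of the patch; the simplified byterev_4() below is only a stand-in for the kernel helper) of why the little-endian path reads one byte at a time and then byte-reverses the result: x = (x << 8) + b accumulates the bytes as a big-endian image, so on an LE host the value must be swapped before it matches what is in memory.

/* Standalone illustration only -- not kernel code. Assumes an LE host. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint32_t byterev_4(uint32_t x)	/* simplified stand-in for the kernel helper */
{
	return ((x & 0xff) << 24) | ((x & 0xff00) << 8) |
	       ((x >> 8) & 0xff00) | (x >> 24);
}

int main(void)
{
	uint32_t val = 0x12345678, x = 0;
	unsigned char buf[4];
	int i;

	memcpy(buf, &val, sizeof(buf));		/* LE memory image: 78 56 34 12 */
	for (i = 0; i < 4; i++)			/* byte at a time, like the c = 1 path */
		x = (x << 8) + buf[i];		/* accumulates big-endian: 0x78563412 */
	printf("0x%08x\n", byterev_4(x));	/* byte reversal recovers 0x12345678 */
	return 0;
}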