
powerpc: Use the new byteorder headers

Signed-off-by: Harvey Harrison <harvey.harrison@gmail.com>
Signed-off-by: Paul Mackerras <paulus@samba.org>
Harvey Harrison 16 years ago
parent
commit
1c1b777a56
1 changed file with 21 additions and 17 deletions
      arch/powerpc/include/asm/byteorder.h

arch/powerpc/include/asm/byteorder.h   + 21 - 17

@@ -11,6 +11,8 @@
 #include <asm/types.h>
 #include <linux/compiler.h>
 
+#define __BIG_ENDIAN
+
 #ifdef __GNUC__
 #ifdef __KERNEL__
 
@@ -21,12 +23,19 @@ static __inline__ __u16 ld_le16(const volatile __u16 *addr)
 	__asm__ __volatile__ ("lhbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
 	return val;
 }
+#define __arch_swab16p ld_le16
 
 static __inline__ void st_le16(volatile __u16 *addr, const __u16 val)
 {
 	__asm__ __volatile__ ("sthbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
 }
 
+static inline void __arch_swab16s(__u16 *addr)
+{
+	st_le16(addr, *addr);
+}
+#define __arch_swab16s __arch_swab16s
+
 static __inline__ __u32 ld_le32(const volatile __u32 *addr)
 {
 	__u32 val;
@@ -34,13 +43,20 @@ static __inline__ __u32 ld_le32(const volatile __u32 *addr)
 	__asm__ __volatile__ ("lwbrx %0,0,%1" : "=r" (val) : "r" (addr), "m" (*addr));
 	return val;
 }
+#define __arch_swab32p ld_le32
 
 static __inline__ void st_le32(volatile __u32 *addr, const __u32 val)
 {
 	__asm__ __volatile__ ("stwbrx %1,0,%2" : "=m" (*addr) : "r" (val), "r" (addr));
 }
 
-static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 value)
+static inline void __arch_swab32s(__u32 *addr)
+{
+	st_le32(addr, *addr);
+}
+#define __arch_swab32s __arch_swab32s
+
+static inline __attribute_const__ __u16 __arch_swab16(__u16 value)
 {
 	__u16 result;
 
@@ -49,8 +65,9 @@ static __inline__ __attribute_const__ __u16 ___arch__swab16(__u16 value)
 	    : "r" (value), "0" (value >> 8));
 	return result;
 }
+#define __arch_swab16 __arch_swab16
 
-static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 value)
+static inline __attribute_const__ __u32 __arch_swab32(__u32 value)
 {
 	__u32 result;
 
@@ -61,29 +78,16 @@ static __inline__ __attribute_const__ __u32 ___arch__swab32(__u32 value)
 	    : "r" (value), "0" (value >> 24));
 	return result;
 }
-
-#define __arch__swab16(x) ___arch__swab16(x)
-#define __arch__swab32(x) ___arch__swab32(x)
-
-/* The same, but returns converted value from the location pointer by addr. */
-#define __arch__swab16p(addr) ld_le16(addr)
-#define __arch__swab32p(addr) ld_le32(addr)
-
-/* The same, but do the conversion in situ, ie. put the value back to addr. */
-#define __arch__swab16s(addr) st_le16(addr,*addr)
-#define __arch__swab32s(addr) st_le32(addr,*addr)
+#define __arch_swab32 __arch_swab32
 
 #endif /* __KERNEL__ */
 
-#ifndef __STRICT_ANSI__
-#define __BYTEORDER_HAS_U64__
 #ifndef __powerpc64__
 #define __SWAB_64_THRU_32__
 #endif /* __powerpc64__ */
-#endif /* __STRICT_ANSI__ */
 
 #endif /* __GNUC__ */
 
-#include <linux/byteorder/big_endian.h>
+#include <linux/byteorder.h>
 
 #endif /* _ASM_POWERPC_BYTEORDER_H */
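
For context beyond the diff itself: under the new byteorder scheme, an architecture's asm/byteorder.h declares its endianness (__BIG_ENDIAN here), may provide optimized __arch_swab16/32 routines plus the *p (load-and-swap through a pointer) and *s (swap in place) variants, advertises each one with a same-named #define, and then includes <linux/byteorder.h>, which layers the generic swab and cpu_to_le/be helpers on top and falls back to portable C wherever no override is advertised. The following stand-alone user-space sketch illustrates only that override convention; it is not the kernel header, and everything outside the __arch_swab16/__swab16 names is illustrative.

/*
 * Stand-alone illustration (not kernel code) of the override pattern used
 * by the new byteorder headers: the "arch" half supplies an optimized swab
 * and advertises it; the "generic" half uses it when present and otherwise
 * falls back to portable C.
 */
#include <stdint.h>
#include <stdio.h>

/* "Arch" half: a 16-bit byte swap, standing in for what asm/byteorder.h provides. */
static inline uint16_t __arch_swab16(uint16_t value)
{
	return (uint16_t)((value << 8) | (value >> 8));
}
#define __arch_swab16 __arch_swab16	/* advertise the override */

/* "Generic" half: conceptually what linux/byteorder.h does for __swab16(). */
static inline uint16_t __swab16(uint16_t value)
{
#ifdef __arch_swab16
	return __arch_swab16(value);			/* arch-optimized path */
#else
	return (uint16_t)((value << 8) | (value >> 8));	/* portable fallback */
#endif
}

int main(void)
{
	/* 0x1234 byte-swapped is 0x3412 */
	printf("0x%04x -> 0x%04x\n", 0x1234u, (unsigned int)__swab16(0x1234));
	return 0;
}

Because "#define __arch_swab16 __arch_swab16" is self-referential, it does not change what the call expands to; it only lets the generic layer test for the override with #ifdef. That is exactly how the hunks above register __arch_swab16, __arch_swab32, the __arch_swab16p/__arch_swab32p loads (mapped onto ld_le16/ld_le32) and the __arch_swab16s/__arch_swab32s in-place variants (built on st_le16/st_le32).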