/* include/asm-x86/byteorder.h */
  1. #ifndef ASM_X86__BYTEORDER_H
  2. #define ASM_X86__BYTEORDER_H
  3. #include <asm/types.h>
  4. #include <linux/compiler.h>
  5. #ifdef __GNUC__
  6. #ifdef __i386__
  7. static inline __attribute_const__ __u32 ___arch__swab32(__u32 x)
  8. {
  9. #ifdef CONFIG_X86_BSWAP
  10. asm("bswap %0" : "=r" (x) : "0" (x));
  11. #else
  12. asm("xchgb %b0,%h0\n\t" /* swap lower bytes */
  13. "rorl $16,%0\n\t" /* swap words */
  14. "xchgb %b0,%h0" /* swap higher bytes */
  15. : "=q" (x)
  16. : "0" (x));
  17. #endif
  18. return x;
  19. }
  20. static inline __attribute_const__ __u64 ___arch__swab64(__u64 val)
  21. {
  22. union {
  23. struct {
  24. __u32 a;
  25. __u32 b;
  26. } s;
  27. __u64 u;
  28. } v;
  29. v.u = val;
  30. #ifdef CONFIG_X86_BSWAP
  31. asm("bswapl %0 ; bswapl %1 ; xchgl %0,%1"
  32. : "=r" (v.s.a), "=r" (v.s.b)
  33. : "0" (v.s.a), "1" (v.s.b));
  34. #else
  35. v.s.a = ___arch__swab32(v.s.a);
  36. v.s.b = ___arch__swab32(v.s.b);
  37. asm("xchgl %0,%1"
  38. : "=r" (v.s.a), "=r" (v.s.b)
  39. : "0" (v.s.a), "1" (v.s.b));
  40. #endif
  41. return v.u;
  42. }
  43. #else /* __i386__ */
  44. static inline __attribute_const__ __u64 ___arch__swab64(__u64 x)
  45. {
  46. asm("bswapq %0"
  47. : "=r" (x)
  48. : "0" (x));
  49. return x;
  50. }
  51. static inline __attribute_const__ __u32 ___arch__swab32(__u32 x)
  52. {
  53. asm("bswapl %0"
  54. : "=r" (x)
  55. : "0" (x));
  56. return x;
  57. }
  58. #endif
  59. /* Do not define swab16. Gcc is smart enough to recognize "C" version and
  60. convert it into rotation or exhange. */
  61. #define __arch__swab64(x) ___arch__swab64(x)
  62. #define __arch__swab32(x) ___arch__swab32(x)
  63. #define __BYTEORDER_HAS_U64__
  64. #endif /* __GNUC__ */
  65. #include <linux/byteorder/little_endian.h>
  66. #endif /* ASM_X86__BYTEORDER_H */