/* byteorder.h — x86 byte-swap (endianness) primitives */
#ifndef _ASM_X86_BYTEORDER_H
#define _ASM_X86_BYTEORDER_H

#include <asm/types.h>
#include <linux/compiler.h>

#define __LITTLE_ENDIAN
  6. static inline __attribute_const__ __u32 __arch_swab32(__u32 val)
  7. {
  8. #ifdef __i386__
  9. # ifdef CONFIG_X86_BSWAP
  10. asm("bswap %0" : "=r" (val) : "0" (val));
  11. # else
  12. asm("xchgb %b0,%h0\n\t" /* swap lower bytes */
  13. "rorl $16,%0\n\t" /* swap words */
  14. "xchgb %b0,%h0" /* swap higher bytes */
  15. : "=q" (val)
  16. : "0" (val));
  17. # endif
  18. #else /* __i386__ */
  19. asm("bswapl %0"
  20. : "=r" (val)
  21. : "0" (val));
  22. #endif
  23. return val;
  24. }
  25. #define __arch_swab32 __arch_swab32
  26. static inline __attribute_const__ __u64 __arch_swab64(__u64 val)
  27. {
  28. #ifdef __i386__
  29. union {
  30. struct {
  31. __u32 a;
  32. __u32 b;
  33. } s;
  34. __u64 u;
  35. } v;
  36. v.u = val;
  37. # ifdef CONFIG_X86_BSWAP
  38. asm("bswapl %0 ; bswapl %1 ; xchgl %0,%1"
  39. : "=r" (v.s.a), "=r" (v.s.b)
  40. : "0" (v.s.a), "1" (v.s.b));
  41. # else
  42. v.s.a = __arch_swab32(v.s.a);
  43. v.s.b = __arch_swab32(v.s.b);
  44. asm("xchgl %0,%1"
  45. : "=r" (v.s.a), "=r" (v.s.b)
  46. : "0" (v.s.a), "1" (v.s.b));
  47. # endif
  48. return v.u;
  49. #else /* __i386__ */
  50. asm("bswapq %0"
  51. : "=r" (val)
  52. : "0" (val));
  53. return val;
  54. #endif
  55. }
  56. #define __arch_swab64 __arch_swab64
#include <linux/byteorder.h>

#endif /* _ASM_X86_BYTEORDER_H */