#ifndef _LINUX_BYTEORDER_SWAB_H
#define _LINUX_BYTEORDER_SWAB_H

/*
 * linux/byteorder/swab.h
 * Byte-swapping, independently from CPU endianness
 *	swabXX[ps]?(foo)
 *
 * Francois-Rene Rideau <fare@tunes.org> 19971205
 *    separated swab functions from cpu_to_XX,
 *    to clean up support for bizarre-endian architectures.
 *
 * Trent Piepho <xyzzy@speakeasy.org> 2007114
 *    make constant-folding work, provide C versions that
 *    gcc can optimize better, explain different versions
 *
 * See asm-i386/byteorder.h and the like for examples of how to provide
 * architecture-dependent optimized versions
 *
 */

#include <linux/compiler.h>
/* Functions/macros defined, there are a lot:
 *
 * ___swabXX
 *	Generic C versions of the swab functions.
 *
 * ___constant_swabXX
 *	C versions that gcc can fold into a compile-time constant when
 *	the argument is a compile-time constant.
 *
 * __arch__swabXX[sp]?
 *	Architecture optimized versions of all the swab functions
 *	(including the s and p versions).  These can be defined in
 *	asm-arch/byteorder.h.  Any which are not, are defined here.
 *	__arch__swabXXs() is defined in terms of __arch__swabXXp(), which
 *	is defined in terms of __arch__swabXX(), which is in turn defined
 *	in terms of ___swabXX(x).
 *	These must be macros.  They may be unsafe for arguments with
 *	side-effects.
 *
 * __fswabXX
 *	Inline function versions of the __arch__ macros.  These _are_ safe
 *	if the arguments have side-effects.  Note there are no s and p
 *	versions of these.
 *
 * __swabXX[sp]
 *	These are the ones you should actually use.  The __swabXX versions
 *	will be a constant given a constant argument and use the arch
 *	specific code (if any) for non-constant arguments.  The s and p
 *	versions always use the arch specific code (constant folding
 *	doesn't apply).  They are safe to use with arguments with
 *	side-effects.
 *
 * swabXX[sp]
 *	Nicknames for __swabXX[sp] to use in the kernel.
 */
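
/*
 * Illustrative usage (hypothetical snippet, not part of this header):
 * a constant argument folds at compile time, a variable goes through the
 * arch-optimized path, and the s version swaps in place.
 *
 *	__u32 c = __swab32(0x12345678);	// folds to 0x78563412
 *	__u16 v = some_le16_value;	// hypothetical variable
 *	__u16 s = __swab16(v);		// expands to __fswab16(v)
 *	__swab16s(&v);			// swaps v in place
 */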
/* casts are necessary for constants, because we never know for sure
 * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
 */
static __inline__ __attribute_const__ __u16 ___swab16(__u16 x)
{
	return x<<8 | x>>8;
}

static __inline__ __attribute_const__ __u32 ___swab32(__u32 x)
{
	return x<<24 | x>>24 |
		(x & (__u32)0x0000ff00UL)<<8 |
		(x & (__u32)0x00ff0000UL)>>8;
}

static __inline__ __attribute_const__ __u64 ___swab64(__u64 x)
{
	return x<<56 | x>>56 |
		(x & (__u64)0x000000000000ff00ULL)<<40 |
		(x & (__u64)0x0000000000ff0000ULL)<<24 |
		(x & (__u64)0x00000000ff000000ULL)<< 8 |
		(x & (__u64)0x000000ff00000000ULL)>> 8 |
		(x & (__u64)0x0000ff0000000000ULL)>>24 |
		(x & (__u64)0x00ff000000000000ULL)>>40;
}
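
/*
 * Worked example (illustrative, not part of the original header): in
 * ___swab32, each term moves one byte to its mirrored position, so
 *
 *	___swab32(0x12345678)
 *	  == 0x78000000 | 0x00000012 | 0x00560000 | 0x00003400
 *	  == 0x78563412
 */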
#define ___constant_swab16(x) \
	((__u16)( \
		(((__u16)(x) & (__u16)0x00ffU) << 8) | \
		(((__u16)(x) & (__u16)0xff00U) >> 8) ))
#define ___constant_swab32(x) \
	((__u32)( \
		(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
		(((__u32)(x) & (__u32)0x0000ff00UL) <<  8) | \
		(((__u32)(x) & (__u32)0x00ff0000UL) >>  8) | \
		(((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
#define ___constant_swab64(x) \
	((__u64)( \
		(__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
		(__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
		(__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) <<  8) | \
		(__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >>  8) | \
		(__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
		(__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))
/*
 * provide defaults when no architecture-specific optimization is detected
 */
#ifndef __arch__swab16
#  define __arch__swab16(x) ___swab16(x)
#endif
#ifndef __arch__swab32
#  define __arch__swab32(x) ___swab32(x)
#endif
#ifndef __arch__swab64
#  define __arch__swab64(x) ___swab64(x)
#endif

#ifndef __arch__swab16p
#  define __arch__swab16p(x) __arch__swab16(*(x))
#endif
#ifndef __arch__swab32p
#  define __arch__swab32p(x) __arch__swab32(*(x))
#endif
#ifndef __arch__swab64p
#  define __arch__swab64p(x) __arch__swab64(*(x))
#endif

#ifndef __arch__swab16s
#  define __arch__swab16s(x) ((void)(*(x) = __arch__swab16p(x)))
#endif
#ifndef __arch__swab32s
#  define __arch__swab32s(x) ((void)(*(x) = __arch__swab32p(x)))
#endif
#ifndef __arch__swab64s
#  define __arch__swab64s(x) ((void)(*(x) = __arch__swab64p(x)))
#endif
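
/*
 * Sketch of an architecture override (hypothetical, modeled on the x86
 * bswap instruction; real versions live in asm-<arch>/byteorder.h and must
 * be defined before this header is included so the defaults above are
 * skipped):
 *
 *	static __inline__ __attribute_const__ __u32 ___arch___swab32(__u32 x)
 *	{
 *		__asm__("bswap %0" : "=r" (x) : "0" (x));
 *		return x;
 *	}
 *	#define __arch__swab32(x) ___arch___swab32(x)
 */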
/*
 * Allow constant folding
 */
#if defined(__GNUC__) && defined(__OPTIMIZE__)
#  define __swab16(x) \
	(__builtin_constant_p((__u16)(x)) ? \
	 ___constant_swab16((x)) : \
	 __fswab16((x)))
#  define __swab32(x) \
	(__builtin_constant_p((__u32)(x)) ? \
	 ___constant_swab32((x)) : \
	 __fswab32((x)))
#  define __swab64(x) \
	(__builtin_constant_p((__u64)(x)) ? \
	 ___constant_swab64((x)) : \
	 __fswab64((x)))
#else
#  define __swab16(x) __fswab16(x)
#  define __swab32(x) __fswab32(x)
#  define __swab64(x) __fswab64(x)
#endif /* OPTIMIZE */
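
/*
 * Because __swabXX selects ___constant_swabXX for constant arguments, gcc
 * can fold the whole expression, so it is typically usable where a
 * compile-time constant is wanted (illustrative, hypothetical snippet):
 *
 *	#define MAGIC_LE	0x12345678
 *	static const __u32 magic_be = __swab32(MAGIC_LE);	// 0x78563412
 */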
static __inline__ __attribute_const__ __u16 __fswab16(__u16 x)
{
	return __arch__swab16(x);
}
static __inline__ __u16 __swab16p(const __u16 *x)
{
	return __arch__swab16p(x);
}
static __inline__ void __swab16s(__u16 *addr)
{
	__arch__swab16s(addr);
}

static __inline__ __attribute_const__ __u32 __fswab32(__u32 x)
{
	return __arch__swab32(x);
}
static __inline__ __u32 __swab32p(const __u32 *x)
{
	return __arch__swab32p(x);
}
static __inline__ void __swab32s(__u32 *addr)
{
	__arch__swab32s(addr);
}
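
/*
 * Illustrative use of the p and s variants (hypothetical buffer):
 *
 *	__u32 buf[4];				// hypothetical wrong-endian data
 *	__u32 first = __swab32p(&buf[0]);	// swapped copy, buf untouched
 *	__swab32s(&buf[1]);			// swaps buf[1] in place
 */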
#ifdef __BYTEORDER_HAS_U64__
static __inline__ __attribute_const__ __u64 __fswab64(__u64 x)
{
#  ifdef __SWAB_64_THRU_32__
	__u32 h = x >> 32;
	__u32 l = x & ((1ULL<<32)-1);
	return (((__u64)__swab32(l)) << 32) | ((__u64)(__swab32(h)));
#  else
	return __arch__swab64(x);
#  endif
}
static __inline__ __u64 __swab64p(const __u64 *x)
{
	return __arch__swab64p(x);
}
static __inline__ void __swab64s(__u64 *addr)
{
	__arch__swab64s(addr);
}
#endif /* __BYTEORDER_HAS_U64__ */
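
/*
 * Worked example of the __SWAB_64_THRU_32__ path (illustrative): for
 * x == 0x1122334455667788ULL, h == 0x11223344 and l == 0x55667788, so
 *
 *	(__u64)__swab32(l) << 32 | __swab32(h)
 *	  == 0x8877665500000000 | 0x44332211
 *	  == 0x8877665544332211
 *
 * i.e. the same full byte reversal __arch__swab64() would produce.
 */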
#if defined(__KERNEL__)
#define swab16 __swab16
#define swab32 __swab32
#define swab64 __swab64
#define swab16p __swab16p
#define swab32p __swab32p
#define swab64p __swab64p
#define swab16s __swab16s
#define swab32s __swab32s
#define swab64s __swab64s
#endif

#endif /* _LINUX_BYTEORDER_SWAB_H */