#ifndef _LINUX_BYTEORDER_SWAB_H
#define _LINUX_BYTEORDER_SWAB_H

/*
 * linux/byteorder/swab.h
 * Byte-swapping, independently of CPU endianness
 *	swabXX[ps]?(foo)
 *
 * Francois-Rene Rideau <fare@tunes.org> 19971205
 *    separated swab functions from cpu_to_XX,
 *    to clean up support for bizarre-endian architectures.
 *
 * See asm-i386/byteorder.h and the like for examples of how to provide
 * architecture-dependent optimized versions
 *
 */

/* casts are necessary for constants, because we never know for sure
 * how U/UL/ULL map to __u16, __u32, __u64. At least not in a portable way.
 */
#define ___swab16(x) \
	((__u16)( \
		(((__u16)(x) & (__u16)0x00ffU) << 8) | \
		(((__u16)(x) & (__u16)0xff00U) >> 8) ))
#define ___swab32(x) \
	((__u32)( \
		(((__u32)(x) & (__u32)0x000000ffUL) << 24) | \
		(((__u32)(x) & (__u32)0x0000ff00UL) << 8) | \
		(((__u32)(x) & (__u32)0x00ff0000UL) >> 8) | \
		(((__u32)(x) & (__u32)0xff000000UL) >> 24) ))
#define ___swab64(x) \
	((__u64)( \
		(__u64)(((__u64)(x) & (__u64)0x00000000000000ffULL) << 56) | \
		(__u64)(((__u64)(x) & (__u64)0x000000000000ff00ULL) << 40) | \
		(__u64)(((__u64)(x) & (__u64)0x0000000000ff0000ULL) << 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00000000ff000000ULL) << 8) | \
		(__u64)(((__u64)(x) & (__u64)0x000000ff00000000ULL) >> 8) | \
		(__u64)(((__u64)(x) & (__u64)0x0000ff0000000000ULL) >> 24) | \
		(__u64)(((__u64)(x) & (__u64)0x00ff000000000000ULL) >> 40) | \
		(__u64)(((__u64)(x) & (__u64)0xff00000000000000ULL) >> 56) ))

/*
 * provide defaults when no architecture-specific optimization is detected
 */
#ifndef __arch__swab16
# define __arch__swab16(x) ___swab16(x)
#endif
#ifndef __arch__swab32
# define __arch__swab32(x) ___swab32(x)
#endif
#ifndef __arch__swab64
# define __arch__swab64(x) ___swab64(x)
#endif

#ifndef __arch__swab16p
# define __arch__swab16p(x) __swab16(*(x))
#endif
#ifndef __arch__swab32p
# define __arch__swab32p(x) __swab32(*(x))
#endif
#ifndef __arch__swab64p
# define __arch__swab64p(x) __swab64(*(x))
#endif

#ifndef __arch__swab16s
# define __arch__swab16s(x) do { *(x) = __swab16p((x)); } while (0)
#endif
#ifndef __arch__swab32s
# define __arch__swab32s(x) do { *(x) = __swab32p((x)); } while (0)
#endif
#ifndef __arch__swab64s
# define __arch__swab64s(x) do { *(x) = __swab64p((x)); } while (0)
#endif

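/*
 * For example, with the generic macros above:
 *
 *	___swab16(0x1234)     == 0x3412
 *	___swab32(0x12345678) == 0x78563412
 *
 * An architecture header may pre-define __arch__swab16/32/64 (and the
 * p/s variants) with optimized versions before this point; the generic
 * forms are used only as fallbacks.
 */
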
/*
 * Allow constant folding
 */
#if defined(__GNUC__) && (__GNUC__ >= 2) && defined(__OPTIMIZE__)
# define __swab16(x) \
(__builtin_constant_p((__u16)(x)) ? \
 ___swab16((x)) : \
 __fswab16((x)))
# define __swab32(x) \
(__builtin_constant_p((__u32)(x)) ? \
 ___swab32((x)) : \
 __fswab32((x)))
# define __swab64(x) \
(__builtin_constant_p((__u64)(x)) ? \
 ___swab64((x)) : \
 __fswab64((x)))
#else
# define __swab16(x) __fswab16(x)
# define __swab32(x) __fswab32(x)
# define __swab64(x) __fswab64(x)
#endif /* OPTIMIZE */


static __inline__ __attribute__((const)) __u16 __fswab16(__u16 x)
{
	return __arch__swab16(x);
}
static __inline__ __u16 __swab16p(const __u16 *x)
{
	return __arch__swab16p(x);
}
static __inline__ void __swab16s(__u16 *addr)
{
	__arch__swab16s(addr);
}

static __inline__ __attribute__((const)) __u32 __fswab32(__u32 x)
{
	return __arch__swab32(x);
}
static __inline__ __u32 __swab32p(const __u32 *x)
{
	return __arch__swab32p(x);
}
static __inline__ void __swab32s(__u32 *addr)
{
	__arch__swab32s(addr);
}

static __inline__ __attribute__((const)) __u64 __fswab64(__u64 x)
{
# ifdef __SWAB_64_THRU_32__
	__u32 h = x >> 32;
	__u32 l = x & ((1ULL<<32)-1);
	return (((__u64)__swab32(l)) << 32) | ((__u64)(__swab32(h)));
# else
	return __arch__swab64(x);
# endif
}
static __inline__ __u64 __swab64p(const __u64 *x)
{
	return __arch__swab64p(x);
}
static __inline__ void __swab64s(__u64 *addr)
{
	__arch__swab64s(addr);
}

#if defined(__KERNEL__)
#define swab16 __swab16
#define swab32 __swab32
#define swab64 __swab64
#define swab16p __swab16p
#define swab32p __swab32p
#define swab64p __swab64p
#define swab16s __swab16s
#define swab32s __swab32s
#define swab64s __swab64s
#endif

#endif /* _LINUX_BYTEORDER_SWAB_H */
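
/*
 * Usage sketch (illustrative; the unprefixed swabXX names are only
 * defined when __KERNEL__ is set, otherwise use the __swabXX forms):
 *
 *	__u32 v = 0x12345678;
 *	__u32 r = __swab32(v);		value form: r == 0x78563412
 *	r = __swab32p(&v);		pointer form: swaps on read, v unchanged
 *	__swab32s(&v);			in-place form: v becomes 0x78563412
 *
 * With GCC and optimization enabled, a constant argument to __swab32()
 * is folded at compile time through the __builtin_constant_p() check above.
 */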