/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_BYTEORDER_GENERIC_H
#define _LINUX_BYTEORDER_GENERIC_H

/*
 * linux/byteorder/generic.h
 * Generic Byte-reordering support
 *
 * The "... p" macros, like le64_to_cpup, can be used with pointers
 * to unaligned data, but there will be a performance penalty on
 * some architectures.  Use get_unaligned for unaligned data.
 *
 * Francois-Rene Rideau <fare@tunes.org> 19970707
 *    gathered all the good ideas from all asm-foo/byteorder.h into one file,
 *    cleaned them up.
 *    I hope it is compliant with non-GCC compilers.
 *    I decided to put __BYTEORDER_HAS_U64__ in byteorder.h,
 *    because I wasn't sure it would be ok to put it in types.h
 *    Upgraded it to 2.1.43
 * Francois-Rene Rideau <fare@tunes.org> 19971012
 *    Upgraded it to 2.1.57
 *    to please Linus T., replaced huge #ifdef's between little/big endian
 *    by nestedly #include'd files.
 * Francois-Rene Rideau <fare@tunes.org> 19971205
 *    Made it to 2.1.71; now a facelift:
 *    Put files under include/linux/byteorder/
 *    Split swab from generic support.
 *
 * TODO:
 *   = Regular kernel maintainers could also replace all these manual
 *    byteswap macros that remain, disseminated among drivers,
 *    after some grep of the sources...
 *   = Linus might want to rename all these macros and files to fit his
 *    personal naming scheme.
 *   = it seems that a few drivers would also appreciate
 *    nybble swapping support...
 *   = every architecture could add their byteswap macro in asm/byteorder.h
 *    see how some architectures already do (i386, alpha, ppc, etc)
 *   = cpu_to_beXX and beXX_to_cpu might some day need to be well
 *    distinguished throughout the kernel. This is not the case currently,
 *    since little endian, big endian, and pdp endian machines don't need it.
 *    But this might be the case for, say, a port of Linux to 20/21 bit
 *    architectures (any F21 Linux addicts around?).
 */

/*
 * The following macros are to be defined by <asm/byteorder.h>:
 *
 * Conversion of long and short int between network and host format
 *	ntohl(__u32 x)
 *	ntohs(__u16 x)
 *	htonl(__u32 x)
 *	htons(__u16 x)
 * It seems that some programs (which? where? or perhaps a standard? POSIX?)
 * might like the above to be functions, not macros (why?).
 * If that's true, then detect them, and take measures.
 * Anyway, the measure is: define only ___ntohl as a macro instead,
 * and in a separate file, have
 * unsigned long inline ntohl(x){return ___ntohl(x);}
 *
 * The same for constant arguments
 *	__constant_ntohl(__u32 x)
 *	__constant_ntohs(__u16 x)
 *	__constant_htonl(__u32 x)
 *	__constant_htons(__u16 x)
 *
 * Conversion of 16-, 32- or 64-bit integers
 * between native CPU format and little/big endian format
 * 64-bit stuff only defined for proper architectures
 *	cpu_to_[bl]eXX(__uXX x)
 *	[bl]eXX_to_cpu(__uXX x)
 *
 * The same, but takes a pointer to the value to convert
 *	cpu_to_[bl]eXXp(const __uXX *p)
 *	[bl]eXX_to_cpup(const __uXX *p)
 *
 * The same, but change the value in place
 *	cpu_to_[bl]eXXs(__uXX *p)
 *	[bl]eXX_to_cpus(__uXX *p)
 *
 * See asm-foo/byteorder.h for examples of how to provide
 * architecture-optimized versions
 *
 */
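
/*
 * Illustrative usage (a sketch only, not part of this API; the struct and
 * field names below are hypothetical).  Fixed-endian data, e.g. an on-disk
 * or on-wire structure, is declared with __leXX/__beXX types and converted
 * at the point of use:
 *
 *	struct foo_disk_header {
 *		__le32	magic;
 *		__be16	port;
 *	};
 *
 *	u32 magic = le32_to_cpu(hdr->magic);	(fixed order -> CPU order)
 *	hdr->port = cpu_to_be16(1234);		(CPU order -> fixed order)
 *
 * On a machine whose native endianness matches the field, these compile to
 * plain loads and stores; otherwise they compile to a byteswap.
 */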

#define cpu_to_le64 __cpu_to_le64
#define le64_to_cpu __le64_to_cpu
#define cpu_to_le32 __cpu_to_le32
#define le32_to_cpu __le32_to_cpu
#define cpu_to_le16 __cpu_to_le16
#define le16_to_cpu __le16_to_cpu
#define cpu_to_be64 __cpu_to_be64
#define be64_to_cpu __be64_to_cpu
#define cpu_to_be32 __cpu_to_be32
#define be32_to_cpu __be32_to_cpu
#define cpu_to_be16 __cpu_to_be16
#define be16_to_cpu __be16_to_cpu
#define cpu_to_le64p __cpu_to_le64p
#define le64_to_cpup __le64_to_cpup
#define cpu_to_le32p __cpu_to_le32p
#define le32_to_cpup __le32_to_cpup
#define cpu_to_le16p __cpu_to_le16p
#define le16_to_cpup __le16_to_cpup
#define cpu_to_be64p __cpu_to_be64p
#define be64_to_cpup __be64_to_cpup
#define cpu_to_be32p __cpu_to_be32p
#define be32_to_cpup __be32_to_cpup
#define cpu_to_be16p __cpu_to_be16p
#define be16_to_cpup __be16_to_cpup
#define cpu_to_le64s __cpu_to_le64s
#define le64_to_cpus __le64_to_cpus
#define cpu_to_le32s __cpu_to_le32s
#define le32_to_cpus __le32_to_cpus
#define cpu_to_le16s __cpu_to_le16s
#define le16_to_cpus __le16_to_cpus
#define cpu_to_be64s __cpu_to_be64s
#define be64_to_cpus __be64_to_cpus
#define cpu_to_be32s __cpu_to_be32s
#define be32_to_cpus __be32_to_cpus
#define cpu_to_be16s __cpu_to_be16s
#define be16_to_cpus __be16_to_cpus

/*
 * They have to be macros in order to do the constant folding
 * correctly - if the argument is passed into an inline function
 * it is no longer constant according to gcc.
 */
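
/*
 * For example (illustrative only; the name FOO_MAGIC is hypothetical),
 * because these are constant-folding macros rather than out-of-line
 * functions, an expression such as
 *
 *	#define FOO_MAGIC	cpu_to_be32(0x464f4f21)
 *
 * stays a compile-time constant and can be used in static initializers,
 * which a call to a byteswap function would not allow.
 */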

#undef ntohl
#undef ntohs
#undef htonl
#undef htons

#define ___htonl(x) __cpu_to_be32(x)
#define ___htons(x) __cpu_to_be16(x)
#define ___ntohl(x) __be32_to_cpu(x)
#define ___ntohs(x) __be16_to_cpu(x)

#define htonl(x) ___htonl(x)
#define ntohl(x) ___ntohl(x)
#define htons(x) ___htons(x)
#define ntohs(x) ___ntohs(x)
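
/*
 * Illustrative usage (a sketch; the variable names are hypothetical).
 * Network byte order is big endian, so values written to the wire go
 * through the hton* forms and values read from it through the ntoh* forms:
 *
 *	sin.sin_port = htons(80);		(host -> network order)
 *	u16 sport = ntohs(udp_hdr->source);	(network -> host order)
 */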

static inline void le16_add_cpu(__le16 *var, u16 val)
{
	*var = cpu_to_le16(le16_to_cpu(*var) + val);
}

static inline void le32_add_cpu(__le32 *var, u32 val)
{
	*var = cpu_to_le32(le32_to_cpu(*var) + val);
}

static inline void le64_add_cpu(__le64 *var, u64 val)
{
	*var = cpu_to_le64(le64_to_cpu(*var) + val);
}
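
/*
 * Illustrative usage (a sketch; the struct and field names are
 * hypothetical).  The *_add_cpu() helpers add a native-order value to a
 * fixed-endian field in place, e.g. bumping a little-endian on-disk
 * counter without open-coding the two conversions:
 *
 *	le32_add_cpu(&sb->s_free_blocks, 1);
 *
 * The beXX_add_cpu() variants further down do the same for big-endian
 * fields.
 */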

/* XXX: this stuff can be optimized */
static inline void le32_to_cpu_array(u32 *buf, unsigned int words)
{
	while (words--) {
		__le32_to_cpus(buf);
		buf++;
	}
}

static inline void cpu_to_le32_array(u32 *buf, unsigned int words)
{
	while (words--) {
		__cpu_to_le32s(buf);
		buf++;
	}
}
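
/*
 * Illustrative usage (a sketch; the buffer name is hypothetical).  These
 * two helpers byteswap an array of 32-bit words in place, e.g. converting
 * a block of little-endian data into CPU order before processing and back
 * again before writing it out:
 *
 *	le32_to_cpu_array(fw_buf, fw_words);
 *	... operate on fw_buf in native order ...
 *	cpu_to_le32_array(fw_buf, fw_words);
 */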

static inline void be16_add_cpu(__be16 *var, u16 val)
{
	*var = cpu_to_be16(be16_to_cpu(*var) + val);
}

static inline void be32_add_cpu(__be32 *var, u32 val)
{
	*var = cpu_to_be32(be32_to_cpu(*var) + val);
}

static inline void be64_add_cpu(__be64 *var, u64 val)
{
	*var = cpu_to_be64(be64_to_cpu(*var) + val);
}

static inline void cpu_to_be32_array(__be32 *dst, const u32 *src, size_t len)
{
	size_t i;

	for (i = 0; i < len; i++)
		dst[i] = cpu_to_be32(src[i]);
}

static inline void be32_to_cpu_array(u32 *dst, const __be32 *src, size_t len)
{
	size_t i;

	for (i = 0; i < len; i++)
		dst[i] = be32_to_cpu(src[i]);
}
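
/*
 * Illustrative usage (a sketch; the names are hypothetical).  Unlike the
 * in-place le32 helpers above, these convert while copying between a
 * native-order buffer and a big-endian one, e.g. emitting a set of
 * native-order state words as big-endian output:
 *
 *	cpu_to_be32_array(out_be, state, ARRAY_SIZE(state));
 */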

#endif /* _LINUX_BYTEORDER_GENERIC_H */