/* byte_order.h */
#ifndef BYTE_ORDER_H
#define BYTE_ORDER_H
#include "ustd.h"
#include <stdlib.h>
#if 0
#if defined(__GLIBC__)
# include <endian.h>
#endif
#endif
#if defined(__FreeBSD__) || defined(__DragonFly__) || defined(__APPLE__)
# include <sys/types.h>
#elif defined (__NetBSD__) || defined(__OpenBSD__)
# include <sys/param.h>
#endif
#ifdef __cplusplus
extern "C" {
#endif

/* if x86 compatible cpu */
#if defined(i386) || defined(__i386__) || defined(__i486__) || \
    defined(__i586__) || defined(__i686__) || defined(__pentium__) || \
    defined(__pentiumpro__) || defined(__pentium4__) || \
    defined(__nocona__) || defined(prescott) || defined(__core2__) || \
    defined(__k6__) || defined(__k8__) || defined(__athlon__) || \
    defined(__amd64) || defined(__amd64__) || \
    defined(__x86_64) || defined(__x86_64__) || defined(_M_IX86) || \
    defined(_M_AMD64) || defined(_M_IA64) || defined(_M_X64)
/* detect if x86-64 instruction set is supported */
# if defined(_LP64) || defined(__LP64__) || defined(__x86_64) || \
    defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
#  define CPU_X64
# else
#  define CPU_IA32
# endif
#endif
#include <cm3p/kwiml/abi.h>
#if KWIML_ABI_ENDIAN_ID == KWIML_ABI_ENDIAN_ID_LITTLE
# define CPU_LITTLE_ENDIAN
# define IS_BIG_ENDIAN 0
# define IS_LITTLE_ENDIAN 1
#elif KWIML_ABI_ENDIAN_ID == KWIML_ABI_ENDIAN_ID_BIG
# define CPU_BIG_ENDIAN
# define IS_BIG_ENDIAN 1
# define IS_LITTLE_ENDIAN 0
#endif
#if 0
#define RHASH_BYTE_ORDER_LE 1234
#define RHASH_BYTE_ORDER_BE 4321
#if (defined(__BYTE_ORDER) && defined(__LITTLE_ENDIAN) && __BYTE_ORDER == __LITTLE_ENDIAN) || \
    (defined(__BYTE_ORDER__) && defined(__ORDER_LITTLE_ENDIAN__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_LE
#elif (defined(__BYTE_ORDER) && defined(__BIG_ENDIAN) && __BYTE_ORDER == __BIG_ENDIAN) || \
    (defined(__BYTE_ORDER__) && defined(__ORDER_BIG_ENDIAN__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_BE
#elif defined(_BYTE_ORDER)
# if defined(_LITTLE_ENDIAN) && (_BYTE_ORDER == _LITTLE_ENDIAN)
#  define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_LE
# elif defined(_BIG_ENDIAN) && (_BYTE_ORDER == _BIG_ENDIAN)
#  define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_BE
# endif
#elif defined(__sun) && defined(_LITTLE_ENDIAN)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_LE
#elif defined(__sun) && defined(_BIG_ENDIAN)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_BE
#endif

/* try detecting endianness by CPU */
#ifdef RHASH_BYTE_ORDER
#elif defined(CPU_IA32) || defined(CPU_X64) || defined(__ia64) || defined(__ia64__) || \
    defined(__alpha__) || defined(_M_ALPHA) || defined(vax) || defined(MIPSEL) || \
    defined(_ARM_) || defined(__arm__)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_LE
#elif defined(__sparc) || defined(__sparc__) || defined(sparc) || \
    defined(_ARCH_PPC) || defined(_ARCH_PPC64) || defined(_POWER) || \
    defined(__POWERPC__) || defined(POWERPC) || defined(__powerpc) || \
    defined(__powerpc__) || defined(__powerpc64__) || defined(__ppc__) || \
    defined(__hpux) || defined(_MIPSEB) || defined(mc68000) || \
    defined(__s390__) || defined(__s390x__) || defined(sel)
# define RHASH_BYTE_ORDER RHASH_BYTE_ORDER_BE
#else
# error "Can't detect CPU architecture"
#endif
#define IS_BIG_ENDIAN (RHASH_BYTE_ORDER == RHASH_BYTE_ORDER_BE)
#define IS_LITTLE_ENDIAN (RHASH_BYTE_ORDER == RHASH_BYTE_ORDER_LE)
#endif
#ifndef __has_builtin
# define __has_builtin(x) 0
#endif

#define IS_ALIGNED_32(p) (0 == (3 & ((const char*)(p) - (const char*)0)))
#define IS_ALIGNED_64(p) (0 == (7 & ((const char*)(p) - (const char*)0)))
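/* Example: IS_ALIGNED_32(p) is non-zero when the address p is a multiple of
 * 4 and IS_ALIGNED_64(p) when it is a multiple of 8, e.g. to choose between
 * a word-wise fast path and a byte-wise fallback when reading a buffer. */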
#if defined(_MSC_VER)
#define ALIGN_ATTR(n) __declspec(align(n))
#elif defined(__GNUC__)
#define ALIGN_ATTR(n) __attribute__((aligned (n)))
#else
#define ALIGN_ATTR(n) /* nothing */
#endif
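/* Example (illustrative declaration): request 16-byte alignment for a
 * buffer where the compiler supports it; with the empty fallback above the
 * attribute simply disappears:
 *   ALIGN_ATTR(16) uint32_t block[16];
 */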
#if defined(_MSC_VER) || defined(__BORLANDC__)
#define I64(x) x##ui64
#else
#define I64(x) x##ULL
#endif
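/* Example: I64(0x0123456789ABCDEF) expands to 0x0123456789ABCDEFui64 under
 * MSVC/Borland and to 0x0123456789ABCDEFULL elsewhere, yielding an unsigned
 * 64-bit constant with either toolchain. */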
#if defined(_MSC_VER)
#define RHASH_INLINE __inline
#elif defined(__GNUC__) && !defined(__STRICT_ANSI__)
#define RHASH_INLINE inline
#elif defined(__GNUC__)
#define RHASH_INLINE __inline__
#else
#define RHASH_INLINE
#endif
/* define rhash_ctz - count trailing zero bits */
#if (defined(__GNUC__) && __GNUC__ >= 4 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) || \
    (defined(__clang__) && __has_builtin(__builtin_ctz))
/* GCC >= 3.4 or clang */
# define rhash_ctz(x) __builtin_ctz(x)
#else
unsigned rhash_ctz(unsigned); /* define as function */
#endif
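/* Example: rhash_ctz(8) == 3, since 8 == 1000 in binary has three trailing
 * zero bits; a zero argument is not meaningful here (GCC's __builtin_ctz is
 * undefined for 0). */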
void rhash_swap_copy_str_to_u32(void* to, int index, const void* from, size_t length);
void rhash_swap_copy_str_to_u64(void* to, int index, const void* from, size_t length);
void rhash_swap_copy_u64_to_str(void* to, const void* from, size_t length);
void rhash_u32_mem_swap(unsigned* p, int length_in_u32);
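/* Note (behaviour inferred from the be32_copy()/le32_copy() macros below;
 * the definitions live in the accompanying byte_order.c): the
 * rhash_swap_copy_* helpers copy 'length' bytes while reversing the byte
 * order of every 32-bit or 64-bit word, and rhash_u32_mem_swap() byte-swaps
 * an array of u32 words in place. */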
/* bswap definitions */
#if (defined(__GNUC__) && (__GNUC__ >= 4) && (__GNUC__ > 4 || __GNUC_MINOR__ >= 3)) || \
    (defined(__clang__) && __has_builtin(__builtin_bswap32) && __has_builtin(__builtin_bswap64))
/* GCC >= 4.3 or clang */
# define bswap_32(x) __builtin_bswap32(x)
# define bswap_64(x) __builtin_bswap64(x)
#elif (_MSC_VER > 1300) && (defined(CPU_IA32) || defined(CPU_X64)) /* MS VC */
# define bswap_32(x) _byteswap_ulong((unsigned long)x)
# define bswap_64(x) _byteswap_uint64((__int64)x)
#else
/* fallback to generic bswap definition */
static RHASH_INLINE uint32_t bswap_32(uint32_t x)
{
# if defined(__GNUC__) && defined(CPU_IA32) && !defined(__i386__) && !defined(RHASH_NO_ASM)
    __asm("bswap\t%0" : "=r" (x) : "0" (x)); /* gcc x86 version */
    return x;
# else
    x = ((x << 8) & 0xFF00FF00u) | ((x >> 8) & 0x00FF00FFu);
    return (x >> 16) | (x << 16);
# endif
}
static RHASH_INLINE uint64_t bswap_64(uint64_t x)
{
    union {
        uint64_t ll;
        uint32_t l[2];
    } w, r;
    w.ll = x;
    r.l[0] = bswap_32(w.l[1]);
    r.l[1] = bswap_32(w.l[0]);
    return r.ll;
}
#endif /* bswap definitions */
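/* Example: whichever definition above is selected,
 *   bswap_32(0x01020304)              == 0x04030201
 *   bswap_64(I64(0x0102030405060708)) == I64(0x0807060504030201)
 */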
#if IS_BIG_ENDIAN
# define be2me_32(x) (x)
# define be2me_64(x) (x)
# define le2me_32(x) bswap_32(x)
# define le2me_64(x) bswap_64(x)
# define be32_copy(to, index, from, length) memcpy((to) + (index), (from), (length))
# define le32_copy(to, index, from, length) rhash_swap_copy_str_to_u32((to), (index), (from), (length))
# define be64_copy(to, index, from, length) memcpy((to) + (index), (from), (length))
# define le64_copy(to, index, from, length) rhash_swap_copy_str_to_u64((to), (index), (from), (length))
# define me64_to_be_str(to, from, length) memcpy((to), (from), (length))
# define me64_to_le_str(to, from, length) rhash_swap_copy_u64_to_str((to), (from), (length))
#else /* IS_BIG_ENDIAN */
# define be2me_32(x) bswap_32(x)
# define be2me_64(x) bswap_64(x)
# define le2me_32(x) (x)
# define le2me_64(x) (x)
# define be32_copy(to, index, from, length) rhash_swap_copy_str_to_u32((to), (index), (from), (length))
# define le32_copy(to, index, from, length) memcpy((to) + (index), (from), (length))
# define be64_copy(to, index, from, length) rhash_swap_copy_str_to_u64((to), (index), (from), (length))
# define le64_copy(to, index, from, length) memcpy((to) + (index), (from), (length))
# define me64_to_be_str(to, from, length) rhash_swap_copy_u64_to_str((to), (from), (length))
# define me64_to_le_str(to, from, length) memcpy((to), (from), (length))
#endif /* IS_BIG_ENDIAN */
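/* Usage sketch (W, msg and the 64-byte block size are illustrative, not
 * part of this header): an algorithm specified on little-endian words can
 * load a message block portably with
 *   uint32_t W[16];
 *   le32_copy(W, 0, msg, sizeof(W));
 * and convert individual raw-loaded words with le2me_32(); on little-endian
 * CPUs these collapse to memcpy() and a no-op, on big-endian CPUs they
 * byte-swap. Big-endian algorithms use be32_copy() and be2me_32() instead. */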
/* ROTL/ROTR macros rotate a 32/64-bit word left/right by n bits */
#define ROTL32(dword, n) ((dword) << (n) ^ ((dword) >> (32 - (n))))
#define ROTR32(dword, n) ((dword) >> (n) ^ ((dword) << (32 - (n))))
#define ROTL64(qword, n) ((qword) << (n) ^ ((qword) >> (64 - (n))))
#define ROTR64(qword, n) ((qword) >> (n) ^ ((qword) << (64 - (n))))
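/* Example: ROTL32(0x12345678, 8) == 0x34567812 and
 * ROTR32(0x12345678, 8) == 0x78123456. The rotation count n is expected to
 * be in 1..31 (1..63 for the 64-bit variants): with n == 0 the complementary
 * shift would be by the full word width, which C leaves undefined. */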
#define CPU_FEATURE_SSE4_2 (52)

#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) \
    && (defined(CPU_X64) || defined(CPU_IA32))
# define HAS_INTEL_CPUID
int has_cpu_feature(unsigned feature_bit);
#else
# define has_cpu_feature(x) (0)
#endif
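/* Usage sketch: an SSE4.2-accelerated code path can be guarded with
 *   if (has_cpu_feature(CPU_FEATURE_SSE4_2)) { take the accelerated path }
 * On targets without HAS_INTEL_CPUID, has_cpu_feature() is the constant 0
 * defined above, so such a branch is compiled out. */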
#ifdef __cplusplus
} /* extern "C" */
#endif /* __cplusplus */
#endif /* BYTE_ORDER_H */