compiler.h

/*
 * Copyright (c) 2016-2020, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#ifndef ZSTD_COMPILER_H
#define ZSTD_COMPILER_H

/*-*******************************************************
*  Compiler specifics
*********************************************************/
/* force inlining */

#if !defined(ZSTD_NO_INLINE)
#if (defined(__GNUC__) && !defined(__STRICT_ANSI__)) || defined(__cplusplus) || defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   /* C99 */
#  define INLINE_KEYWORD inline
#else
#  define INLINE_KEYWORD
#endif

#if defined(__GNUC__) || defined(__ICCARM__)
#  define FORCE_INLINE_ATTR __attribute__((always_inline))
#elif defined(_MSC_VER)
#  define FORCE_INLINE_ATTR __forceinline
#else
#  define FORCE_INLINE_ATTR
#endif

#else

#define INLINE_KEYWORD
#define FORCE_INLINE_ATTR

#endif
/**
  On MSVC qsort requires that functions passed into it use the __cdecl calling convention (CC).
  This explicitly marks such functions as __cdecl so that the code will still compile
  if a CC other than __cdecl has been made the default.
*/
#if defined(_MSC_VER)
#  define WIN_CDECL __cdecl
#else
#  define WIN_CDECL
#endif
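
/* Illustrative sketch (not part of the original header): WIN_CDECL is meant for
 * callbacks handed to CRT functions such as qsort(), whose comparator must be
 * __cdecl on MSVC even when the project's default calling convention differs.
 * The names cmpU32 and sortU32 below are hypothetical.
 *
 *   #include <stdlib.h>   // qsort
 *
 *   static int WIN_CDECL cmpU32(const void* a, const void* b)
 *   {
 *       unsigned const x = *(const unsigned*)a;
 *       unsigned const y = *(const unsigned*)b;
 *       return (x > y) - (x < y);
 *   }
 *
 *   static void sortU32(unsigned* table, size_t n)
 *   {
 *       qsort(table, n, sizeof(*table), cmpU32);
 *   }
 */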

/**
 * FORCE_INLINE_TEMPLATE is used to define C "templates", which take constant
 * parameters. They must be inlined for the compiler to eliminate the constant
 * branches.
 */
#define FORCE_INLINE_TEMPLATE static INLINE_KEYWORD FORCE_INLINE_ATTR
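
/* Illustrative sketch (not part of the original header): a C "template"
 * specialized on a compile-time-constant parameter. When every caller passes a
 * literal for `wide`, force-inlining lets the compiler delete the dead branch.
 * The names readSize_template, readSize1 and readSize2 are hypothetical.
 *
 *   FORCE_INLINE_TEMPLATE size_t readSize_template(const unsigned char* src, int const wide)
 *   {
 *       if (wide) return (size_t)src[0] | ((size_t)src[1] << 8);   // 2-byte field
 *       return (size_t)src[0];                                     // 1-byte field
 *   }
 *
 *   static size_t readSize1(const unsigned char* src) { return readSize_template(src, 0); }
 *   static size_t readSize2(const unsigned char* src) { return readSize_template(src, 1); }
 */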

/**
 * HINT_INLINE is used to help the compiler generate better code. It is *not*
 * used for "templates", so it can be tweaked based on the compiler's
 * performance.
 *
 * gcc-4.8 and gcc-4.9 have been shown to benefit from leaving off the
 * always_inline attribute.
 *
 * clang up to 5.0.0 (trunk) benefits tremendously from the always_inline
 * attribute.
 */
#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ >= 4 && __GNUC_MINOR__ >= 8 && __GNUC__ < 5
#  define HINT_INLINE static INLINE_KEYWORD
#else
#  define HINT_INLINE static INLINE_KEYWORD FORCE_INLINE_ATTR
#endif

/* UNUSED_ATTR tells the compiler it is okay if the function is unused. */
#if defined(__GNUC__)
#  define UNUSED_ATTR __attribute__((unused))
#else
#  define UNUSED_ATTR
#endif

/* force no inlining */
#ifdef _MSC_VER
#  define FORCE_NOINLINE static __declspec(noinline)
#else
#  if defined(__GNUC__) || defined(__ICCARM__)
#    define FORCE_NOINLINE static __attribute__((__noinline__))
#  else
#    define FORCE_NOINLINE static
#  endif
#endif

/* target attribute */
#ifndef __has_attribute
#  define __has_attribute(x) 0  /* Compatibility with non-clang compilers. */
#endif
#if defined(__GNUC__) || defined(__ICCARM__)
#  define TARGET_ATTRIBUTE(target) __attribute__((__target__(target)))
#else
#  define TARGET_ATTRIBUTE(target)
#endif

/* Enable runtime BMI2 dispatch based on the CPU.
 * Enabled for clang & gcc >=4.8 on x86 when BMI2 isn't enabled by default.
 */
#ifndef DYNAMIC_BMI2
#  if ((defined(__clang__) && __has_attribute(__target__)) \
      || (defined(__GNUC__) \
          && (__GNUC__ >= 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)))) \
      && (defined(__x86_64__) || defined(_M_X86)) \
      && !defined(__BMI2__)
#    define DYNAMIC_BMI2 1
#  else
#    define DYNAMIC_BMI2 0
#  endif
#endif
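
/* Illustrative sketch (not part of the original header): the runtime-dispatch
 * pattern DYNAMIC_BMI2 is designed for. A BMI2-targeted variant is compiled
 * with TARGET_ATTRIBUTE("bmi2") and selected at runtime from a CPU feature
 * check. The names decodeBlock_body, decodeBlock_bmi2, decodeBlock_default and
 * decodeBlock, plus the bmi2 flag supplied by the caller, are hypothetical.
 *
 *   #if DYNAMIC_BMI2
 *   TARGET_ATTRIBUTE("bmi2")
 *   static size_t decodeBlock_bmi2(void* dst, const void* src, size_t srcSize)
 *   {   return decodeBlock_body(dst, src, srcSize);   }
 *   #endif
 *
 *   static size_t decodeBlock_default(void* dst, const void* src, size_t srcSize)
 *   {   return decodeBlock_body(dst, src, srcSize);   }
 *
 *   static size_t decodeBlock(void* dst, const void* src, size_t srcSize, int const bmi2)
 *   {
 *   #if DYNAMIC_BMI2
 *       if (bmi2) return decodeBlock_bmi2(dst, src, srcSize);
 *   #endif
 *       (void)bmi2;
 *       return decodeBlock_default(dst, src, srcSize);
 *   }
 */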

/* prefetch
 * can be disabled by defining the NO_PREFETCH build macro */
#if defined(NO_PREFETCH)
#  define PREFETCH_L1(ptr)  (void)(ptr)  /* disabled */
#  define PREFETCH_L2(ptr)  (void)(ptr)  /* disabled */
#else
#  if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_I86))  /* _mm_prefetch() is not defined outside of x86/x64 */
#    include <mmintrin.h>   /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */
#    define PREFETCH_L1(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
#    define PREFETCH_L2(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T1)
#  elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
#    define PREFETCH_L1(ptr)  __builtin_prefetch((ptr), 0 /* rw==read */, 3 /* locality */)
#    define PREFETCH_L2(ptr)  __builtin_prefetch((ptr), 0 /* rw==read */, 2 /* locality */)
#  elif defined(__aarch64__)
#    define PREFETCH_L1(ptr)  __asm__ __volatile__("prfm pldl1keep, %0" ::"Q"(*(ptr)))
#    define PREFETCH_L2(ptr)  __asm__ __volatile__("prfm pldl2keep, %0" ::"Q"(*(ptr)))
#  else
#    define PREFETCH_L1(ptr)  (void)(ptr)  /* disabled */
#    define PREFETCH_L2(ptr)  (void)(ptr)  /* disabled */
#  endif
#endif  /* NO_PREFETCH */

#define CACHELINE_SIZE 64

#define PREFETCH_AREA(p, s)  {                                 \
    const char* const _ptr = (const char*)(p);                 \
    size_t const _size = (size_t)(s);                          \
    size_t _pos;                                               \
    for (_pos=0; _pos<_size; _pos+=CACHELINE_SIZE) {           \
        PREFETCH_L2(_ptr + _pos);                              \
    }                                                          \
}
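
/* Illustrative sketch (not part of the original header): warming a lookup table
 * before a decode loop that will touch it repeatedly. The names dtable,
 * dtableSize and src are hypothetical.
 *
 *   PREFETCH_AREA(dtable, dtableSize);   // pull the whole table toward L2
 *   PREFETCH_L1(src);                    // and the first input bytes into L1
 *   // ... hot loop reading dtable and src ...
 */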

/* vectorization
 * older GCC (pre gcc-4.3 picked as the cutoff) uses a different syntax */
#if !defined(__INTEL_COMPILER) && !defined(__clang__) && defined(__GNUC__)
#  if (__GNUC__ == 4 && __GNUC_MINOR__ > 3) || (__GNUC__ >= 5)
#    define DONT_VECTORIZE __attribute__((optimize("no-tree-vectorize")))
#  else
#    define DONT_VECTORIZE _Pragma("GCC optimize(\"no-tree-vectorize\")")
#  endif
#else
#  define DONT_VECTORIZE
#endif

/* Tell the compiler that a branch is likely or unlikely.
 * Only use these macros if it causes the compiler to generate better code.
 * If you can remove a LIKELY/UNLIKELY annotation without speed changes in gcc
 * and clang, please do.
 */
#if defined(__GNUC__)
#  define LIKELY(x) (__builtin_expect((x), 1))
#  define UNLIKELY(x) (__builtin_expect((x), 0))
#else
#  define LIKELY(x) (x)
#  define UNLIKELY(x) (x)
#endif
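
/* Illustrative sketch (not part of the original header): annotating a rare
 * error path so the hot path stays straight-line. The names srcSize, srcLimit
 * and ERROR_srcTooLarge are hypothetical.
 *
 *   if (UNLIKELY(srcSize > srcLimit)) return ERROR_srcTooLarge;   // cold path
 *   // ... common case continues here ...
 */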

/* disable warnings */
#ifdef _MSC_VER    /* Visual Studio */
#  include <intrin.h>             /* For Visual 2005 */
#  pragma warning(disable : 4100) /* disable: C4100: unreferenced formal parameter */
#  pragma warning(disable : 4127) /* disable: C4127: conditional expression is constant */
#  pragma warning(disable : 4204) /* disable: C4204: non-constant aggregate initializer */
#  pragma warning(disable : 4214) /* disable: C4214: non-int bitfields */
#  pragma warning(disable : 4324) /* disable: C4324: padded structure */
#endif

/* Like DYNAMIC_BMI2 but for compile-time determination of BMI2 support */
#ifndef STATIC_BMI2
#  if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_I86))
#    ifdef __AVX2__  /* MSVC does not have a BMI2-specific flag, but every CPU that supports AVX2 also supports BMI2 */
#      define STATIC_BMI2 1
#    endif
#  endif
#endif

#ifndef STATIC_BMI2
#  define STATIC_BMI2 0
#endif

/* compat. with non-clang compilers */
#ifndef __has_builtin
#  define __has_builtin(x) 0
#endif

/* compat. with non-clang compilers */
#ifndef __has_feature
#  define __has_feature(x) 0
#endif

/* detects whether we are being compiled under msan */
#ifndef ZSTD_MEMORY_SANITIZER
#  if __has_feature(memory_sanitizer)
#    define ZSTD_MEMORY_SANITIZER 1
#  else
#    define ZSTD_MEMORY_SANITIZER 0
#  endif
#endif

#if ZSTD_MEMORY_SANITIZER
/* Not all platforms that support msan provide sanitizers/msan_interface.h.
 * We therefore declare the functions we need ourselves, rather than trying to
 * include the header file... */
#include <stddef.h>  /* size_t */
#define ZSTD_DEPS_NEED_STDINT
#include "zstd_deps.h"  /* intptr_t */

/* Make memory region fully initialized (without changing its contents). */
void __msan_unpoison(const volatile void *a, size_t size);

/* Make memory region fully uninitialized (without changing its contents).
   This is a legacy interface that does not update origin information. Use
   __msan_allocated_memory() instead. */
void __msan_poison(const volatile void *a, size_t size);

/* Returns the offset of the first (at least partially) poisoned byte in the
   memory range, or -1 if the whole range is good. */
intptr_t __msan_test_shadow(const volatile void *x, size_t size);
#endif

/* detects whether we are being compiled under asan */
#ifndef ZSTD_ADDRESS_SANITIZER
#  if __has_feature(address_sanitizer)
#    define ZSTD_ADDRESS_SANITIZER 1
#  elif defined(__SANITIZE_ADDRESS__)
#    define ZSTD_ADDRESS_SANITIZER 1
#  else
#    define ZSTD_ADDRESS_SANITIZER 0
#  endif
#endif

#if ZSTD_ADDRESS_SANITIZER
/* Not all platforms that support asan provide sanitizers/asan_interface.h.
 * We therefore declare the functions we need ourselves, rather than trying to
 * include the header file... */
#include <stddef.h>  /* size_t */

/**
 * Marks a memory region (<c>[addr, addr+size)</c>) as unaddressable.
 *
 * This memory must be previously allocated by your program. Instrumented
 * code is forbidden from accessing addresses in this region until it is
 * unpoisoned. This function is not guaranteed to poison the entire region -
 * it could poison only a subregion of <c>[addr, addr+size)</c> due to ASan
 * alignment restrictions.
 *
 * \note This function is not thread-safe because no two threads can poison or
 * unpoison memory in the same memory region simultaneously.
 *
 * \param addr Start of memory region.
 * \param size Size of memory region. */
void __asan_poison_memory_region(void const volatile *addr, size_t size);

/**
 * Marks a memory region (<c>[addr, addr+size)</c>) as addressable.
 *
 * This memory must be previously allocated by your program. Accessing
 * addresses in this region is allowed until this region is poisoned again.
 * This function could unpoison a super-region of <c>[addr, addr+size)</c> due
 * to ASan alignment restrictions.
 *
 * \note This function is not thread-safe because no two threads can
 * poison or unpoison memory in the same memory region simultaneously.
 *
 * \param addr Start of memory region.
 * \param size Size of memory region. */
void __asan_unpoison_memory_region(void const volatile *addr, size_t size);
#endif
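
/* Illustrative sketch (not part of the original header): guarding the unused
 * tail of a pooled buffer so instrumented builds catch stray reads into it.
 * The names poolBuffer, poolCapacity and usedSize are hypothetical.
 *
 *   #if ZSTD_ADDRESS_SANITIZER
 *       __asan_poison_memory_region(poolBuffer + usedSize, poolCapacity - usedSize);
 *   #endif
 *       // ... work only within the first usedSize bytes ...
 *   #if ZSTD_ADDRESS_SANITIZER
 *       __asan_unpoison_memory_region(poolBuffer + usedSize, poolCapacity - usedSize);
 *   #endif
 */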

#endif /* ZSTD_COMPILER_H */