1 /*
2  * Copyright (c) 2016-2020, Yann Collet, Facebook, Inc.
3  * All rights reserved.
4  *
5  * This source code is licensed under both the BSD-style license (found in the
6  * LICENSE file in the root directory of this source tree) and the GPLv2 (found
7  * in the COPYING file in the root directory of this source tree).
8  * You may select, at your option, one of the above-listed licenses.
9  */
10 
11 #ifndef ZSTD_COMPILER_H
12 #define ZSTD_COMPILER_H
13 
14 /*-*******************************************************
15 *  Compiler specifics
16 *********************************************************/
17 /* force inlining */
18 
#if !defined(ZSTD_NO_INLINE)
/* INLINE_KEYWORD: the `inline` keyword where the dialect supports it
 * (GNU C extensions, C++, or C99 and later); empty under strict C89. */
#if (defined(__GNUC__) && !defined(__STRICT_ANSI__)) || defined(__cplusplus) || defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   /* C99 */
#  define INLINE_KEYWORD inline
#else
#  define INLINE_KEYWORD
#endif

/* FORCE_INLINE_ATTR: compiler-specific attribute turning inlining from a
 * hint into a requirement; empty when the compiler has no equivalent. */
#if defined(__GNUC__) || defined(__ICCARM__)
#  define FORCE_INLINE_ATTR __attribute__((always_inline))
#elif defined(_MSC_VER)
#  define FORCE_INLINE_ATTR __forceinline
#else
#  define FORCE_INLINE_ATTR
#endif

#else

/* ZSTD_NO_INLINE: build explicitly opts out of forced inlining
 * (useful for debugging or code-size-sensitive builds). */
#define INLINE_KEYWORD
#define FORCE_INLINE_ATTR

#endif
40 
41 /**
  On MSVC qsort requires that functions passed into it use the __cdecl calling convention (CC).
  This explicitly marks such functions as __cdecl so that the code will still compile
  if a CC other than __cdecl has been made the default.
45 */
/* Expands to __cdecl on MSVC, and to nothing elsewhere. */
#ifdef _MSC_VER
#  define WIN_CDECL __cdecl
#else
#  define WIN_CDECL
#endif
51 
52 /**
53  * FORCE_INLINE_TEMPLATE is used to define C "templates", which take constant
54  * parameters. They must be inlined for the compiler to eliminate the constant
55  * branches.
56  */
57 #define FORCE_INLINE_TEMPLATE static INLINE_KEYWORD FORCE_INLINE_ATTR
58 /**
59  * HINT_INLINE is used to help the compiler generate better code. It is *not*
60  * used for "templates", so it can be tweaked based on the compilers
61  * performance.
62  *
63  * gcc-4.8 and gcc-4.9 have been shown to benefit from leaving off the
64  * always_inline attribute.
65  *
66  * clang up to 5.0.0 (trunk) benefit tremendously from the always_inline
67  * attribute.
68  */
69 #if !defined(__clang__) && defined(__GNUC__) && __GNUC__ >= 4 && __GNUC_MINOR__ >= 8 && __GNUC__ < 5
70 #  define HINT_INLINE static INLINE_KEYWORD
71 #else
72 #  define HINT_INLINE static INLINE_KEYWORD FORCE_INLINE_ATTR
73 #endif
74 
/* UNUSED_ATTR marks a declaration as intentionally possibly-unused,
 * silencing "defined but not used" warnings on GNU-compatible compilers. */
#ifdef __GNUC__
#  define UNUSED_ATTR __attribute__((unused))
#else
#  define UNUSED_ATTR
#endif
81 
/* FORCE_NOINLINE: static function that the compiler must not inline.
 * MSVC and GNU-compatible compilers each have their own spelling;
 * other compilers fall back to plain `static`. */
#if defined(_MSC_VER)
#  define FORCE_NOINLINE static __declspec(noinline)
#elif defined(__GNUC__) || defined(__ICCARM__)
#  define FORCE_NOINLINE static __attribute__((__noinline__))
#else
#  define FORCE_NOINLINE static
#endif
92 
/* target attribute */
/* Fallback so `__has_attribute(...)` can be used unconditionally below
 * (it is a clang built-in; gcc gained it in gcc-5). Must be defined
 * before any use, e.g. in the DYNAMIC_BMI2 check further down. */
#ifndef __has_attribute
  #define __has_attribute(x) 0  /* Compatibility with non-clang compilers. */
#endif
/* TARGET_ATTRIBUTE(target) compiles a single function with additional ISA
 * features enabled, e.g. TARGET_ATTRIBUTE("bmi2"); empty when the compiler
 * offers no equivalent mechanism. */
#if defined(__GNUC__) || defined(__ICCARM__)
#  define TARGET_ATTRIBUTE(target) __attribute__((__target__(target)))
#else
#  define TARGET_ATTRIBUTE(target)
#endif
102 
/* Enable runtime BMI2 dispatch based on the CPU.
 * Enabled for clang & gcc >=4.8 on x86 when BMI2 isn't enabled by default.
 *
 * Fix: the architecture test used `_M_X86`, which no compiler defines
 * (MSVC's macros are `_M_X64` for x64 and `_M_IX86` for 32-bit x86), so
 * the MSVC-macro leg could never match. `_M_X64` matters for clang-cl,
 * which defines __clang__ together with MSVC's architecture macros.
 */
#ifndef DYNAMIC_BMI2
  #if ((defined(__clang__) && __has_attribute(__target__)) \
      || (defined(__GNUC__) \
          && (__GNUC__ >= 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)))) \
      && (defined(__x86_64__) || defined(_M_X64)) \
      && !defined(__BMI2__)
  #  define DYNAMIC_BMI2 1
  #else
  #  define DYNAMIC_BMI2 0
  #endif
#endif
117 
/* prefetch
 * can be disabled, by declaring NO_PREFETCH build macro.
 *
 * Fix: the MSVC architecture test used `_M_I86`, which MSVC only defined
 * for 16-bit targets; the 32-bit x86 macro is `_M_IX86`. The typo silently
 * disabled prefetching on 32-bit MSVC builds. */
#if defined(NO_PREFETCH)
#  define PREFETCH_L1(ptr)  (void)(ptr)  /* disabled */
#  define PREFETCH_L2(ptr)  (void)(ptr)  /* disabled */
#else
#  if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))  /* _mm_prefetch() is not defined outside of x86/x64 */
#    include <mmintrin.h>   /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */
#    define PREFETCH_L1(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
#    define PREFETCH_L2(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T1)
#  elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
#    define PREFETCH_L1(ptr)  __builtin_prefetch((ptr), 0 /* rw==read */, 3 /* locality */)
#    define PREFETCH_L2(ptr)  __builtin_prefetch((ptr), 0 /* rw==read */, 2 /* locality */)
#  elif defined(__aarch64__)
#    define PREFETCH_L1(ptr)  __asm__ __volatile__("prfm pldl1keep, %0" ::"Q"(*(ptr)))
#    define PREFETCH_L2(ptr)  __asm__ __volatile__("prfm pldl2keep, %0" ::"Q"(*(ptr)))
#  else
#    define PREFETCH_L1(ptr) (void)(ptr)  /* disabled */
#    define PREFETCH_L2(ptr) (void)(ptr)  /* disabled */
#  endif
#endif  /* NO_PREFETCH */
139 
#define CACHELINE_SIZE 64

/* PREFETCH_AREA(p, s): issue an L2 prefetch for every cache line in the
 * region [p, p+s).
 * Fix: wrapped in do { } while (0) so the macro expands to a single
 * statement. The previous bare `{ ... }` form broke inside an un-braced
 * if/else (`if (c) PREFETCH_AREA(p, s); else ...` would not compile) and
 * left a stray empty statement after the usual trailing ';'. */
#define PREFETCH_AREA(p, s)  do {           \
    const char* const _ptr = (const char*)(p);  \
    size_t const _size = (size_t)(s);       \
    size_t _pos;                            \
    for (_pos=0; _pos<_size; _pos+=CACHELINE_SIZE) {  \
        PREFETCH_L2(_ptr + _pos);           \
    }                                       \
} while (0)
150 
/* vectorization
 * older GCC (pre gcc-4.3 picked as the cutoff) uses a different syntax */
#if !defined(__INTEL_COMPILER) && !defined(__clang__) && defined(__GNUC__)
#  if (__GNUC__ == 4 && __GNUC_MINOR__ > 3) || (__GNUC__ >= 5)
     /* gcc >= 4.4: per-function attribute form */
#    define DONT_VECTORIZE __attribute__((optimize("no-tree-vectorize")))
#  else
     /* older gcc: a pragma, which affects the functions that follow it
      * rather than attaching to a single declaration */
#    define DONT_VECTORIZE _Pragma("GCC optimize(\"no-tree-vectorize\")")
#  endif
#else
   /* other compilers: no known equivalent; expands to nothing */
#  define DONT_VECTORIZE
#endif
162 
/* Tell the compiler that a branch is likely or unlikely.
 * Only use these macros if it causes the compiler to generate better code.
 * If you can remove a LIKELY/UNLIKELY annotation without speed changes in gcc
 * and clang, please do.
 */
#if defined(__GNUC__)
/* __builtin_expect returns the value of (x) unchanged; the second argument
 * only guides branch layout, so these are safe inside any expression. */
#define LIKELY(x) (__builtin_expect((x), 1))
#define UNLIKELY(x) (__builtin_expect((x), 0))
#else
#define LIKELY(x) (x)
#define UNLIKELY(x) (x)
#endif
175 
/* disable warnings */
/* These MSVC warnings are noisy but benign for this codebase; they are
 * silenced globally for every translation unit that includes this header. */
#ifdef _MSC_VER    /* Visual Studio */
#  include <intrin.h>                    /* For Visual 2005 */
#  pragma warning(disable : 4100)        /* disable: C4100: unreferenced formal parameter */
#  pragma warning(disable : 4127)        /* disable: C4127: conditional expression is constant */
#  pragma warning(disable : 4204)        /* disable: C4204: non-constant aggregate initializer */
#  pragma warning(disable : 4214)        /* disable: C4214: non-int bitfields */
#  pragma warning(disable : 4324)        /* disable: C4324: padded structure */
#endif
185 
/* Like DYNAMIC_BMI2 but for compile-time determination of BMI2 support.
 * Fix: the architecture test used `_M_I86`, which MSVC only defined for
 * 16-bit targets; the 32-bit x86 macro is `_M_IX86`. Also converted the
 * lone `//` comment to the block style used throughout this header. */
#ifndef STATIC_BMI2
#  if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
#    ifdef __AVX2__  /* MSVC does not have a BMI2-specific flag, but every CPU that supports AVX2 also supports BMI2 */
#       define STATIC_BMI2 1
#    endif
#  endif
#endif

#ifndef STATIC_BMI2
    #define STATIC_BMI2 0
#endif
198 
/* compat. with non-clang compilers */
/* Makes `__has_builtin(x)` safely evaluate to 0 where it isn't provided. */
#ifndef __has_builtin
#  define __has_builtin(x) 0
#endif

/* compat. with non-clang compilers */
/* Makes `__has_feature(x)` safely evaluate to 0 where it isn't provided;
 * must be defined before the sanitizer checks below. */
#ifndef __has_feature
#  define __has_feature(x) 0
#endif

/* detects whether we are being compiled under msan */
/* __has_feature(memory_sanitizer) is clang's detection mechanism;
 * builds may also force the value by pre-defining ZSTD_MEMORY_SANITIZER. */
#ifndef ZSTD_MEMORY_SANITIZER
#  if __has_feature(memory_sanitizer)
#    define ZSTD_MEMORY_SANITIZER 1
#  else
#    define ZSTD_MEMORY_SANITIZER 0
#  endif
#endif
217 
#if ZSTD_MEMORY_SANITIZER
/* Not all platforms that support msan provide sanitizers/msan_interface.h.
 * We therefore declare the functions we need ourselves, rather than trying to
 * include the header file...
 * NOTE(review): these prototypes must stay byte-identical to the ones in
 * sanitizer/msan_interface.h — the definitions live in the msan runtime. */
#include <stddef.h>  /* size_t */
#define ZSTD_DEPS_NEED_STDINT
#include "zstd_deps.h"  /* intptr_t */

/* Make memory region fully initialized (without changing its contents). */
void __msan_unpoison(const volatile void *a, size_t size);

/* Make memory region fully uninitialized (without changing its contents).
   This is a legacy interface that does not update origin information. Use
   __msan_allocated_memory() instead. */
void __msan_poison(const volatile void *a, size_t size);

/* Returns the offset of the first (at least partially) poisoned byte in the
   memory range, or -1 if the whole range is good. */
intptr_t __msan_test_shadow(const volatile void *x, size_t size);
#endif
238 
/* detects whether we are being compiled under asan */
/* __has_feature(address_sanitizer) is clang's detection mechanism;
 * __SANITIZE_ADDRESS__ is defined by gcc when -fsanitize=address is used.
 * Builds may also force the value by pre-defining ZSTD_ADDRESS_SANITIZER. */
#ifndef ZSTD_ADDRESS_SANITIZER
#  if __has_feature(address_sanitizer)
#    define ZSTD_ADDRESS_SANITIZER 1
#  elif defined(__SANITIZE_ADDRESS__)
#    define ZSTD_ADDRESS_SANITIZER 1
#  else
#    define ZSTD_ADDRESS_SANITIZER 0
#  endif
#endif
249 
#if ZSTD_ADDRESS_SANITIZER
/* Not all platforms that support asan provide sanitizers/asan_interface.h.
 * We therefore declare the functions we need ourselves, rather than trying to
 * include the header file...
 * NOTE(review): these prototypes must stay byte-identical to the ones in
 * sanitizer/asan_interface.h — the definitions live in the asan runtime. */
#include <stddef.h>  /* size_t */

/**
 * Marks a memory region (<c>[addr, addr+size)</c>) as unaddressable.
 *
 * This memory must be previously allocated by your program. Instrumented
 * code is forbidden from accessing addresses in this region until it is
 * unpoisoned. This function is not guaranteed to poison the entire region -
 * it could poison only a subregion of <c>[addr, addr+size)</c> due to ASan
 * alignment restrictions.
 *
 * \note This function is not thread-safe because no two threads can poison or
 * unpoison memory in the same memory region simultaneously.
 *
 * \param addr Start of memory region.
 * \param size Size of memory region. */
void __asan_poison_memory_region(void const volatile *addr, size_t size);

/**
 * Marks a memory region (<c>[addr, addr+size)</c>) as addressable.
 *
 * This memory must be previously allocated by your program. Accessing
 * addresses in this region is allowed until this region is poisoned again.
 * This function could unpoison a super-region of <c>[addr, addr+size)</c> due
 * to ASan alignment restrictions.
 *
 * \note This function is not thread-safe because no two threads can
 * poison or unpoison memory in the same memory region simultaneously.
 *
 * \param addr Start of memory region.
 * \param size Size of memory region. */
void __asan_unpoison_memory_region(void const volatile *addr, size_t size);
#endif
287 
288 #endif /* ZSTD_COMPILER_H */
289