LLVM OpenMP* Runtime Library
kmp_os.h
/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.txt for details.
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdlib.h>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif
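
// Exactly one of the KMP_COMPILER_* macros above is redefined to 1; the others
// remain 0, so they can be used directly in #if expressions. An illustrative
// sketch (MY_NOINLINE is a hypothetical helper, not defined by this file):
//
//   #if KMP_COMPILER_GCC || KMP_COMPILER_CLANG
//   #define MY_NOINLINE __attribute__((noinline))
//   #elif KMP_COMPILER_MSVC
//   #define MY_NOINLINE __declspec(noinline)
//   #else
//   #define MY_NOINLINE
//   #endif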

#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#if KMP_OS_WINDOWS
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif
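
// The *_SPEC strings are printf format suffixes (without the leading '%'), so
// they are spliced into format strings by string-literal concatenation.
// Illustrative usage (variable names are hypothetical):
//
//   kmp_int64 bytes = 1024;
//   size_t len = sizeof(bytes);
//   printf("bytes=%" KMP_INT64_SPEC " len=%" KMP_SIZE_T_SPEC "\n", bytes, len);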

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// Traits template for debug-print format specifiers (d, u, lld, llu) and for
// obtaining the signed/unsigned flavors of a type.
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus
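
// traits_t supports type-generic debug/trace code: traits_t<T>::spec supplies
// the printf specifier, and signed_t/unsigned_t give the matching integer
// flavors. A minimal sketch (function and buffer names are hypothetical):
//
//   template <typename T>
//   void print_value(char *buf, size_t size, T value) {
//     char format[16];
//     snprintf(format, sizeof(format), "%%%s\n", traits_t<T>::spec);
//     snprintf(buf, size, format, value);
//   }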

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
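
// Both macros rely on the page size being a power of two, so masking with
// ~(page - 1) clears the low-order bits. For example, with a (hypothetical)
// 4096-byte page:
//
//   PAGE_ALIGNED(0x12000)   -> true  (low 12 bits are zero)
//   PAGE_ALIGNED(0x12345)   -> false
//   ALIGN_TO_PAGE(0x12345)  -> (void *)0x12000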

/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif
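
// These wrappers hide the compiler-specific spellings of non-returning
// functions, alignment, thread-local storage and symbol aliasing.
// Illustrative (hypothetical) declarations:
//
//   static KMP_THREAD_LOCAL int my_cached_gtid;   // one instance per thread
//   KMP_NORETURN void my_fatal_exit(int code);    // never returns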

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifdef KMP_USE_VERSION_SYMBOLS
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t"); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR( \
      api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
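
// With version symbols enabled, an exported entry point is defined under its
// __kmp_api_ internal name and then bound to versioned ELF symbol names via
// .symver. A sketch of the intended pattern (the entry name and version
// string below are illustrative, not definitions from this file):
//
//   int KMP_EXPAND_NAME(omp_in_parallel)(void) { /* ... */ }
//   KMP_VERSION_SYMBOL(omp_in_parallel, 10, "OMP_1.0");
//
// Without version symbols, KMP_EXPAND_NAME(api_name) is just api_name and
// KMP_VERSION_SYMBOL expands to nothing.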

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
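
// KMP_ALIGN_CACHE is typically placed on fields (or whole structs) that are
// written by different threads, so each hot field starts on its own cache
// line and false sharing is avoided. Hedged sketch (names are hypothetical):
//
//   typedef struct my_counters {
//     KMP_ALIGN_CACHE volatile kmp_int32 enter_count;
//     KMP_ALIGN_CACHE volatile kmp_int32 exit_count;
//   } my_counters_t;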

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives
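// Three interchangeable implementations of the KMP_TEST_THEN_*,
// KMP_COMPARE_AND_STORE_* and KMP_XCHG_* operations follow: native
// Interlocked* intrinsics on Windows, GCC/Clang __sync builtins on Unix, and
// otherwise extern __kmp_* helpers that the runtime implements in assembly.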

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
    kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
    kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
    kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
    kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
    kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
    kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
    kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
    kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
    (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
    (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
    (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
    (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
    (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
    (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
    (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
    (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
    (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
    (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
    (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
    (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
    (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
    (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
    (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
    (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
    (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
    (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
    (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
    (kmp_uint64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
    __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
    __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
    kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
    kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
    kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
    kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
    kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
    kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
    kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
    kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
    (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
    (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
    (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
    (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
    (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
    (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
    (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
    (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
    (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
    (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
    (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
    (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */
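
// Whichever branch was selected above, the macros share one contract:
// KMP_TEST_THEN_* return the value the location held before the update, and
// KMP_COMPARE_AND_STORE_* store sv only if the location still equals cv.
// Minimal sketch (counter and flag are hypothetical variables):
//
//   volatile kmp_int32 counter = 0;
//   volatile kmp_int32 flag = 0;
//   kmp_int32 old = KMP_TEST_THEN_INC32(&counter); // old == 0, counter == 1
//   if (KMP_COMPARE_AND_STORE_ACQ32(&flag, 0, 1)) {
//     /* this thread won the race and set flag from 0 to 1 */
//   }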

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
    KMP_ARCH_MIPS64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a) (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c) \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
    (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c) \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
    (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */
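
// The TCR_* / TCW_* macros wrap reads and writes of shared variables
// (currently plain accesses; see the FIXME above), with TCR_PTR / TCW_PTR
// selecting the 4- or 8-byte form that matches the pointer size on the
// target. Illustrative (hypothetical) use:
//
//   void *my_slot;                  // hypothetical shared location
//   TCW_PTR(my_slot, malloc(64));   // store a pointer value
//   void *seen = TCR_PTR(my_slot);  // read it back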

/* If these FTN_{TRUE,FALSE} values change, we may need to change several
   places where they are used to check whether the language is Fortran, not
   C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);
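
// microtask_t is the type of the compiler-outlined body of a parallel region;
// the runtime invokes it once per thread in the team, passing pointers to the
// global thread id and a second per-thread id, followed by the region's
// arguments. A hedged sketch of such an outlined routine (the name is
// hypothetical):
//
//   void my_outlined_body(int *gtid, int *btid, ...) {
//     /* body of the parallel region, executed by each thread in the team */
//   }
//   microtask_t fn = my_outlined_body;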

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT_YIELD __kmp_wait_yield_4
#define KMP_WAIT_YIELD_PTR __kmp_wait_yield_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#if OMP_45_ENABLED
#define KMP_USE_DYNAMIC_LOCK 1
#endif

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable conversion of timestamp ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Safe C API
#include "kmp_safe_c_api.h"

#endif /* KMP_OS_H */