#include "kmp_config.h"

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------- */

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

/* ------------------------- Compiler recognition ------------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 or KMP_ARCH_X86_64 */

#if KMP_OS_WINDOWS
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifdef _MSC_VER
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;
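
// Example (editor's illustration, not part of the original header): the *_SPEC
// macros carry only the conversion letters, so callers splice them into printf
// formats via string-literal concatenation and get the right width per target:
//
//   kmp_int64 n = 42;
//   size_t sz = sizeof(n);
//   printf("n=%" KMP_INT64_SPEC " size=%" KMP_SIZE_T_SPEC "\n", n, sz);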
#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
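
// Example (editor's illustration): kmp_intptr_t is an integer wide enough to
// hold a pointer, so pointer values can round-trip through it:
//
//   void *p = &some_object;                 /* some_object is hypothetical */
//   kmp_intptr_t bits = (kmp_intptr_t)p;
//   void *q = (void *)bits;                 /* q == p */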
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ) and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
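
// Example (editor's illustration, not in the original header): traits_t lets
// generic code pick the right printf specifier and limits for a type. spec
// holds "d", "u", "lld", or "llu" (it is initialized elsewhere in the
// runtime), so a generic dump can be written as:
//
//   template <typename T> void dump_max() {
//     char fmt[32];
//     snprintf(fmt, sizeof(fmt), "max=%%%s\n", traits_t<T>::spec);
//     printf(fmt, traits_t<T>::max_value);
//   }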
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))

/* ---------- Support for cache alignment, padding, locking, atomics ------ */

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS(alias_of)
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)           \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias    \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                   \
  __asm__(                                                                     \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR(\
          api_name) "@" ver_str "\n\t");                                       \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                \
      api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)
#endif // KMP_USE_VERSION_SYMBOLS

#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};
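
// Example (editor's illustration): KMP_ALIGN_CACHE places a hot shared field
// on its own cache line so per-thread counters do not false-share. The struct
// name is hypothetical:
//
//   struct kmp_counter {
//     KMP_ALIGN_CACHE volatile kmp_int32 value; // starts a new cache line
//   };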
/* ------------------------ Synchronization primitives -------------------- */

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)

/* Fetch-and-op primitives built on InterlockedExchangeAdd; each returns the
   value the location held before the operation. */
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}

/* Routines implemented in assembly elsewhere in the runtime. */
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
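
// Example (editor's illustration): all "test then" operations are
// fetch-and-op, i.e. they return the value the location held *before* the
// update:
//
//   volatile kmp_int32 ticket = 0;
//   kmp_int32 mine = KMP_TEST_THEN_INC32(&ticket); // mine == 0, ticket == 1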
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}
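
// Example (editor's illustration): the ACQ/REL compare-and-store variants
// return nonzero on success, so a spin-acquire loop reads naturally:
//
//   volatile kmp_int32 lock = 0; // 0 = free, 1 = held
//   while (!KMP_COMPARE_AND_STORE_ACQ32(&lock, 0, 1)) {
//     /* spin until we swing the lock from 0 to 1 */
//   }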
#else /* remaining targets: use the assembly-implemented routines */

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
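
// Note (editor's): this fallback branch maps the same KMP_* macros onto the
// hand-written assembly routines declared above, so callers look identical in
// all three branches, e.g.:
//
//   kmp_int32 old = KMP_XCHG_FIXED32(&flag, 1); // atomically set, keep old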
#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------- */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||     \
    KMP_ARCH_MIPS64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif
#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif
#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif
#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif
#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* Thread-consistent reads (TCR) and writes (TCW) of shared locations. */
#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
                              (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
/* 32 bit pointers */
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))
#else /* 64 bit pointers */
#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))
#endif /* KMP_ARCH_X86 || KMP_ARCH_MIPS */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif
#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);
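
// Example (editor's illustration): microtask_t is the type of an outlined
// parallel-region body. A conforming function (name hypothetical) takes the
// two runtime-supplied int pointers plus varargs for captured arguments:
//
//   void my_microtask(int *gtid, int *npr, ...) { /* region body */ }
//   microtask_t fn = my_microtask;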
#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT_YIELD __kmp_wait_yield_4
#define KMP_WAIT_YIELD_PTR __kmp_wait_yield_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
   (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

#include "kmp_safe_c_api.h"
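
// Usage note (editor's illustration): TCR_PTR/TCW_PTR, defined above, mark
// deliberate unsynchronized reads and writes of shared pointers; they expand
// to plain pointer-width loads and stores:
//
//   void *volatile shared_p;
//   TCW_PTR(shared_p, some_ptr);   /* plain pointer-sized store */
//   void *seen = TCR_PTR(shared_p); /* plain pointer-sized load */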