#include "kmp_config.h"

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3

#define KMP_PTR_SKIP (sizeof(void *))
#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif
#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0
#define KMP_COMPILER_ICX 0

#if __INTEL_CLANG_COMPILER
#undef KMP_COMPILER_ICX
#define KMP_COMPILER_ICX 1
#elif defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif
#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD)
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif
/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
#define KMP_USE_X87CONTROL 0

#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */
#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */
#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 ||                \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif
#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif
typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif
#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)
// stdarg handling
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64) &&                  \
    (KMP_OS_FREEBSD || KMP_OS_LINUX)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif
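// Usage sketch (illustrative, not part of the upstream header): on ABIs where
// va_list is an array type, forwarding it through another call requires a
// pointer, which is what kmp_va_addr_of/kmp_va_deref abstract away so call
// sites look identical on both ABI variants.
//
//   void sink(kmp_va_list ap) { int x = va_arg(kmp_va_deref(ap), int); }
//   void source(int n, ...) {
//     va_list ap;
//     va_start(ap, n);
//     sink(kmp_va_addr_of(ap)); // same call whether kmp_va_list is va_list or va_list*
//     va_end(ap);
//   }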
#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// traits_t: maps an integer type to its signed/unsigned flavors, a floating
// type of comparable range, and a printf format specifier.
template <typename T> struct traits_t {};
// int traits
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int traits
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long traits
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long traits
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long traits
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
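// Usage sketch (illustrative, not from the upstream header): the traits let
// generic code pick the matching format specifier and unsigned flavor of a
// type; "spec" is a declaration only and is defined in a source file.
//
//   template <typename T> void dump(T v) {
//     typename traits_t<T>::unsigned_t bits =
//         (typename traits_t<T>::unsigned_t)v; // lossless reinterpretation
//     printf(traits_t<T>::spec, v);            // e.g. "d" for signed int
//     (void)bits;
//   }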
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus
#define KMP_EXPORT extern /* export declaration in guide libraries */
#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif
/* Check if the OS/arch can support user-level mwait */
#define KMP_HAVE_MWAIT                                                         \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&   \
   !KMP_MIC2)
#define KMP_HAVE_UMWAIT                                                        \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&   \
   !KMP_MIC)
#if KMP_OS_WINDOWS
static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif
#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
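// Worked example (illustrative, assuming a 4096-byte page size):
//   PAGE_ALIGNED(0x2000)          -> true  (0x2000 & 0xFFF == 0)
//   PAGE_ALIGNED(0x2010)          -> false
//   ALIGN_TO_PAGE((void *)0x2010) -> (void *)0x2000 (rounds down to page start)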
#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif
#define KMP_CACHE_PREFETCH(ADDR) /* nothing */
// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#define KMP_FALLTHROUGH() ((void)0)
#endif
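// Usage sketch (illustrative, not from the upstream header):
//
//   switch (kind) {
//   case 1:
//     do_first_step();
//     KMP_FALLTHROUGH(); // deliberate fall-through; silences -Wimplicit-fallthrough
//   case 2:
//     do_second_step();
//     break;
//   }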
#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG /* Nothing */
#endif

#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
#else
#define KMP_ATTRIBUTE_TARGET_RTM /* Nothing */
#endif
// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif
#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS(alias_of)
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif
#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
#endif
// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
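// Expansion sketch (illustrative): the two-level macro forces argument
// expansion before stringification.
//
//   #define N foo
//   KMP_STR(N)   // -> _KMP_STR(foo) -> "foo"
//   _KMP_STR(N)  // -> "N" (argument stringized before it can expand)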
#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                        \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)          \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias   \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                  \
  __asm__(                                                                    \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t");                                      \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(               \
      api_name) "@@" default_ver "\n\t")

#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str)        \
  _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str,       \
                                 default_ver)                                 \
  __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##apic_name))));                 \
  __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR(              \
      apic_name) "@@" default_ver "\n\t");                                    \
  __asm__(                                                                    \
      ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num,                 \
                                ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
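// Expansion sketch (illustrative, GNU toolchain assumed): for an API "foo",
// KMP_VERSION_SYMBOL(foo, 10, "VERSION_1.0") emits roughly
//
//   __typeof__(__kmp_api_foo) __kmp_api_foo_10_alias
//       __attribute__((alias("__kmp_api_foo")));
//   __asm__(".symver __kmp_api_foo_10_alias,foo@VERSION_1.0\n\t");
//   __asm__(".symver __kmp_api_foo,foo@@VERSION\n\t");
//
// i.e. one implementation is exported both at an old version node (@) and as
// the default (@@) version of the symbol.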
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};
// Synchronization primitives
#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#if !(KMP_COMPILER_ICX && KMP_32_BIT_ARCH)
#pragma intrinsic(InterlockedExchange64)
#endif
#endif
// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
#define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store_rel16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv),   \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv),   \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv),   \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv),   \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))
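// Usage sketch (illustrative): the _ACQ/_REL compare-and-store variants return
// a boolean success flag, which makes them natural for a simple test-and-set
// spin lock:
//
//   void acquire(volatile kmp_int32 *lk) {      // 0 = free, 1 = held
//     while (!KMP_COMPARE_AND_STORE_ACQ32(lk, 0, 1)) {
//       /* spin */
//     }
//   }
//   void release(volatile kmp_int32 *lk) {
//     KMP_COMPARE_AND_STORE_REL32(lk, 1, 0);
//   }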
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}

inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}
// The _RET versions return the previous value instead of a bool
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv),   \
                                (kmp_int64)(cv))
#else // !(KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG)

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),       \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),       \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),       \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),       \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),       \
                            (kmp_int32)(sv))
#else /* 64-bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),       \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),   \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif // KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p)                                                 \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),             \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif
#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),             \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),    \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),    \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),  \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),  \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),  \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),  \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),           \
                               (void *)(sv))
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
// MIPS lacks reliable 64-bit __sync builtins; use __atomic builtins instead.
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
// The val variant returns the previous value (cv is updated in place on
// failure), matching __sync_val_compare_and_swap semantics.
static inline kmp_uint64 mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                        kmp_uint64 cv,
                                                        kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                 \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                 \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),\
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),  \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),  \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                              (kmp_uint64)(sv))
#endif
#if KMP_OS_DARWIN && defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1800
#define KMP_XCHG_FIXED8(p, v)                                                  \
  __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v),              \
                      __ATOMIC_SEQ_CST)
#else
#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#endif
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}
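// Usage sketch (illustrative): these helpers atomically exchange the raw bit
// pattern of a float through an integer atomic, then reinterpret the previous
// bits back to floating point (valid under the usual IEEE-754 layout):
//
//   volatile kmp_real32 shared = 0.0f;
//   kmp_real32 old = KMP_XCHG_REAL32(&shared, 1.0f); // old == 0.0f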
#else // !KMP_ASM_INTRINS

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);
extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))
#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),          \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),          \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),       \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),       \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),       \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),       \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),       \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),       \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),       \
                            (kmp_int32)(sv))
#else /* 64-bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),       \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),   \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),   \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||    \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#if KMP_OS_WINDOWS
#undef KMP_MB
#define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
#else /* !KMP_OS_WINDOWS */
#define KMP_MB() __sync_synchronize()
#endif
#endif
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
#define KMP_MFENCE_() _mm_mfence()
#define KMP_SFENCE_() _mm_sfence()
#elif KMP_COMPILER_MSVC
#define KMP_MFENCE_() MemoryBarrier()
#define KMP_SFENCE_() MemoryBarrier()
#else
#define KMP_MFENCE_() __sync_synchronize()
#define KMP_SFENCE_() __sync_synchronize()
#endif
#define KMP_MFENCE()                                                           \
  if (UNLIKELY(!__kmp_cpuinfo.initialized)) {                                 \
    __kmp_query_cpuid(&__kmp_cpuinfo);                                        \
  }                                                                           \
  if (__kmp_cpuinfo.flags.sse2) {                                             \
    KMP_MFENCE_();                                                            \
  }
#define KMP_SFENCE() KMP_SFENCE_()
#else
#define KMP_MFENCE() KMP_MB()
#define KMP_SFENCE() KMP_MB()
#endif
#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif
/* ------------- relaxed consistency memory model stuff ------------------ */
// TCR = thread-consistent read, TCW = thread-consistent write,
// TCI/TCD = increment/decrement; _SYNC variants are synchronized accesses.
#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),    \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),    \
                              (kmp_int64)(b), (kmp_int64)(c))
#if KMP_ARCH_X86 || KMP_ARCH_MIPS
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))
#else /* 64-bit pointers */
#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))
#endif /* KMP_ARCH_X86 || KMP_ARCH_MIPS */
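// Usage sketch (illustrative): TCR_*/TCW_* mark reads and writes of shared
// runtime state. The plain definitions above compile to ordinary accesses,
// but the call sites stay greppable and retargetable to stronger primitives.
//
//   volatile kmp_int32 flag = 0;
//   TCW_4(flag, 1);               // thread-consistent write
//   kmp_int32 seen = TCR_4(flag); // thread-consistent read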
#define FTN_TRUE TRUE
#define FTN_FALSE FALSE
typedef void (*microtask_t)(int *gtid, int *npr, ...);
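// Illustrative sketch (not from the upstream header): a microtask is the
// compiler-outlined body of a parallel region that the runtime invokes once
// per thread; by convention the first two pointer arguments carry the global
// thread id and the bound thread id, followed by the user's shared arguments.
//
//   void example_microtask(int *gtid, int *npr, ...) {
//     /* per-thread body of the parallel region */
//   }
//   microtask_t fn = &example_microtask; // hypothetical registration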
#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif
#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif
// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1
// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual studio can't handle the asm sections in this code
#define KMP_USE_TSX (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif
// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif
// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0,
  kmp_warnings_low,
  kmp_warnings_explicit = 6,
  kmp_warnings_verbose
};
#include "kmp_safe_c_api.h"
// Macros for C++11 atomic operations
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
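// Usage sketch (illustrative): the macros expand directly to std::atomic
// member calls with the named memory order.
//
//   std::atomic<kmp_int32> counter{0};
//   KMP_ATOMIC_INC(&counter);                   // counter.fetch_add(1, acq_rel)
//   kmp_int32 c = KMP_ATOMIC_LD_ACQ(&counter);  // counter.load(acquire)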
// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
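// Usage sketch (illustrative): because "expected" is taken by value, a failed
// exchange does not report the observed value back to the caller; retry loops
// must re-attempt with the original expected value.
//
//   std::atomic<int> word{0};
//   while (!__kmp_atomic_compare_store_acq(&word, 0, 1)) {
//     /* spin until the 0 -> 1 transition succeeds */
//   }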
#if KMP_OS_DARWIN
// On Darwin the runtime supplies its own symbol lookup helper; there is no
// RTLD_NEXT-style "next definition" lookup here.
extern void *__kmp_lookup_symbol(const char *name);
#define KMP_DLSYM(name) __kmp_lookup_symbol(name)
#define KMP_DLSYM_NEXT(name) nullptr
#else
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
#endif
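// Usage sketch (illustrative): KMP_DLSYM_NEXT is the interposition idiom --
// look up the *next* definition of a symbol so an interposing wrapper can
// forward to the implementation it shadows. Names below are hypothetical.
//
//   typedef int (*pthread_create_t)(pthread_t *, const pthread_attr_t *,
//                                   void *(*)(void *), void *);
//   pthread_create_t real_pthread_create =
//       (pthread_create_t)KMP_DLSYM_NEXT("pthread_create");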