#include "kmp_config.h"
#include <atomic>
#include <stdarg.h>
#include <stdlib.h>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3

#define KMP_PTR_SKIP (sizeof(void *))

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
/* Compiler recognition */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif
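
// Note: the detection order above matters -- icc and clang also define
// __GNUC__, so they must be recognized before the __GNUC__ test.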
#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD)
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif
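
// Windows exposes machines with more than 64 logical CPUs as multiple
// "processor groups"; KMP_GROUP_AFFINITY gates the runtime's handling of
// that case, which only arises on 64-bit Windows.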
/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
#define KMP_USE_X87CONTROL 0

#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#if KMP_MSVC_COMPAT
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */
#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 ||                \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif
#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64) &&                  \
    (KMP_OS_FREEBSD || KMP_OS_LINUX)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif
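
// On the ABIs selected above, a va_list cannot portably be passed around by
// value and re-consumed, so the runtime hands out a pointer to it instead.
// The two macros let callers write a single code path, e.g. (illustrative):
//   void log_args(kmp_va_list ap) { vsomething(kmp_va_deref(ap)); }
//   ... va_list ap; va_start(ap, last); log_args(kmp_va_addr_of(ap));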
#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus
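
// The `spec` members are printf format strings ("d", "u", "lld", "llu");
// their definitions live in a .cpp file elsewhere in the runtime. Debug code
// can then print any supported T via traits_t<T>::spec.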
#define KMP_EXPORT extern

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif
/* Check if the OS/arch can support user-level mwait */
#define KMP_HAVE_MWAIT                                                         \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&    \
   !KMP_MIC2)
#define KMP_HAVE_UMWAIT                                                        \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&    \
   !KMP_MIC)
#if KMP_OS_WINDOWS
static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
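
// Both macros rely on the page size being a power of two: masking with
// (page_size - 1) isolates the offset within a page. For example, with
// 4096-byte pages, ALIGN_TO_PAGE(0x12345) yields 0x12000.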
/* ---------- Support for cache alignment, padding, etc. ----------------- */

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

#define CACHE_LINE 128 /* cache line size in bytes */

#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
#warning CACHE_LINE is too small.
#endif

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */
// Define attribute that indicates an intentional fall-through between
// switch case labels
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#define KMP_FALLTHROUGH() ((void)0)
#endif
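
// Typical use -- annotate an intentional fall-through so the compiler does
// not warn (illustrative):
//   switch (kind) {
//   case a:
//     prepare();
//     KMP_FALLTHROUGH();
//   case b:
//     run();
//   }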
#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG /* Nothing */
#endif

#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
#else
#define KMP_ATTRIBUTE_TARGET_RTM /* Nothing */
#endif
// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif
#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS(alias_of) /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif
#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
#endif
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)           \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias    \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                   \
  __asm__(                                                                     \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR(\
          api_name) "@" ver_str "\n\t");                                       \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                \
      api_name) "@@" default_ver "\n\t")

#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str)         \
  _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str,        \
                                 default_ver)                                  \
  __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias  \
      __attribute__((alias(KMP_STR(__kmp_api_##apic_name))));                  \
  __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR(               \
      apic_name) "@@" default_ver "\n\t");                                     \
  __asm__(                                                                     \
      ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR(\
          api_name) "@" ver_str "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num,                  \
                                ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
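
// KMP_VERSION_SYMBOL relies on ELF symbol versioning: each versioned entry
// point is an alias of __kmp_api_<name>, the ".symver" directives bind the
// alias to <name>@<ver_str>, and the primary symbol becomes the default
// <name>@@VERSION. This only works on ELF targets, which is why the
// non-versioned branch above exists.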
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
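
// KMP_ALIGN_CACHE pads hot per-thread data out to a cache-line boundary so
// that independently-updated fields do not share a line (false sharing), e.g.:
//   KMP_ALIGN_CACHE volatile kmp_int32 counter;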
/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};
// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
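
// There is no floating-point interlocked exchange, so the 32-bit float is
// type-punned through a long: the bit pattern is exchanged atomically and
// then reinterpreted back to kmp_real32.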
#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
#define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) _InterlockedExchangeAdd8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store_rel16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))
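
// The helpers below wrap the MSVC _Interlocked* intrinsics, which return the
// value previously stored at *p; comparing that value against cv yields the
// bool-style success result the KMP_COMPARE_AND_STORE_* macros expect.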
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}

inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv),    \
                                (kmp_int64)(cv))

#else // !(KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG)

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif // KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p)                                                 \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif
#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),               \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif
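
// The __sync builtins act as full barriers, which is why the _ACQ (acquire)
// variants above can simply alias their plain counterparts.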
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline bool mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                  kmp_uint64 cv,
                                                  kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))
#endif
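
// 32-bit MIPS lacks the 64-bit __sync builtins, so the 64-bit compare-and-swap
// is routed through the __atomic builtins (which can fall back to libatomic).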
#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}
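
// Note: GCC documents __sync_lock_test_and_set as an acquire barrier only,
// not a full barrier; it is used here purely as an atomic exchange.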
#else

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
1006 #define KMP_MB() asm("nop")
1007 #define KMP_IMB() asm("nop")
1014 #if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
1015 KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
1018 #define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
1020 #define KMP_MB() __sync_synchronize()
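
// On the weakly-ordered architectures listed above, KMP_MB() must emit a real
// full fence; on x86, the stronger hardware memory ordering lets the default
// below remain a no-op.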
#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif
#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
                              (kmp_int64)(b), (kmp_int64)(c))
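
// The TCR_*/TCW_* wrappers compile to plain loads and stores here; the
// TCX_SYNC_* forms go through a release compare-and-swap so that racing
// writers of the same location resolve to a single winner.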
#if KMP_ARCH_X86 || KMP_ARCH_MIPS
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))
#else /* 64 bit pointers */
#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))
#endif /* KMP_ARCH_X86 */
#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif
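
// microtask_t (above) is the type of a compiler-outlined parallel-region
// body: it receives pointers to the global and bound thread ids, followed by
// the region's shared arguments.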
#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif
// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual studio can't handle the asm sections in this code
#define KMP_USE_TSX (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif // KMP_USE_DYNAMIC_LOCK

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif
// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};
1177 #include "kmp_safe_c_api.h"
// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
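
// Illustrative use (assuming a std::atomic<kmp_int32> named counter/flag):
//   kmp_int32 old = KMP_ATOMIC_INC(&counter); // fetch_add(1, acq_rel)
//   KMP_ATOMIC_ST_REL(&flag, 1);              // store(1, release)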
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
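
// Unlike raw compare_exchange_strong, `expected` is taken by value, so the
// caller only learns success/failure -- the value observed on failure is
// discarded. Example (names illustrative):
//   std::atomic<int> gate(0);
//   if (__kmp_atomic_compare_store(&gate, 0, 1)) { /* we own the gate */ }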
#if KMP_OS_DARWIN && defined(KMP_DYNAMIC_LIB)
extern void *__kmp_lookup_symbol(const char *name);
#define KMP_DLSYM(name) __kmp_lookup_symbol(name)
#define KMP_DLSYM_NEXT(name) nullptr
#else
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
#endif
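
// On ELF platforms, KMP_DLSYM_NEXT uses RTLD_NEXT so a wrapper library can
// forward to the next definition of the same symbol in search order; the
// Darwin branch above substitutes a custom lookup and provides no "next"
// lookup.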