#include "kmp_config.h"

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3

#define KMP_PTR_SKIP (sizeof(void *))
#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0
#define KMP_COMPILER_ICX 0

#if __INTEL_CLANG_COMPILER
#undef KMP_COMPILER_ICX
#define KMP_COMPILER_ICX 1
#elif defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif
#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD || KMP_OS_NETBSD || \
     KMP_OS_DRAGONFLY || KMP_OS_AIX) && \
    !KMP_OS_WASI && !KMP_OS_EMSCRIPTEN
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif
#if (KMP_OS_LINUX || (KMP_OS_FREEBSD && __FreeBSD_version >= 1301000))
#define KMP_HAVE_SCHED_GETCPU 1
#else
#define KMP_HAVE_SCHED_GETCPU 0
#endif
/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
/* _Quad is already defined for the Intel compilers */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
typedef long double _Quad;
#elif KMP_COMPILER_GCC
#if (!KMP_OS_NETBSD || __GNUC__ >= 10)
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#elif KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS || KMP_ARCH_WASM || \
    KMP_ARCH_PPC || KMP_ARCH_AARCH64_32
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 || \
    KMP_ARCH_VE || KMP_ARCH_S390X
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif
#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_WASM || KMP_ARCH_PPC
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif /* KMP_OS_WINDOWS / KMP_OS_UNIX */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64 || KMP_ARCH_WASM) && \
    (KMP_OS_FREEBSD || KMP_OS_LINUX || KMP_OS_WASI)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif
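
// Example (illustrative; example_vsum/example_print are hypothetical callers,
// not part of the runtime): the kmp_va_list abstraction lets a callee consume
// a va_list the same way whether it travels by pointer or by value:
//
//   void example_vsum(int n, kmp_va_list ap) {
//     int sum = 0;
//     for (int i = 0; i < n; ++i)
//       sum += va_arg(kmp_va_deref(ap), int);
//   }
//   void example_print(int n, ...) {
//     va_list ap;
//     va_start(ap, n);
//     example_vsum(n, kmp_va_addr_of(ap));
//     va_end(ap);
//   }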
#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
template <typename T> struct traits_t {};
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
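
// Example (illustrative sketch, not from the original header): traits_t lets
// templated code recover the print specifier and size of an integral type.
// example_report is hypothetical and would need <cstdio>:
//
//   template <typename T> void example_report(T v) {
//     printf("spec=%s size=%d\n", traits_t<T>::spec, traits_t<T>::type_size);
//   }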
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */
#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif
/* Check if the OS/arch can support user-level mwait */
#define KMP_HAVE_MWAIT \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) && \
   !KMP_MIC2)
#define KMP_HAVE_UMWAIT \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) && \
   !KMP_MIC)
#if KMP_OS_WINDOWS
#define WIN32_NO_STATUS
#include <windows.h>
static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif
#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
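
// Worked example (illustrative, assuming a 4096-byte page): the mask
// KMP_GET_PAGE_SIZE() - 1 is 0xFFF, so
//   PAGE_ALIGNED((void *)0x2000)  -> true   (0x2000 & 0xFFF == 0)
//   PAGE_ALIGNED((void *)0x2345)  -> false  (0x2345 & 0xFFF == 0x345)
//   ALIGN_TO_PAGE((void *)0x2345) -> (void *)0x2000  (low bits cleared)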
#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
#warning CACHE_LINE is too small.
#endif
#endif

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() [[fallthrough]]
// icc cannot properly tell this attribute is absent so force off
#elif KMP_COMPILER_ICC
#define KMP_FALLTHROUGH() ((void)0)
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#define KMP_FALLTHROUGH() ((void)0)
#endif
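
// Example (illustrative): KMP_FALLTHROUGH() marks a deliberate fall-through
// so -Wimplicit-fallthrough style diagnostics stay quiet where supported:
//
//   switch (kind) {
//   case 1:
//     prepare();
//     KMP_FALLTHROUGH(); // case 1 intentionally also runs case 2's code
//   case 2:
//     finish();
//     break;
//   }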
#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG
#endif

#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
#else
#define KMP_ATTRIBUTE_TARGET_RTM
#endif
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif
#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS(alias_of)
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif
#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL
#endif
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
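
// The two-level expansion is the point: _KMP_STR stringizes its argument as
// written, while KMP_STR macro-expands it first. Illustrative:
//
//   #define WIDTH 42
//   _KMP_STR(WIDTH) // "WIDTH"
//   KMP_STR(WIDTH)  // "42"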
#ifdef KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t"); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR( \
      api_name) "@@" default_ver "\n\t")

#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str) \
  _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, \
                                 default_ver) \
  __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##apic_name)))); \
  __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR( \
      apic_name) "@@" default_ver "\n\t"); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t")
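
// Illustrative use (hedged; `foo` is a placeholder, the real entry points are
// declared elsewhere in the runtime): given a definition of __kmp_api_foo,
//
//   KMP_VERSION_SYMBOL(foo, 10, "OMP_1.0");
//
// emits an alias plus .symver directives so foo@OMP_1.0 binds to the
// versioned alias while foo@@VERSION remains the default symbol.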
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, \
                                ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
#if KMP_ASM_INTRINS && KMP_OS_WINDOWS && \
    !((KMP_ARCH_AARCH64 || KMP_ARCH_ARM) && \
      (KMP_COMPILER_CLANG || KMP_COMPILER_GCC))
/* cast p to correct type so that proper intrinsic will be used */
#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))
#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}

inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  _InterlockedCompareExchange8((p), (sv), (cv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  _InterlockedCompareExchange16((p), (sv), (cv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv), \
                                (kmp_int64)(cv))

#define KMP_XCHG_FIXED8(p, v) \
  _InterlockedExchange8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) _InterlockedExchange16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v) \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif
#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v) \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v) \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v), \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
                               (void *)(sv))
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline kmp_uint64 mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                        kmp_uint64 cv,
                                                        kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))
#endif
#if KMP_OS_DARWIN && defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1800
#define KMP_XCHG_FIXED8(p, v) \
  __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v), \
                      __ATOMIC_SEQ_CST)
#else
#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#endif
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  volatile kmp_int32 *up;
  kmp_int32 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int32 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real32 ftmp;
  memcpy(&ftmp, &tmp, sizeof(tmp));
  return ftmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  volatile kmp_int64 *up;
  kmp_int64 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int64 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real64 dtmp;
  memcpy(&dtmp, &tmp, sizeof(tmp));
  return dtmp;
}
#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))
#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 || \
    KMP_ARCH_VE || KMP_ARCH_S390X || KMP_ARCH_PPC || KMP_ARCH_AARCH64_32
#undef KMP_MB
#if KMP_OS_WINDOWS
#define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
#else
#define KMP_MB() __sync_synchronize()
#endif
#endif
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
#define KMP_MFENCE_() _mm_mfence()
#define KMP_SFENCE_() _mm_sfence()
#elif KMP_COMPILER_MSVC
#define KMP_MFENCE_() MemoryBarrier()
#define KMP_SFENCE_() MemoryBarrier()
#else
#define KMP_MFENCE_() __sync_synchronize()
#define KMP_SFENCE_() __sync_synchronize()
#endif
#define KMP_MFENCE() \
  if (UNLIKELY(!__kmp_cpuinfo.initialized)) { \
    __kmp_query_cpuid(&__kmp_cpuinfo); \
  } \
  if (__kmp_cpuinfo.flags.sse2) { \
    KMP_MFENCE_(); \
  }
#define KMP_SFENCE() KMP_SFENCE_()
#else
#define KMP_MFENCE() KMP_MB()
#define KMP_SFENCE() KMP_MB()
#endif
#define KMP_ST_REL32(A, D) (*(A) = (D))
#define KMP_ST_REL64(A, D) (*(A) = (D))
#define KMP_LD_ACQ32(A) (*(A))
#define KMP_LD_ACQ64(A) (*(A))
#define TCW_1(a, b) (a) = (b)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c) \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c) \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
                              (kmp_int64)(b), (kmp_int64)(c))
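
// Illustrative: the TC* macros are the runtime's thread-consistent accessors.
// Publishing and CAS-updating a 4-byte flag might read:
//
//   volatile kmp_int32 flag = 0;
//   TCW_SYNC_4(flag, 1);    // plain volatile store
//   TCX_SYNC_4(flag, 1, 2); // CAS 1 -> 2 via KMP_COMPARE_AND_STORE_REL32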
#if KMP_ARCH_X86 || KMP_ARCH_MIPS || KMP_ARCH_WASM || KMP_ARCH_PPC
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))
#else /* 64 bit pointers */
#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))
#endif
#define FTN_TRUE TRUE
#define FTN_FALSE FALSE
#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif
#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif
#define KMP_USE_BGET 1

#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

#define KMP_USE_DYNAMIC_LOCK 1
#if KMP_USE_DYNAMIC_LOCK
#define KMP_USE_TSX (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif // KMP_USE_DYNAMIC_LOCK
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
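
// Illustrative usage of the std::atomic wrappers above:
//
//   std::atomic<kmp_int32> counter{0};
//   KMP_ATOMIC_INC(&counter);                     // fetch_add(1, acq_rel)
//   kmp_int32 seen = KMP_ATOMIC_LD_ACQ(&counter); // load(acquire)
//   KMP_ATOMIC_ST_REL(&counter, seen + 1);        // store(release)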
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
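
// Illustrative: the helpers take `expected` by value, so callers never see it
// rewritten on failure; the return value says whether the swap happened.
//
//   std::atomic<int> state{0};
//   bool claimed = __kmp_atomic_compare_store_acq(&state, 0, 1);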
#if KMP_OS_DARWIN
// Lookup on Mac OS cannot use dlsym with RTLD_NEXT; use the runtime's own
// symbol lookup helper instead.
void *__kmp_lookup_symbol(const char *name, bool next = false);
#define KMP_DLSYM(name) __kmp_lookup_symbol(name)
#define KMP_DLSYM_NEXT(name) __kmp_lookup_symbol(name, true)
#elif KMP_OS_WASI || KMP_OS_EMSCRIPTEN
#define KMP_DLSYM(name) nullptr
#define KMP_DLSYM_NEXT(name) nullptr
#else
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
#endif
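
// Illustrative: interposition code resolves the next definition of a symbol
// with KMP_DLSYM_NEXT and forwards to it when available (the WASI/Emscripten
// branch yields nullptr, since dynamic lookup does not exist there):
//
//   typedef int (*real_fn_t)(void);
//   real_fn_t real_fn = (real_fn_t)KMP_DLSYM_NEXT("some_symbol");
//   if (real_fn)
//     real_fn();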
#ifndef __has_builtin
#define __has_builtin(x) 0
#endif

// Same as LLVM_BUILTIN_UNREACHABLE. States that it is UB to reach this point.
#if __has_builtin(__builtin_unreachable) || defined(__GNUC__)
#define KMP_BUILTIN_UNREACHABLE __builtin_unreachable()
#elif defined(_MSC_VER)
#define KMP_BUILTIN_UNREACHABLE __assume(false)
#else
#define KMP_BUILTIN_UNREACHABLE
#endif