|
/* Fortran name-decoration variants (selector constants; which one is active
   is presumably chosen elsewhere — TODO confirm against the full header). */
#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3

/* Size of one pointer, used as a stride when skipping pointer slots. */
#define KMP_PTR_SKIP (sizeof(void *))

/* Generic on/off switch values. */
#define KMP_OFF 0
#define KMP_ON 1

/* Memory-consistency implementation selector: volatile accesses vs fences. */
#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE

/* Fallbacks for compilers lacking the __has_* feature-test operators.
   Guarded with #ifndef: modern GCC/Clang provide these as builtins and
   reject an unconditional redefinition. */
#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif
#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
|
/* Compiler-identification flags; all default to 0 here (detection logic that
   sets one of them to 1 presumably lives elsewhere in the header). */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0
#define KMP_COMPILER_ICX 0

/* Optional-feature availability flags, all disabled in this configuration. */
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#define KMP_HAVE_SCHED_GETCPU 0
#define KMP_HAVE_QUAD 0
#define KMP_USE_X87CONTROL 0

/* Maximum size_t value on a 64-bit target (all bits set). */
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
|
/* Pointer-sized integers are available; printf length specifiers for them
   ("ld"/"lu" implies an LP64 target). */
#define KMP_INTPTR 1
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"

/* 32-bit signed limits, expressed via the project's kmp_int32 type. */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

/* va_list handling: identity pass-through on this ABI (no array wrapping —
   NOTE(review): confirm for ABIs where va_list is an array type). */
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)

/* Cast helpers: C has no const_cast/reinterpret_cast, so plain casts. */
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)

#define KMP_EXPORT extern /* export declaration in guide libraries */
|
/* Presence flags (defined empty, tested with #ifdef) for the mwait/umwait
   wait instructions. */
#define KMP_HAVE_MWAIT
#define KMP_HAVE_UMWAIT

/* Page-size helpers built on POSIX getpagesize().  Both alignment macros
   assume the page size is a power of two. */
#define KMP_GET_PAGE_SIZE() getpagesize()
#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */
#define CACHE_LINE 128 /* cache line size in bytes */

/* No-op fallbacks when prefetch / fallthrough / target attributes are not
   available on this compiler. */
#define KMP_CACHE_PREFETCH(ADDR) /* nothing */
#define KMP_FALLTHROUGH() ((void)0)
#define KMP_ATTRIBUTE_TARGET_WAITPKG /* Nothing */
#define KMP_ATTRIBUTE_TARGET_RTM /* Nothing */
|
/* GCC-style function/variable attributes. */
#define KMP_NORETURN __attribute__((noreturn))
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))

/* Weak-symbol decorations: no-ops in this configuration. */
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */

/* Two-level stringization so that macro arguments are expanded before
   being turned into a string literal. */
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x

/* Symbol-versioning hooks: plain names, no version scripts, in this build. */
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str) /* Nothing */

/* Alignment conveniences built on KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
|
/* Atomic fetch-and-op primitives built on the GCC __sync builtins.  Every
   macro returns the value the target held BEFORE the operation.  The _ACQ
   variants are defined identically to the plain ones here (the __sync
   builtins are full barriers). */
#define KMP_TEST_THEN_INC32(p) __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)

/* Add-by-4 variants (used e.g. for stride-4 counters). */
#define KMP_TEST_THEN_ADD4_32(p) __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)

/* Atomic decrement, returning the previous value. */
#define KMP_TEST_THEN_DEC64(p) __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC32(p) __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)

/* Atomic add of an arbitrary value. */
#define KMP_TEST_THEN_ADD8(p, v) __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))

/* Atomic bitwise OR / AND, returning the previous value. */
#define KMP_TEST_THEN_OR8(p, v) __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
|
/* Compare-and-store primitives.  The bodies were empty in the extracted
   source (the backslash-continued definitions were evidently dropped), which
   would make every CAS silently compile to nothing — restored here to the
   GCC __sync builtin form.  The boolean variants return nonzero iff *p
   equaled cv and sv was stored; the _RET variants return the previous value
   of *p.  ACQ/REL are identical here: the __sync builtins are full
   barriers. */
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))
|
/* Atomic exchange: store v, return the previous value.  Built on
   __sync_lock_test_and_set, which per the GCC docs is an acquire barrier
   only (sufficient for the original's usage, which defined it the same
   way). */
#define KMP_XCHG_FIXED8(p, v) __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v) __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

/* Memory / instruction barriers: no-ops in this configuration. */
#define KMP_MB() /* nothing to do */
#define KMP_MFENCE() KMP_MB()
#define KMP_SFENCE() KMP_MB()
#define KMP_IMB() /* nothing to do */
|
#define | KMP_ST_REL32(A, D) (*(A) = (D)) |
|
#define | KMP_ST_REL64(A, D) (*(A) = (D)) |
|
#define | KMP_LD_ACQ32(A) (*(A)) |
|
#define | KMP_LD_ACQ64(A) (*(A)) |
|
#define | TCR_1(a) (a) |
|
#define | TCW_1(a, b) (a) = (b) |
|
#define | TCR_4(a) (a) |
|
#define | TCW_4(a, b) (a) = (b) |
|
#define | TCI_4(a) (++(a)) |
|
#define | TCD_4(a) (--(a)) |
|
#define | TCR_8(a) (a) |
|
#define | TCW_8(a, b) (a) = (b) |
|
#define | TCI_8(a) (++(a)) |
|
#define | TCD_8(a) (--(a)) |
|
#define | TCR_SYNC_4(a) (a) |
|
#define | TCW_SYNC_4(a, b) (a) = (b) |
|
#define | TCX_SYNC_4(a, b, c) |
|
#define | TCR_SYNC_8(a) (a) |
|
#define | TCW_SYNC_8(a, b) (a) = (b) |
|
#define | TCX_SYNC_8(a, b, c) |
|
#define | TCR_PTR(a) ((void *)TCR_8(a)) |
|
#define | TCW_PTR(a, b) TCW_8((a), (b)) |
|
#define | TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a)) |
|
#define | TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b)) |
|
#define | TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c))) |
|
/* Fortran logical values map onto the C TRUE/FALSE macros (defined
   elsewhere in the project). */
#define FTN_TRUE TRUE
#define FTN_FALSE FALSE

/* No volatile qualification needed on this path. */
#define VOLATILE_CAST(x) (x)

/* Spin-wait entry points and their 32-bit comparison predicates
   (__kmp_wait_4 and friends are defined elsewhere in the runtime). */
#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround marker kept for an old EFI2 compiler issue. */
#define STATIC_EFI2_WORKAROUND static

/* Build-configuration switches, all enabled. */
#define KMP_USE_BGET 1
#define USE_CMPXCHG_FIX 1
#define KMP_USE_DYNAMIC_LOCK 1
|
/* C++ std::atomic accessors with explicit memory ordering.  These expand to
   member calls on std::atomic<T> objects (note std::memory_order_##order),
   so they are only usable from C++ translation units. */
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

/* Load/store wrappers naming the common orderings. */
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

/* Read-modify-write wrappers; the fetch_* operations return the previous
   value.  Default ordering is acq_rel, with explicit relaxed variants. */
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
|
/* Dynamic-symbol lookup helpers (require <dlfcn.h>; RTLD_DEFAULT/RTLD_NEXT
   are GNU extensions). */
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)

/* Fallback for compilers lacking __has_builtin; guarded with #ifndef since
   modern GCC/Clang provide it as a builtin and reject redefinition. */
#ifndef __has_builtin
#define __has_builtin(x) 0
#endif

/* No unreachable hint available: expands to nothing. */
#define KMP_BUILTIN_UNREACHABLE
|