#if KMP_OS_WINDOWS && KMP_ARCH_X86
    typename traits_t<T>::signed_t st,
    typename traits_t<T>::signed_t chunk,
    T *p_ub,
    typename traits_t<T>::signed_t *p_st, T nproc, T unit_id);
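The parameter fragments above belong to the two template entry points that set up a dynamically scheduled loop and then repeatedly hand out chunks; their full signatures, __kmp_dispatch_init_algorithm and __kmp_dispatch_next_algorithm, appear in the reference section below. As a rough, self-contained model of that init-once / next-until-empty contract (all names here are hypothetical, and the policy is a plain fixed-chunk dynamic schedule, not the runtime's code):

// Standalone model of the init/next dealing protocol.
#include <atomic>
#include <cstdio>

struct toy_shared {
  std::atomic<long> next_lb{0}; // next unclaimed lower bound
  long ub;                      // inclusive upper bound of the iteration space
  long chunk;                   // fixed chunk size
};

static void toy_init(toy_shared &sh, long lb, long ub, long chunk) {
  sh.next_lb.store(lb);
  sh.ub = ub;
  sh.chunk = chunk > 0 ? chunk : 1;
}

// Returns 1 and fills [*p_lb, *p_ub] while work remains, 0 when exhausted.
static int toy_next(toy_shared &sh, long *p_lb, long *p_ub) {
  long lb = sh.next_lb.fetch_add(sh.chunk); // claim a chunk atomically
  if (lb > sh.ub)
    return 0;
  *p_lb = lb;
  *p_ub = lb + sh.chunk - 1 < sh.ub ? lb + sh.chunk - 1 : sh.ub;
  return 1;
}

int main() {
  toy_shared sh;
  toy_init(sh, 0, 99, 16);
  long lb, ub;
  while (toy_next(sh, &lb, &ub))
    std::printf("chunk [%ld, %ld]\n", lb, ub);
}

Under multiple threads each call claims a disjoint chunk because the lower bound is taken with an atomic fetch_add; the real __kmp_dispatch_next_algorithm additionally handles the full set of schedules, the per-thread dispatch_private_info_template state, and the p_last last-chunk flag.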
#if KMP_STATIC_STEAL_ENABLED
  typedef typename traits_t<T>::unsigned_t UT;
  typedef typename traits_t<T>::signed_t ST;
#if KMP_WEIGHTED_ITERATIONS_SUPPORTED
  UT num_procs_with_pcore;
  T first_thread_with_ecore;
  typedef typename traits_t<T>::unsigned_t UT;
  typedef typename traits_t<T>::signed_t ST;
#if KMP_USE_HIER_SCHED
  kmp_int32 get_hier_id() const { return hier_id; }
  typedef typename traits_t<T>::unsigned_t UT;
  typedef typename traits_t<T>::signed_t ST;

  typedef typename traits_t<T>::unsigned_t UT;
#if KMP_USE_HIER_SCHED
template <typename UT>
static UT __kmp_wait(volatile UT *spinner, UT checker,
                     kmp_uint32 (*pred)(UT, UT) USE_ITT_BUILD_ARG(void *obj)) {
  volatile UT *spin = spinner;
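Only the opening lines of __kmp_wait survive above; per its signature it spins on *spinner until pred(value, checker) holds and returns the value it observed. A minimal sketch of that pattern, assuming std::atomic loads and std::this_thread::yield in place of the runtime's volatile reads and KMP_YIELD backoff:

// Spin-wait with a caller-supplied predicate (model only).
#include <atomic>
#include <thread>

template <typename UT>
static UT toy_wait(const std::atomic<UT> *spinner, UT checker,
                   bool (*pred)(UT, UT)) {
  UT v;
  while (!pred(v = spinner->load(std::memory_order_acquire), checker))
    std::this_thread::yield(); // the runtime uses KMP_YIELD-style backoff here
  return v;
}

// Example predicate in the spirit of __kmp_ge: "value has reached checker".
template <typename UT> static bool toy_ge(UT value, UT checker) {
  return value >= checker;
}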
template <typename UT>
void __kmp_dispatch_deo(int *gtid_ref, int *cid_ref, ident_t *loc_ref) {
  int gtid = *gtid_ref;
  // ...
  KD_TRACE(100, ("__kmp_dispatch_deo: T#%d called\n", gtid));
  // ...
      th->th.th_dispatch->th_dispatch_pr_current);
  // ...
#if KMP_USE_DYNAMIC_LOCK
  // ...
  if (!th->th.th_team->t.t_serialized) {
    // ...
        th->th.th_dispatch->th_dispatch_sh_current);
    // ...
        th->th.th_dispatch->th_dispatch_pr_current);
    // ...
    lower = pr->u.p.ordered_lower;
#if !defined(KMP_GOMP_COMPAT)
    // ...
        &p->stack_data[p->w_top]);
    // ...
        "ordered_iter:%%%s lower:%%%s\n",
        traits_t<UT>::spec, traits_t<UT>::spec);
    // ...
        "ordered_iter:%%%s lower:%%%s\n",
        traits_t<UT>::spec, traits_t<UT>::spec);
    // ...
  KD_TRACE(100, ("__kmp_dispatch_deo: T#%d returned\n", gtid));
template <typename UT>
void __kmp_dispatch_dxo(int *gtid_ref, int *cid_ref, ident_t *loc_ref) {
  typedef typename traits_t<UT>::signed_t ST;
  int gtid = *gtid_ref;
  // ...
  KD_TRACE(100, ("__kmp_dispatch_dxo: T#%d called\n", gtid));
  // ...
      th->th.th_dispatch->th_dispatch_pr_current);
  // ...
  if (!th->th.th_team->t.t_serialized) {
    // ...
        th->th.th_dispatch->th_dispatch_sh_current);
    // ...
        th->th.th_dispatch->th_dispatch_pr_current);
    // ...
#if !defined(KMP_GOMP_COMPAT)
    // ...
        &p->stack_data[p->w_top]);
    // ...
        ("__kmp_dispatch_dxo: T#%d bumping ordered ordered_bumped=%d\n",
    // ...
  KD_TRACE(100, ("__kmp_dispatch_dxo: T#%d returned\n", gtid));
template <typename UT>
static __forceinline long double __kmp_pow(long double x, UT y) {
  long double s = 1.0L;
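Only the accumulator initialisation of __kmp_pow survives above. For an unsigned integer exponent a helper like this is naturally written as square-and-multiply; a self-contained sketch (a model, not necessarily the exact body):

// x^y by square-and-multiply for unsigned integer y (0^0 treated as 1).
#include <cstdio>

template <typename UT>
static long double toy_pow(long double x, UT y) {
  long double s = 1.0L;
  while (y) {
    if (y & 1)
      s *= x;   // fold in the current power of x when this exponent bit is set
    x *= x;     // square for the next bit
    y >>= 1;
  }
  return s;
}

int main() { std::printf("%Lf\n", toy_pow(0.75L, 8u)); } // prints ~0.100113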
template <typename T>
static __inline typename traits_t<T>::unsigned_t
__kmp_dispatch_guided_remaining(T tc, typename traits_t<T>::floating_t base,
                                typename traits_t<T>::unsigned_t idx) {
  typedef typename traits_t<T>::unsigned_t UT;
  long double x = tc * __kmp_pow<UT>(base, idx);
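__kmp_dispatch_guided_remaining evaluates the geometric decay behind guided scheduling: after idx chunks have been handed out, roughly tc * base^idx iterations remain, and the fragment suggests the product is then turned into an integer count. A worked sketch, with base = 1 - 1/nproc chosen purely for illustration (the runtime computes its own base from the thread count):

// Geometric decay of remaining work under a guided-style schedule.
#include <cmath>
#include <cstdio>

int main() {
  const unsigned long tc = 1000;   // trip count
  const unsigned nproc = 4;
  const long double base = 1.0L - 1.0L / nproc;
  for (unsigned idx = 0; idx <= 4; ++idx) {
    long double x = tc * powl(base, idx);
    unsigned long remaining = (unsigned long)ceill(x); // round up to a count
    std::printf("after %u chunks: ~%lu iterations left\n", idx, remaining);
  }
}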
sched_type
Describes the loop schedule to be used for a parallel for loop.
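For orientation, the schedule kind written on the loop directive is what ultimately reaches the dispatcher as a sched_type value together with the optional chunk. A minimal OpenMP example (the exact enumerator it maps to is compiler and runtime specific):

// A guided schedule with chunk 4; for non-static schedules the compiler
// typically lowers such a loop into dispatch-init / dispatch-next calls
// carrying the corresponding sched_type value and chunk.
#include <cstdio>
#include <omp.h>

int main() {
  #pragma omp parallel for schedule(guided, 4)
  for (int i = 0; i < 64; ++i)
    std::printf("i=%d on thread %d\n", i, omp_get_thread_num());
}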
#define KMP_YIELD_OVERSUB_ELSE_SPIN(count, time)
struct KMP_ALIGN_CACHE dispatch_private_info64 dispatch_private_info64_t
kmp_info_t ** __kmp_threads
#define KMP_INIT_YIELD(count)
#define KMP_INIT_BACKOFF(time)
int __kmp_env_consistency_check
union KMP_ALIGN_CACHE kmp_info kmp_info_t
#define KMP_DEBUG_ASSERT(cond)
unsigned long long kmp_uint64
static __inline traits_t< T >::unsigned_t __kmp_dispatch_guided_remaining(T tc, typename traits_t< T >::floating_t base, typename traits_t< T >::unsigned_t idx)
static const int guided_int_param
__forceinline kmp_int64 test_then_inc_acq< kmp_int64 >(volatile kmp_int64 *p)
__forceinline kmp_int32 test_then_inc< kmp_int32 >(volatile kmp_int32 *p)
kmp_uint32 __kmp_eq(T value, T checker)
void __kmp_dispatch_dxo(int *gtid_ref, int *cid_ref, ident_t *loc_ref)
static UT __kmp_wait(volatile UT *spinner, UT checker, kmp_uint32(*pred)(UT, UT) USE_ITT_BUILD_ARG(void *obj))
static __forceinline T test_then_inc_acq(volatile T *p)
__forceinline kmp_int64 test_then_add< kmp_int64 >(volatile kmp_int64 *p, kmp_int64 d)
void __kmp_dispatch_deo_error(int *gtid_ref, int *cid_ref, ident_t *loc_ref)
__forceinline kmp_int64 test_then_inc< kmp_int64 >(volatile kmp_int64 *p)
static __forceinline kmp_int32 compare_and_swap(volatile T *p, T c, T s)
int __kmp_dispatch_next_algorithm(int gtid, dispatch_private_info_template< T > *pr, dispatch_shared_info_template< T > volatile *sh, kmp_int32 *p_last, T *p_lb, T *p_ub, typename traits_t< T >::signed_t *p_st, T nproc, T unit_id)
__forceinline kmp_int32 test_then_inc_acq< kmp_int32 >(volatile kmp_int32 *p)
static __forceinline T test_then_inc(volatile T *p)
__forceinline kmp_int32 compare_and_swap< kmp_int64 >(volatile kmp_int64 *p, kmp_int64 c, kmp_int64 s)
void __kmp_dispatch_init_algorithm(ident_t *loc, int gtid, dispatch_private_info_template< T > *pr, enum sched_type schedule, T lb, T ub, typename traits_t< T >::signed_t st, typename traits_t< T >::signed_t chunk, T nproc, T unit_id)
static __forceinline long double __kmp_pow(long double x, UT y)
static const double guided_flt_param
static __forceinline T test_then_add(volatile T *p, T d)
void __kmp_dispatch_dxo_error(int *gtid_ref, int *cid_ref, ident_t *loc_ref)
void __kmp_dispatch_deo(int *gtid_ref, int *cid_ref, ident_t *loc_ref)
__forceinline kmp_int32 test_then_add< kmp_int32 >(volatile kmp_int32 *p, kmp_int32 d)
__forceinline kmp_int32 compare_and_swap< kmp_int32 >(volatile kmp_int32 *p, kmp_int32 c, kmp_int32 s)
kmp_uint32 __kmp_ge(T value, T checker)
void __kmp_push_sync(int gtid, enum cons_type ct, ident_t const *ident, kmp_user_lock_p lck)
void __kmp_pop_sync(int gtid, enum cons_type ct, ident_t const *ident)
void __kmp_error_construct2(kmp_i18n_id_t id, enum cons_type ct, ident_t const *ident, struct cons_data const *cons)
#define KMP_FSYNC_RELEASING(obj)
#define KMP_FSYNC_SPIN_ACQUIRED(obj)
#define KMP_FSYNC_SPIN_PREPARE(obj)
#define USE_ITT_BUILD_ARG(x)
#define KMP_FSYNC_SPIN_INIT(obj, spin)
#define KMP_TEST_THEN_ADD32(p, v)
#define KMP_TEST_THEN_ADD64(p, v)
#define KMP_TEST_THEN_INC32(p)
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)
#define KMP_TEST_THEN_INC_ACQ64(p)
#define KMP_TEST_THEN_INC_ACQ32(p)
#define KMP_TEST_THEN_INC64(p)
Functions for collecting statistics.
char * __kmp_str_format(char const *format,...)
void __kmp_str_free(char **str)
traits_t< T >::unsigned_t UT
traits_t< T >::signed_t ST
std::atomic< kmp_uint32 > steal_flag
union KMP_ALIGN_CACHE dispatch_private_info_template::private_info_tmpl u
kmp_uint32 ordered_bumped
dispatch_private_info * next
traits_t< T >::signed_t ST
UT ordered_dummy[KMP_MAX_ORDERED - 3]
volatile UT ordered_iteration
traits_t< T >::unsigned_t UT
traits_t< T >::unsigned_t UT
kmp_int32 doacross_num_done
kmp_uint32 * doacross_flags
union dispatch_shared_info_template::shared_info_tmpl u
volatile kmp_int32 doacross_buf_idx
volatile kmp_uint32 buffer_index
dispatch_private_info64_t p64
dispatch_private_infoXX_template< T > p
dispatch_shared_info64_t s64
dispatch_shared_infoXX_template< UT > s