#define KMP_FTN_PLAIN   1
#define KMP_FTN_APPEND  2
#define KMP_FTN_UPPER   3

#define KMP_PTR_SKIP    (sizeof(void*))

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE    1

#ifndef KMP_MEM_CONS_MODEL
# define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif
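/* Operating system and architecture detection: exactly one of the KMP_OS_*
   macros and one of the KMP_ARCH_* macros is switched to 1 below, and the
   sums are checked afterwards to catch unknown targets. */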
#define KMP_OS_LINUX    0
#define KMP_OS_DARWIN   0
#define KMP_OS_WINDOWS  0

#define KMP_ARCH_X86    0
#define KMP_ARCH_X86_64 0

# undef KMP_OS_WINDOWS
# define KMP_OS_WINDOWS 1

#if ( defined __APPLE__ && defined __MACH__ )
# define KMP_OS_DARWIN 1

#if ( defined __linux )
# define KMP_OS_LINUX 1

#if (1 != KMP_OS_LINUX + KMP_OS_DARWIN + KMP_OS_WINDOWS)

#if KMP_OS_LINUX || KMP_OS_DARWIN
# define KMP_OS_UNIX 1

# undef KMP_ARCH_X86_64
# define KMP_ARCH_X86_64 1

# define KMP_ARCH_X86 1

# if defined __x86_64
#  undef KMP_ARCH_X86_64
#  define KMP_ARCH_X86_64 1

# define KMP_ARCH_X86 1

#if (1 != KMP_ARCH_X86 + KMP_ARCH_X86_64)
# error Unknown or unsupported architecture
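/* Per-platform Fortran name mangling and fixed-width integer types: the
   Windows branch uses __int64 (or a struct fallback when KMP_STRUCT64 is
   defined), the Unix branch uses long long. */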
# if defined KMP_WIN_CDECL || !defined GUIDEDLL_EXPORTS
#  define USE_FTN_CDECL    KMP_FTN_UPPER

# define KMP_FTN           KMP_FTN_PLAIN
# define USE_FTN_EXTRA     KMP_FTN_PLAIN

# if defined KMP_WIN_STDCALL || !defined GUIDEDLL_EXPORTS
#  define USE_FTN_STDCALL  KMP_FTN_UPPER

typedef char               kmp_int8;
typedef unsigned char      kmp_uint8;
typedef short              kmp_int16;
typedef unsigned short     kmp_uint16;
typedef int                kmp_int32;
typedef unsigned int       kmp_uint32;
# define KMP_INT32_SPEC    "d"
# define KMP_UINT32_SPEC   "u"
# ifndef KMP_STRUCT64
typedef __int64            kmp_int64;
typedef unsigned __int64   kmp_uint64;
#define KMP_INT64_SPEC     "I64d"
#define KMP_UINT64_SPEC    "I64u"

struct kmp_struct64 {

typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;

# define KMP_INTPTR 1
typedef __int64            kmp_intptr_t;
typedef unsigned __int64   kmp_uintptr_t;
# define KMP_INTPTR_SPEC   "I64d"
# define KMP_UINTPTR_SPEC  "I64u"
# define KMP_FTN           KMP_FTN_PLAIN
# define USE_FTN_CDECL     KMP_FTN_PLAIN
# define USE_FTN_EXTRA     KMP_FTN_APPEND
typedef char               kmp_int8;
typedef unsigned char      kmp_uint8;
typedef short              kmp_int16;
typedef unsigned short     kmp_uint16;
typedef int                kmp_int32;
typedef unsigned int       kmp_uint32;
typedef long long          kmp_int64;
typedef unsigned long long kmp_uint64;
# define KMP_INT32_SPEC    "d"
# define KMP_UINT32_SPEC   "u"
# define KMP_INT64_SPEC    "lld"
# define KMP_UINT64_SPEC   "llu"
# define KMP_SIZE_T_SPEC  KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64
# define KMP_SIZE_T_SPEC  KMP_UINT64_SPEC
#else
# error "Can't determine size_t printf format specifier."
#endif

# define KMP_SIZE_T_MAX   (0xFFFFFFFF)

# define KMP_SIZE_T_MAX   (0xFFFFFFFFFFFFFFFF)

typedef size_t  kmp_size_t;
typedef float   kmp_real32;
typedef double  kmp_real64;

# define KMP_INTPTR 1
typedef long            kmp_intptr_t;
typedef unsigned long   kmp_uintptr_t;
# define KMP_INTPTR_SPEC  "ld"
# define KMP_UINTPTR_SPEC "lu"
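/* kmp_int / kmp_uint follow the natural word size of the target: the 64-bit
   typedefs on 64-bit builds, the 32-bit typedefs otherwise. */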
typedef kmp_int64  kmp_int;
typedef kmp_uint64 kmp_uint;
# define KMP_INT_SPEC  KMP_INT64_SPEC
# define KMP_UINT_SPEC KMP_UINT64_SPEC
# define KMP_INT_MAX   ((kmp_int64)0x7FFFFFFFFFFFFFFFLL)
# define KMP_INT_MIN   ((kmp_int64)0x8000000000000000LL)

typedef kmp_int32  kmp_int;
typedef kmp_uint32 kmp_uint;
# define KMP_INT_SPEC  KMP_INT32_SPEC
# define KMP_UINT_SPEC KMP_UINT32_SPEC
# define KMP_INT_MAX   ((kmp_int32)0x7FFFFFFF)
# define KMP_INT_MIN   ((kmp_int32)0x80000000)
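/* traits_t supplies, for each integer type used by the runtime, its signed,
   unsigned and floating-point counterparts plus a printf format specifier
   string (spec). */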
template< typename T >
struct traits_t {
    typedef T           signed_t;
    typedef T           unsigned_t;
    typedef T           floating_t;
    static char const * spec;
};

template<>
struct traits_t< signed int > {
    typedef signed int    signed_t;
    typedef unsigned int  unsigned_t;
    typedef double        floating_t;
    static char const *   spec;
};

template<>
struct traits_t< unsigned int > {
    typedef signed int    signed_t;
    typedef unsigned int  unsigned_t;
    typedef double        floating_t;
    static char const *   spec;
};

template<>
struct traits_t< signed long long > {
    typedef signed long long    signed_t;
    typedef unsigned long long  unsigned_t;
    typedef long double         floating_t;
    static char const *         spec;
};

template<>
struct traits_t< unsigned long long > {
    typedef signed long long    signed_t;
    typedef unsigned long long  unsigned_t;
    typedef long double         floating_t;
    static char const *         spec;
};

#endif // __cplusplus
# define KMP_STDCALL __stdcall

#define KMP_EXPORT extern

#define __forceinline __inline
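/* PAGE_ALIGNED tests whether an address sits on a PAGE_SIZE boundary;
   ALIGN_TO_PAGE rounds an address down to the start of its page. */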
#define PAGE_SIZE            (0x4000)
#define PAGE_ALIGNED(_addr)  ( ! ((size_t) _addr & (size_t)(PAGE_SIZE - 1)))
#define ALIGN_TO_PAGE(x)     (void *)(((size_t)(x)) & ~((size_t)(PAGE_SIZE - 1)))

#endif // __cplusplus

#define CACHE_LINE 128
#if ( CACHE_LINE < 64 ) && ! KMP_OS_DARWIN
#warning CACHE_LINE is too small.
#if !defined KMP_PERF_V19
# define KMP_PERF_V19 KMP_ON

#if !defined KMP_PERF_V106
# define KMP_PERF_V106 KMP_ON

#define KMP_CACHE_PREFETCH(ADDR)
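/* Alignment helpers: GNU-style __attribute__((aligned)) on Unix compilers,
   __declspec(align) otherwise. KMP_ALIGN_CACHE aligns an object to the
   CACHE_LINE boundary. */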
#if KMP_OS_UNIX && defined(__GNUC__)
# define KMP_DO_ALIGN(bytes)  __attribute__((aligned(bytes)))
# define KMP_ALIGN_CACHE      __attribute__((aligned(CACHE_LINE)))
# define KMP_ALIGN(bytes)     __attribute__((aligned(bytes)))
#else
# define KMP_DO_ALIGN(bytes)  __declspec( align(bytes) )
# define KMP_ALIGN_CACHE      __declspec( align(CACHE_LINE) )
# define KMP_ALIGN(bytes)     __declspec( align(bytes) )
#endif

#if defined(__MIC__) || defined(__MIC2__)

# if __MIC2__ || __KNC__
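/* Memory fences and atomic primitives. Where the GCC __sync builtins are
   available, the KMP_TEST_THEN_*, KMP_COMPARE_AND_STORE_* and KMP_XCHG_*
   macros below map directly onto them; otherwise they fall back to the
   out-of-line __kmp_* helpers declared further down. */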
enum kmp_mem_fence_type {

# define KMP_TEST_THEN_INC32(p)       __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_INC_ACQ32(p)   __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_INC64(p)       __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p)   __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_ADD4_32(p)     __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p)  __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
# define KMP_TEST_THEN_ADD4_64(p)     __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p)  __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
# define KMP_TEST_THEN_DEC32(p)       __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_DEC_ACQ32(p)   __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_DEC64(p)       __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p)   __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_ADD32(p, v)    __sync_fetch_and_add( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_ADD64(p, v)    __sync_fetch_and_add( (kmp_int64 *)(p), (v) )

# define KMP_TEST_THEN_OR32(p, v)     __sync_fetch_and_or( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_AND32(p, v)    __sync_fetch_and_and( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_OR64(p, v)     __sync_fetch_and_or( (kmp_int64 *)(p), (v) )
# define KMP_TEST_THEN_AND64(p, v)    __sync_fetch_and_and( (kmp_int64 *)(p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)   __sync_bool_compare_and_swap( (volatile kmp_uint8  *)(p), (kmp_uint8)(cv),  (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)   __sync_bool_compare_and_swap( (volatile kmp_uint8  *)(p), (kmp_uint8)(cv),  (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )
# define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)    __sync_bool_compare_and_swap( (volatile void **)(p), (void *)(cv), (void *)(sv) )

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)   __sync_val_compare_and_swap( (volatile kmp_uint8  *)(p), (kmp_uint8)(cv),  (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)  __sync_val_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)  __sync_val_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)  __sync_val_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )

#define KMP_XCHG_FIXED8(p, v)   __sync_lock_test_and_set( (volatile kmp_uint8  *)(p), (kmp_uint8)(v) )
#define KMP_XCHG_FIXED16(p, v)  __sync_lock_test_and_set( (volatile kmp_uint16 *)(p), (kmp_uint16)(v) )
#define KMP_XCHG_FIXED32(p, v)  __sync_lock_test_and_set( (volatile kmp_uint32 *)(p), (kmp_uint32)(v) )
#define KMP_XCHG_FIXED64(p, v)  __sync_lock_test_and_set( (volatile kmp_uint64 *)(p), (kmp_uint64)(v) )
inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v)
{
    kmp_int32 tmp = __sync_lock_test_and_set( (kmp_int32*)p, *(kmp_int32*)&v);
    return *(kmp_real32*)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64( volatile kmp_real64 *p, kmp_real64 v)
{
    kmp_int64 tmp = __sync_lock_test_and_set( (kmp_int64*)p, *(kmp_int64*)&v);
    return *(kmp_real64*)&tmp;
}
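/* Fallback path: the same operations provided as out-of-line __kmp_* routines
   (implemented elsewhere in the runtime) for compilers without the __sync
   builtins. */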
extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );

extern kmp_int8  __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
extern kmp_int8  __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );

extern kmp_int8  __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int16 __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
extern kmp_int32 __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
# define KMP_TEST_THEN_INC32(p)       __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_INC_ACQ32(p)   __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_INC64(p)       __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p)   __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_ADD4_32(p)     __kmp_test_then_add32( (p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p)  __kmp_test_then_add32( (p), 4 )
# define KMP_TEST_THEN_ADD4_64(p)     __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p)  __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_DEC32(p)       __kmp_test_then_add32( (p), -1 )
# define KMP_TEST_THEN_DEC_ACQ32(p)   __kmp_test_then_add32( (p), -1 )
# define KMP_TEST_THEN_DEC64(p)       __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p)   __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_ADD32(p, v)    __kmp_test_then_add32( (p), (v) )
# define KMP_TEST_THEN_ADD64(p, v)    __kmp_test_then_add64( (p), (v) )

# define KMP_TEST_THEN_OR32(p, v)     __kmp_test_then_or32( (p), (v) )
# define KMP_TEST_THEN_AND32(p, v)    __kmp_test_then_and32( (p), (v) )
# define KMP_TEST_THEN_OR64(p, v)     __kmp_test_then_or64( (p), (v) )
# define KMP_TEST_THEN_AND64(p, v)    __kmp_test_then_and64( (p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)   __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)   __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)  __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)  __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)  __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)  __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)  __kmp_compare_and_store64( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)  __kmp_compare_and_store64( (p), (cv), (sv) )

# define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)    __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )

# define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)    __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)   __kmp_compare_and_store_ret8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)  __kmp_compare_and_store_ret16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)  __kmp_compare_and_store_ret32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)  __kmp_compare_and_store_ret64( (p), (cv), (sv) )
# define KMP_XCHG_FIXED8(p, v)   __kmp_xchg_fixed8( (p), (v) )
# define KMP_XCHG_FIXED16(p, v)  __kmp_xchg_fixed16( (p), (v) )
# define KMP_XCHG_FIXED32(p, v)  __kmp_xchg_fixed32( (p), (v) )
# define KMP_XCHG_FIXED64(p, v)  __kmp_xchg_fixed64( (p), (v) )
# define KMP_XCHG_REAL32(p, v)   __kmp_xchg_real32( (p), (v) )
# define KMP_XCHG_REAL64(p, v)   __kmp_xchg_real64( (p), (v) )
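/* Illustrative use (not from this file): claim a one-shot flag exactly once.

       if ( KMP_COMPARE_AND_STORE_ACQ32( &flag, 0, 1 ) ) {
           // this thread won the race and may perform the initialization
       }

   The ACQ/REL suffixes record the intended acquire/release semantics; on both
   the __sync and the __kmp_* paths shown here they expand to the same
   underlying compare-and-swap. */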
extern kmp_real32 __kmp_test_then_add_real32( volatile kmp_real32 *p, kmp_real32 v );
extern kmp_real64 __kmp_test_then_add_real64( volatile kmp_real64 *p, kmp_real64 v );
# define KMP_TEST_THEN_ADD_REAL32(p, v)  __kmp_test_then_add_real32( (p), (v) )
# define KMP_TEST_THEN_ADD_REAL64(p, v)  __kmp_test_then_add_real64( (p), (v) )

# define KMP_MB()   asm ("nop")
# define KMP_IMB()  asm ("nop")

# define KMP_ST_REL32(A,D)  ( *(A) = (D) )

# define KMP_ST_REL64(A,D)  ( *(A) = (D) )

# define KMP_LD_ACQ32(A)    ( *(A) )

# define KMP_LD_ACQ64(A)    ( *(A) )
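/* TCR_n / TCW_n wrap reads and writes of shared runtime variables; in this
   configuration they are plain loads and stores, while the SYNC and TCX_SYNC
   forms mark the accesses that pair with the compare-and-store macros above.
   The pointer-sized variants below forward to the 4- or 8-byte versions
   depending on the target. */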
#define TCW_4(a,b)        (a) = (b)

#define TCW_8(a,b)        (a) = (b)
#define TCR_SYNC_4(a)     (a)
#define TCW_SYNC_4(a,b)   (a) = (b)
#define TCX_SYNC_4(a,b,c) KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a)     (a)
#define TCW_SYNC_8(a,b)   (a) = (b)
#define TCX_SYNC_8(a,b,c) KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), (kmp_int64)(b), (kmp_int64)(c))

#define TCR_PTR(a)          ((void *)TCR_4(a))
#define TCW_PTR(a,b)        TCW_4((a),(b))
#define TCR_SYNC_PTR(a)     ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a,b)   TCW_SYNC_4((a),(b))
#define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_4((a),(b),(c)))

#define TCR_PTR(a)          ((void *)TCR_8(a))
#define TCW_PTR(a,b)        TCW_8((a),(b))
#define TCR_SYNC_PTR(a)     ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a,b)   TCW_SYNC_8((a),(b))
#define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_8((a),(b),(c)))
# define FTN_TRUE  TRUE

# define FTN_FALSE FALSE

typedef void (*microtask_t)( int *gtid, int *npr, ... );

#ifdef USE_VOLATILE_CAST
# define VOLATILE_CAST(x) (volatile x)
#else
# define VOLATILE_CAST(x) (x)
#endif
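/* Wait and comparison helpers are chosen to match the width of the flag word
   being spun on: the __kmp_*_8 variants for 8-byte flags, the __kmp_*_4
   variants for 4-byte flags. */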
# define KMP_WAIT_YIELD __kmp_wait_yield_8
# define KMP_EQ         __kmp_eq_8
# define KMP_NEQ        __kmp_neq_8
# define KMP_LT         __kmp_lt_8
# define KMP_GE         __kmp_ge_8
# define KMP_LE         __kmp_le_8

# define KMP_WAIT_YIELD __kmp_wait_yield_4
# define KMP_EQ         __kmp_eq_4
# define KMP_NEQ        __kmp_neq_4
# define KMP_LT         __kmp_lt_4
# define KMP_GE         __kmp_ge_4
# define KMP_LE         __kmp_le_4
#if KMP_ARCH_X86_64 && KMP_OS_LINUX
# define STATIC_EFI2_WORKAROUND
#else
# define STATIC_EFI2_WORKAROUND static
#endif

#define KMP_USE_BGET 1

enum kmp_warnings_level {
    kmp_warnings_off = 0,

    kmp_warnings_explicit = 6,

#endif // __cplusplus