Intel® OpenMP* Runtime Library
kmp_os.h
1 /*
2  * kmp_os.h -- KPTS runtime header file.
3  */
4 
5 /* <copyright>
6  Copyright (c) 1997-2015 Intel Corporation. All Rights Reserved.
7 
8  Redistribution and use in source and binary forms, with or without
9  modification, are permitted provided that the following conditions
10  are met:
11 
12  * Redistributions of source code must retain the above copyright
13  notice, this list of conditions and the following disclaimer.
14  * Redistributions in binary form must reproduce the above copyright
15  notice, this list of conditions and the following disclaimer in the
16  documentation and/or other materials provided with the distribution.
17  * Neither the name of Intel Corporation nor the names of its
18  contributors may be used to endorse or promote products derived
19  from this software without specific prior written permission.
20 
21  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25  HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 
33 </copyright> */
34 
35 #ifndef KMP_OS_H
36 #define KMP_OS_H
37 
38 #include <stdlib.h>
39 
40 #define KMP_FTN_PLAIN 1
41 #define KMP_FTN_APPEND 2
42 #define KMP_FTN_UPPER 3
43 /*
44 #define KMP_FTN_PREPEND 4
45 #define KMP_FTN_UAPPEND 5
46 */
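/*
 * A rough illustration (not taken from the runtime sources): these constants
 * enumerate Fortran external-name mangling schemes. Elsewhere a build-time
 * selector (called KMP_FTN_ENTRIES in this sketch, an assumed name) would be
 * set to one of them and compared when spelling an entry point:
 *
 *   #if   KMP_FTN_ENTRIES == KMP_FTN_PLAIN
 *   # define FTN_SET_NUM_THREADS   omp_set_num_threads
 *   #elif KMP_FTN_ENTRIES == KMP_FTN_APPEND
 *   # define FTN_SET_NUM_THREADS   omp_set_num_threads_
 *   #elif KMP_FTN_ENTRIES == KMP_FTN_UPPER
 *   # define FTN_SET_NUM_THREADS   OMP_SET_NUM_THREADS
 *   #endif
 */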
47 
48 #define KMP_PTR_SKIP (sizeof(void*))
49 
50 /* -------------------------- Compiler variations ------------------------ */
51 
52 #define KMP_OFF 0
53 #define KMP_ON 1
54 
55 #define KMP_MEM_CONS_VOLATILE 0
56 #define KMP_MEM_CONS_FENCE 1
57 
58 #ifndef KMP_MEM_CONS_MODEL
59 # define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
60 #endif
61 
62 /* ------------------------- Compiler recognition ---------------------- */
63 #define KMP_COMPILER_ICC 0
64 #define KMP_COMPILER_GCC 0
65 #define KMP_COMPILER_CLANG 0
66 #define KMP_COMPILER_MSVC 0
67 
68 #if defined( __INTEL_COMPILER )
69 # undef KMP_COMPILER_ICC
70 # define KMP_COMPILER_ICC 1
71 #elif defined( __clang__ )
72 # undef KMP_COMPILER_CLANG
73 # define KMP_COMPILER_CLANG 1
74 #elif defined( __GNUC__ )
75 # undef KMP_COMPILER_GCC
76 # define KMP_COMPILER_GCC 1
77 #elif defined( _MSC_VER )
78 # undef KMP_COMPILER_MSVC
79 # define KMP_COMPILER_MSVC 1
80 #else
81 # error Unknown compiler
82 #endif
83 
84 /* ---------------------- Operating system recognition ------------------- */
85 
86 #define KMP_OS_LINUX 0
87 #define KMP_OS_FREEBSD 0
88 #define KMP_OS_DARWIN 0
89 #define KMP_OS_WINDOWS 0
90 #define KMP_OS_CNK 0
91 #define KMP_OS_UNIX 0 /* disjunction of KMP_OS_LINUX, KMP_OS_DARWIN etc. */
92 
93 #define KMP_ARCH_X86 0
94 #define KMP_ARCH_X86_64 0
95 #define KMP_ARCH_AARCH64 0
96 #define KMP_ARCH_PPC64_BE 0
97 #define KMP_ARCH_PPC64_LE 0
98 
99 #define KMP_ARCH_PPC64 (KMP_ARCH_PPC64_LE || KMP_ARCH_PPC64_BE)
100 
101 
102 #ifdef _WIN32
103 # undef KMP_OS_WINDOWS
104 # define KMP_OS_WINDOWS 1
105 #endif
106 
107 #if ( defined __APPLE__ && defined __MACH__ )
108 # undef KMP_OS_DARWIN
109 # define KMP_OS_DARWIN 1
110 #endif
111 
112 // in some ppc64 linux installations, only the second condition is met
113 #if ( defined __linux )
114 # undef KMP_OS_LINUX
115 # define KMP_OS_LINUX 1
116 #elif ( defined __linux__)
117 # undef KMP_OS_LINUX
118 # define KMP_OS_LINUX 1
119 #else
120 #endif
121 
122 #if ( defined __FreeBSD__ )
123 # undef KMP_OS_FREEBSD
124 # define KMP_OS_FREEBSD 1
125 #endif
126 
127 #if ( defined __bgq__ )
128 # undef KMP_OS_CNK
129 # define KMP_OS_CNK 1
130 #endif
131 
132 #if (1 != KMP_OS_LINUX + KMP_OS_FREEBSD + KMP_OS_DARWIN + KMP_OS_WINDOWS)
133 # error Unknown OS
134 #endif
135 
136 #if KMP_OS_LINUX || KMP_OS_FREEBSD || KMP_OS_DARWIN
137 # undef KMP_OS_UNIX
138 # define KMP_OS_UNIX 1
139 #endif
140 
141 #if KMP_OS_WINDOWS
142 # if defined _M_AMD64
143 # undef KMP_ARCH_X86_64
144 # define KMP_ARCH_X86_64 1
145 # else
146 # undef KMP_ARCH_X86
147 # define KMP_ARCH_X86 1
148 # endif
149 #endif
150 
151 #if KMP_OS_UNIX
152 # if defined __x86_64
153 # undef KMP_ARCH_X86_64
154 # define KMP_ARCH_X86_64 1
155 # elif defined __i386
156 # undef KMP_ARCH_X86
157 # define KMP_ARCH_X86 1
158 # elif defined __powerpc64__
159 # if defined __LITTLE_ENDIAN__
160 # undef KMP_ARCH_PPC64_LE
161 # define KMP_ARCH_PPC64_LE 1
162 # else
163 # undef KMP_ARCH_PPC64_BE
164 # define KMP_ARCH_PPC64_BE 1
165 # endif
166 # elif defined __aarch64__
167 # undef KMP_ARCH_AARCH64
168 # define KMP_ARCH_AARCH64 1
169 # endif
170 #endif
171 
172 #if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7R__) || \
173  defined(__ARM_ARCH_7A__)
174 # define KMP_ARCH_ARMV7 1
175 #endif
176 
177 #if defined(KMP_ARCH_ARMV7) || defined(__ARM_ARCH_6__) || \
178  defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || \
179  defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6T2__) || \
180  defined(__ARM_ARCH_6ZK__)
181 # define KMP_ARCH_ARMV6 1
182 #endif
183 
184 #if defined(KMP_ARCH_ARMV6) || defined(__ARM_ARCH_5T__) || \
185  defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__) || \
186  defined(__ARM_ARCH_5TEJ__)
187 # define KMP_ARCH_ARMV5 1
188 #endif
189 
190 #if defined(KMP_ARCH_ARMV5) || defined(__ARM_ARCH_4__) || \
191  defined(__ARM_ARCH_4T__)
192 # define KMP_ARCH_ARMV4 1
193 #endif
194 
195 #if defined(KMP_ARCH_ARMV4) || defined(__ARM_ARCH_3__) || \
196  defined(__ARM_ARCH_3M__)
197 # define KMP_ARCH_ARMV3 1
198 #endif
199 
200 #if defined(KMP_ARCH_ARMV3) || defined(__ARM_ARCH_2__)
201 # define KMP_ARCH_ARMV2 1
202 #endif
203 
204 #if defined(KMP_ARCH_ARMV2)
205 # define KMP_ARCH_ARM 1
206 #endif
207 
208 // TODO: Fixme - This is clever, but really fugly
209 #if (1 != KMP_ARCH_X86 + KMP_ARCH_X86_64 + KMP_ARCH_ARM + KMP_ARCH_PPC64 + KMP_ARCH_AARCH64)
210 # error Unknown or unsupported architecture
211 #endif
212 
213 #if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK && !KMP_ARCH_PPC64
214 # define KMP_AFFINITY_SUPPORTED 1
215 # if KMP_OS_WINDOWS && KMP_ARCH_X86_64
216 # define KMP_GROUP_AFFINITY 1
217 # else
218 # define KMP_GROUP_AFFINITY 0
219 # endif
220 #else
221 # define KMP_AFFINITY_SUPPORTED 0
222 # define KMP_GROUP_AFFINITY 0
223 #endif
224 
225 /* Check for quad-precision extension. */
226 #define KMP_HAVE_QUAD 0
227 #if KMP_ARCH_X86 || KMP_ARCH_X86_64
228 # if KMP_COMPILER_ICC
229  /* _Quad is already defined for icc */
230 # undef KMP_HAVE_QUAD
231 # define KMP_HAVE_QUAD 1
232 # elif KMP_COMPILER_CLANG
233  /* Clang doesn't support a software-implemented
234  128-bit extended precision type yet */
235  typedef long double _Quad;
236 # elif KMP_COMPILER_GCC
237  typedef __float128 _Quad;
238 # undef KMP_HAVE_QUAD
239 # define KMP_HAVE_QUAD 1
240 # elif KMP_COMPILER_MSVC
241  typedef long double _Quad;
242 # endif
243 #else
244 # if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
245  typedef long double _Quad;
246 # undef KMP_HAVE_QUAD
247 # define KMP_HAVE_QUAD 1
248 # endif
249 #endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
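/*
 * A minimal sketch (not part of the original header): code that needs
 * 128-bit precision is expected to test KMP_HAVE_QUAD rather than the
 * compiler macros directly, falling back to long double otherwise:
 *
 *   #if KMP_HAVE_QUAD
 *       _Quad q = (_Quad)1.0 / (_Quad)3.0;    // genuine 128-bit arithmetic
 *   #else
 *       long double q = 1.0L / 3.0L;          // best precision available
 *   #endif
 */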
250 
251 #if KMP_OS_WINDOWS
252  typedef char kmp_int8;
253  typedef unsigned char kmp_uint8;
254  typedef short kmp_int16;
255  typedef unsigned short kmp_uint16;
256  typedef int kmp_int32;
257  typedef unsigned int kmp_uint32;
258 # define KMP_INT32_SPEC "d"
259 # define KMP_UINT32_SPEC "u"
260 # ifndef KMP_STRUCT64
261  typedef __int64 kmp_int64;
262  typedef unsigned __int64 kmp_uint64;
263  #define KMP_INT64_SPEC "I64d"
264  #define KMP_UINT64_SPEC "I64u"
265 # else
266  struct kmp_struct64 {
267  kmp_int32 a,b;
268  };
269  typedef struct kmp_struct64 kmp_int64;
270  typedef struct kmp_struct64 kmp_uint64;
271  /* Not sure what to use for KMP_[U]INT64_SPEC here */
272 # endif
273 # if KMP_ARCH_X86_64
274 # define KMP_INTPTR 1
275  typedef __int64 kmp_intptr_t;
276  typedef unsigned __int64 kmp_uintptr_t;
277 # define KMP_INTPTR_SPEC "I64d"
278 # define KMP_UINTPTR_SPEC "I64u"
279 # endif
280 #endif /* KMP_OS_WINDOWS */
281 
282 #if KMP_OS_UNIX
283  typedef char kmp_int8;
284  typedef unsigned char kmp_uint8;
285  typedef short kmp_int16;
286  typedef unsigned short kmp_uint16;
287  typedef int kmp_int32;
288  typedef unsigned int kmp_uint32;
289  typedef long long kmp_int64;
290  typedef unsigned long long kmp_uint64;
291 # define KMP_INT32_SPEC "d"
292 # define KMP_UINT32_SPEC "u"
293 # define KMP_INT64_SPEC "lld"
294 # define KMP_UINT64_SPEC "llu"
295 #endif /* KMP_OS_UNIX */
296 
297 #if KMP_ARCH_X86 || KMP_ARCH_ARM
298 # define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
299 #elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64
300 # define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
301 #else
302 # error "Can't determine size_t printf format specifier."
303 #endif
304 
305 #if KMP_ARCH_X86
306 # define KMP_SIZE_T_MAX (0xFFFFFFFF)
307 #else
308 # define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
309 #endif
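/*
 * A minimal usage sketch (not part of the original header): the *_SPEC
 * strings are printf conversion fragments, so they are spliced into format
 * string literals rather than passed as arguments (assuming <stdio.h>):
 *
 *   kmp_int64 n  = 42;
 *   size_t    sz = sizeof( void * );
 *   printf( "n=%" KMP_INT64_SPEC " size=%" KMP_SIZE_T_SPEC "\n", n, sz );
 */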
310 
311 typedef size_t kmp_size_t;
312 typedef float kmp_real32;
313 typedef double kmp_real64;
314 
315 #ifndef KMP_INTPTR
316 # define KMP_INTPTR 1
317  typedef long kmp_intptr_t;
318  typedef unsigned long kmp_uintptr_t;
319 # define KMP_INTPTR_SPEC "ld"
320 # define KMP_UINTPTR_SPEC "lu"
321 #endif
322 
323 #ifdef KMP_I8
324  typedef kmp_int64 kmp_int;
325  typedef kmp_uint64 kmp_uint;
326 # define KMP_INT_SPEC KMP_INT64_SPEC
327 # define KMP_UINT_SPEC KMP_UINT64_SPEC
328 # define KMP_INT_MAX ((kmp_int64)0x7FFFFFFFFFFFFFFFLL)
329 # define KMP_INT_MIN ((kmp_int64)0x8000000000000000LL)
330 #else
331  typedef kmp_int32 kmp_int;
332  typedef kmp_uint32 kmp_uint;
333 # define KMP_INT_SPEC KMP_INT32_SPEC
334 # define KMP_UINT_SPEC KMP_UINT32_SPEC
335 # define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
336 # define KMP_INT_MIN ((kmp_int32)0x80000000)
337 #endif /* KMP_I8 */
338 
339 #ifdef __cplusplus
340  //-------------------------------------------------------------------------
341  // template providing the debug print format specifier ( d, u, lld, llu )
342  // of a type, and its signed/unsigned/floating flavors
343  template< typename T >
344  struct traits_t {
345  typedef T signed_t;
346  typedef T unsigned_t;
347  typedef T floating_t;
348  static char const * spec;
349  };
350  // int
351  template<>
352  struct traits_t< signed int > {
353  typedef signed int signed_t;
354  typedef unsigned int unsigned_t;
355  typedef double floating_t;
356  static char const * spec;
357  };
358  // unsigned int
359  template<>
360  struct traits_t< unsigned int > {
361  typedef signed int signed_t;
362  typedef unsigned int unsigned_t;
363  typedef double floating_t;
364  static char const * spec;
365  };
366  // long long
367  template<>
368  struct traits_t< signed long long > {
369  typedef signed long long signed_t;
370  typedef unsigned long long unsigned_t;
371  typedef long double floating_t;
372  static char const * spec;
373  };
374  // unsigned long long
375  template<>
376  struct traits_t< unsigned long long > {
377  typedef signed long long signed_t;
378  typedef unsigned long long unsigned_t;
379  typedef long double floating_t;
380  static char const * spec;
381  };
382  //-------------------------------------------------------------------------
383 #endif // __cplusplus
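/*
 * A usage sketch (not part of the original header; debug_dump is a
 * hypothetical helper and <stdio.h> is assumed): traits_t lets template code
 * build a printf format for a value whose integer type is a template
 * parameter:
 *
 *   template< typename T >
 *   void debug_dump( char const *name, T value ) {
 *       char fmt[ 32 ];
 *       sprintf( fmt, "%%s = %%%s\n", traits_t< T >::spec );  // e.g. "%s = %lld\n"
 *       printf( fmt, name, value );
 *   }
 *
 *   // debug_dump( "gtid", (kmp_int32)0 );   // prints "gtid = 0"
 */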
384 
385 #define KMP_EXPORT extern /* export declaration in guide libraries */
386 
387 #if __GNUC__ == 4
388  #define __forceinline __inline
389 #endif
390 
391 #define PAGE_SIZE (0x4000)
392 #define PAGE_ALIGNED(_addr) ( ! ((size_t) _addr & \
393  (size_t)(PAGE_SIZE - 1)))
394 #define ALIGN_TO_PAGE(x) (void *)(((size_t)(x)) & ~((size_t)(PAGE_SIZE - 1)))
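/*
 * Worked example (not part of the original header): with PAGE_SIZE = 0x4000
 * (16 KB) the mask PAGE_SIZE - 1 is 0x3FFF, so
 *
 *   PAGE_ALIGNED( (void *)0x10000 )   evaluates to 1  (low 14 bits all zero)
 *   PAGE_ALIGNED( (void *)0x10010 )   evaluates to 0
 *   ALIGN_TO_PAGE( (void *)0x10010 )  yields (void *)0x10000  (rounds down)
 */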
395 
396 /* ---------------------- Support for cache alignment, padding, etc. -----------------*/
397 
398 #ifdef __cplusplus
399 extern "C" {
400 #endif // __cplusplus
401 
402 #define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */
403 
404 /* Define the default size of the cache line */
405 #ifndef CACHE_LINE
406  #define CACHE_LINE 128 /* cache line size in bytes */
407 #else
408  #if ( CACHE_LINE < 64 ) && ! defined( KMP_OS_DARWIN )
409  // 2006-02-13: This produces too many warnings on OS X*. Disable it for a while...
410  #warning CACHE_LINE is too small.
411  #endif
412 #endif /* CACHE_LINE */
413 
414 #define KMP_CACHE_PREFETCH(ADDR) /* nothing */
415 
416 /* Temporary note: if performance testing of this passes, we can remove
417  all references to KMP_DO_ALIGN and replace them with KMP_ALIGN. */
418 #if KMP_OS_UNIX && defined(__GNUC__)
419 # define KMP_DO_ALIGN(bytes) __attribute__((aligned(bytes)))
420 # define KMP_ALIGN_CACHE __attribute__((aligned(CACHE_LINE)))
421 # define KMP_ALIGN_CACHE_INTERNODE __attribute__((aligned(INTERNODE_CACHE_LINE)))
422 # define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
423 #else
424 # define KMP_DO_ALIGN(bytes) __declspec( align(bytes) )
425 # define KMP_ALIGN_CACHE __declspec( align(CACHE_LINE) )
426 # define KMP_ALIGN_CACHE_INTERNODE __declspec( align(INTERNODE_CACHE_LINE) )
427 # define KMP_ALIGN(bytes) __declspec( align(bytes) )
428 #endif
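/*
 * A minimal sketch (not part of the original header; example_counter is a
 * hypothetical type): a typical use is forcing a frequently written field
 * onto its own cache line so two threads' hot data never share a line,
 * avoiding false sharing:
 *
 *   struct KMP_ALIGN_CACHE example_counter {
 *       volatile kmp_int32 count;
 *       char pad[ CACHE_LINE - sizeof( kmp_int32 ) ];
 *   };
 */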
429 
430 #if defined(__MIC__) || defined(__MIC2__)
431  #define KMP_MIC 1
432 // Intel(R) Composer XE (13.0) defines both __MIC__ and __MIC2__ !
433 # if __MIC2__ || __KNC__
434  #define KMP_MIC1 0
435  #define KMP_MIC2 1
436 # else
437  #define KMP_MIC1 1
438  #define KMP_MIC2 0
439 # endif
440 #else
441  #define KMP_MIC 0
442  #define KMP_MIC1 0
443  #define KMP_MIC2 0
444 #endif
445 
446 /* General purpose fence types for memory operations */
447 enum kmp_mem_fence_type {
448  kmp_no_fence, /* No memory fence */
449  kmp_acquire_fence, /* Acquire (read) memory fence */
450  kmp_release_fence, /* Release (write) memory fence */
451  kmp_full_fence /* Full (read+write) memory fence */
452 };
453 
454 
455 //
456 // Synchronization primitives
457 //
458 
459 #if KMP_ASM_INTRINS && KMP_OS_WINDOWS
460 
461 #include <Windows.h>
462 
463 #pragma intrinsic(InterlockedExchangeAdd)
464 #pragma intrinsic(InterlockedCompareExchange)
465 #pragma intrinsic(InterlockedExchange)
466 #pragma intrinsic(InterlockedExchange64)
467 
468 //
469 // Using InterlockedIncrement / InterlockedDecrement causes a library loading
470 // ordering problem, so we use InterlockedExchangeAdd instead.
471 //
472 # define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd( (volatile long *)(p), 1 )
473 # define KMP_TEST_THEN_INC_ACQ32(p) InterlockedExchangeAdd( (volatile long *)(p), 1 )
474 # define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd( (volatile long *)(p), 4 )
475 # define KMP_TEST_THEN_ADD4_ACQ32(p) InterlockedExchangeAdd( (volatile long *)(p), 4 )
476 # define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd( (volatile long *)(p), -1 )
477 # define KMP_TEST_THEN_DEC_ACQ32(p) InterlockedExchangeAdd( (volatile long *)(p), -1 )
478 # define KMP_TEST_THEN_ADD32(p, v) InterlockedExchangeAdd( (volatile long *)(p), (v) )
479 
480 # define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) InterlockedCompareExchange( (volatile long *)(p),(long)(sv),(long)(cv) )
481 
482 # define KMP_XCHG_FIXED32(p, v) InterlockedExchange( (volatile long *)(p), (long)(v) )
483 # define KMP_XCHG_FIXED64(p, v) InterlockedExchange64( (volatile kmp_int64 *)(p), (kmp_int64)(v) )
484 
485 inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v)
486 {
487  kmp_int32 tmp = InterlockedExchange( (volatile long *)p, *(long *)&v);
488  return *(kmp_real32*)&tmp;
489 }
490 
491 //
492 // Routines that we still need to implement in assembly.
493 //
494 extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
495 extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
496 extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );
497 extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
498 extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
499 extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
500 extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
501 extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
502 extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );
503 
504 extern kmp_int8 __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
505 extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
506 extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
507 extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
508 extern kmp_int8 __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
509 extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
510 extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
511 extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
512 
513 extern kmp_int8 __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
514 extern kmp_int16 __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
515 extern kmp_int32 __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
516 extern kmp_int64 __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
517 extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
518 extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
519 
520 //# define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32( (p), 1 )
521 //# define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32( (p), 1 )
522 # define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64( (p), 1LL )
523 # define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64( (p), 1LL )
524 //# define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32( (p), 4 )
525 //# define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32( (p), 4 )
526 # define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64( (p), 4LL )
527 # define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64( (p), 4LL )
528 //# define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32( (p), -1 )
529 //# define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32( (p), -1 )
530 # define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64( (p), -1LL )
531 # define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64( (p), -1LL )
532 //# define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32( (p), (v) )
533 # define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8( (p), (v) )
534 # define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64( (p), (v) )
535 
536 # define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8( (p), (v) )
537 # define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8( (p), (v) )
538 # define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32( (p), (v) )
539 # define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32( (p), (v) )
540 # define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64( (p), (v) )
541 # define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64( (p), (v) )
542 
543 # define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) __kmp_compare_and_store8( (p), (cv), (sv) )
544 # define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) __kmp_compare_and_store8( (p), (cv), (sv) )
545 # define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
546 # define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
547 # define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
548 # define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
549 # define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
550 # define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
551 
552 # if KMP_ARCH_X86
553 # define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )
554 # else /* 64 bit pointers */
555 # define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )
556 # endif /* KMP_ARCH_X86 */
557 
558 # define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) __kmp_compare_and_store_ret8( (p), (cv), (sv) )
559 # define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __kmp_compare_and_store_ret16( (p), (cv), (sv) )
560 //# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) __kmp_compare_and_store_ret32( (p), (cv), (sv) )
561 # define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __kmp_compare_and_store_ret64( (p), (cv), (sv) )
562 
563 # define KMP_XCHG_FIXED8(p, v) __kmp_xchg_fixed8( (p), (v) );
564 # define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16( (p), (v) );
565 //# define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32( (p), (v) );
566 //# define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64( (p), (v) );
567 //# define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32( (p), (v) );
568 # define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64( (p), (v) );
569 
570 
571 #elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
572 
573 /* cast p to correct type so that proper intrinsic will be used */
574 # define KMP_TEST_THEN_INC32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
575 # define KMP_TEST_THEN_INC_ACQ32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
576 # define KMP_TEST_THEN_INC64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
577 # define KMP_TEST_THEN_INC_ACQ64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
578 # define KMP_TEST_THEN_ADD4_32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
579 # define KMP_TEST_THEN_ADD4_ACQ32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
580 # define KMP_TEST_THEN_ADD4_64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
581 # define KMP_TEST_THEN_ADD4_ACQ64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
582 # define KMP_TEST_THEN_DEC32(p) __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
583 # define KMP_TEST_THEN_DEC_ACQ32(p) __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
584 # define KMP_TEST_THEN_DEC64(p) __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
585 # define KMP_TEST_THEN_DEC_ACQ64(p) __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
586 # define KMP_TEST_THEN_ADD8(p, v) __sync_fetch_and_add( (kmp_int8 *)(p), (v) )
587 # define KMP_TEST_THEN_ADD32(p, v) __sync_fetch_and_add( (kmp_int32 *)(p), (v) )
588 # define KMP_TEST_THEN_ADD64(p, v) __sync_fetch_and_add( (kmp_int64 *)(p), (v) )
589 
590 # define KMP_TEST_THEN_OR8(p, v) __sync_fetch_and_or( (kmp_int8 *)(p), (v) )
591 # define KMP_TEST_THEN_AND8(p, v) __sync_fetch_and_and( (kmp_int8 *)(p), (v) )
592 # define KMP_TEST_THEN_OR32(p, v) __sync_fetch_and_or( (kmp_int32 *)(p), (v) )
593 # define KMP_TEST_THEN_AND32(p, v) __sync_fetch_and_and( (kmp_int32 *)(p), (v) )
594 # define KMP_TEST_THEN_OR64(p, v) __sync_fetch_and_or( (kmp_int64 *)(p), (v) )
595 # define KMP_TEST_THEN_AND64(p, v) __sync_fetch_and_and( (kmp_int64 *)(p), (v) )
596 
597 # define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint8 *)(p),(kmp_uint8)(cv),(kmp_uint8)(sv) )
598 # define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint8 *)(p),(kmp_uint8)(cv),(kmp_uint8)(sv) )
599 # define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p),(kmp_uint16)(cv),(kmp_uint16)(sv) )
600 # define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p),(kmp_uint16)(cv),(kmp_uint16)(sv) )
601 # define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p),(kmp_uint32)(cv),(kmp_uint32)(sv) )
602 # define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p),(kmp_uint32)(cv),(kmp_uint32)(sv) )
603 # define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p),(kmp_uint64)(cv),(kmp_uint64)(sv) )
604 # define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p),(kmp_uint64)(cv),(kmp_uint64)(sv) )
605 # define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __sync_bool_compare_and_swap( (volatile void **)(p),(void *)(cv),(void *)(sv) )
606 
607 # define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint8 *)(p),(kmp_uint8)(cv),(kmp_uint8)(sv) )
608 # define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint16 *)(p),(kmp_uint16)(cv),(kmp_uint16)(sv) )
609 # define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint32 *)(p),(kmp_uint32)(cv),(kmp_uint32)(sv) )
610 # define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint64 *)(p),(kmp_uint64)(cv),(kmp_uint64)(sv) )
611 
612 #define KMP_XCHG_FIXED8(p, v) __sync_lock_test_and_set( (volatile kmp_uint8 *)(p), (kmp_uint8)(v) )
613 #define KMP_XCHG_FIXED16(p, v) __sync_lock_test_and_set( (volatile kmp_uint16 *)(p), (kmp_uint16)(v) )
614 #define KMP_XCHG_FIXED32(p, v) __sync_lock_test_and_set( (volatile kmp_uint32 *)(p), (kmp_uint32)(v) )
615 #define KMP_XCHG_FIXED64(p, v) __sync_lock_test_and_set( (volatile kmp_uint64 *)(p), (kmp_uint64)(v) )
616 
617 inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v)
618 {
619  kmp_int32 tmp = __sync_lock_test_and_set( (kmp_int32*)p, *(kmp_int32*)&v);
620  return *(kmp_real32*)&tmp;
621 }
622 
623 inline kmp_real64 KMP_XCHG_REAL64( volatile kmp_real64 *p, kmp_real64 v)
624 {
625  kmp_int64 tmp = __sync_lock_test_and_set( (kmp_int64*)p, *(kmp_int64*)&v);
626  return *(kmp_real64*)&tmp;
627 }
628 
629 #else
630 
631 extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
632 extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
633 extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );
634 extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
635 extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
636 extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
637 extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
638 extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
639 extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );
640 
641 extern kmp_int8 __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
642 extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
643 extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
644 extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
645 extern kmp_int8 __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
646 extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
647 extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
648 extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
649 
650 extern kmp_int8 __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
651 extern kmp_int16 __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
652 extern kmp_int32 __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
653 extern kmp_int64 __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
654 extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
655 extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
656 
657 # define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32( (p), 1 )
658 # define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32( (p), 1 )
659 # define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64( (p), 1LL )
660 # define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64( (p), 1LL )
661 # define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32( (p), 4 )
662 # define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32( (p), 4 )
663 # define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64( (p), 4LL )
664 # define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64( (p), 4LL )
665 # define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32( (p), -1 )
666 # define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32( (p), -1 )
667 # define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64( (p), -1LL )
668 # define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64( (p), -1LL )
669 # define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8( (p), (v) )
670 # define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32( (p), (v) )
671 # define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64( (p), (v) )
672 
673 # define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8( (p), (v) )
674 # define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8( (p), (v) )
675 # define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32( (p), (v) )
676 # define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32( (p), (v) )
677 # define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64( (p), (v) )
678 # define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64( (p), (v) )
679 
680 # define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) __kmp_compare_and_store8( (p), (cv), (sv) )
681 # define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) __kmp_compare_and_store8( (p), (cv), (sv) )
682 # define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
683 # define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
684 # define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
685 # define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
686 # define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
687 # define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
688 
689 # if KMP_ARCH_X86
690 # define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )
691 # else /* 64 bit pointers */
692 # define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )
693 # endif /* KMP_ARCH_X86 */
694 
695 # define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) __kmp_compare_and_store_ret8( (p), (cv), (sv) )
696 # define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __kmp_compare_and_store_ret16( (p), (cv), (sv) )
697 # define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) __kmp_compare_and_store_ret32( (p), (cv), (sv) )
698 # define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __kmp_compare_and_store_ret64( (p), (cv), (sv) )
699 
700 # define KMP_XCHG_FIXED8(p, v) __kmp_xchg_fixed8( (p), (v) );
701 # define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16( (p), (v) );
702 # define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32( (p), (v) );
703 # define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64( (p), (v) );
704 # define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32( (p), (v) );
705 # define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64( (p), (v) );
706 
707 #endif /* KMP_ASM_INTRINS */
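/*
 * A usage sketch (not part of the original header): whichever branch above
 * supplied the definitions, the contract is the same: KMP_TEST_THEN_* return
 * the previous value, and KMP_COMPARE_AND_STORE_{ACQ,REL}* return non-zero
 * only if the swap took place. A simple spin flag might therefore look like:
 *
 *   volatile kmp_int32 flag = 0;
 *   ...
 *   while ( ! KMP_COMPARE_AND_STORE_ACQ32( &flag, 0, 1 ) )
 *       ;                                   // a real caller would yield here
 *   // ... protected work ...
 *   KMP_XCHG_FIXED32( &flag, 0 );           // release the flag
 */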
708 
709 
710 /* ------------- relaxed consistency memory model stuff ------------------ */
711 
712 #if KMP_OS_WINDOWS
713 # ifdef __ABSOFT_WIN
714 # define KMP_MB() asm ("nop")
715 # define KMP_IMB() asm ("nop")
716 # else
717 # define KMP_MB() /* _asm{ nop } */
718 # define KMP_IMB() /* _asm{ nop } */
719 # endif
720 #endif /* KMP_OS_WINDOWS */
721 
722 #if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64
723 # define KMP_MB() __sync_synchronize()
724 #endif
725 
726 #ifndef KMP_MB
727 # define KMP_MB() /* nothing to do */
728 #endif
729 
730 #ifndef KMP_IMB
731 # define KMP_IMB() /* nothing to do */
732 #endif
733 
734 #ifndef KMP_ST_REL32
735 # define KMP_ST_REL32(A,D) ( *(A) = (D) )
736 #endif
737 
738 #ifndef KMP_ST_REL64
739 # define KMP_ST_REL64(A,D) ( *(A) = (D) )
740 #endif
741 
742 #ifndef KMP_LD_ACQ32
743 # define KMP_LD_ACQ32(A) ( *(A) )
744 #endif
745 
746 #ifndef KMP_LD_ACQ64
747 # define KMP_LD_ACQ64(A) ( *(A) )
748 #endif
749 
750 /* ------------------------------------------------------------------------ */
751 //
752 // FIXME - maybe this should be
753 //
754 // #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
755 // #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
756 //
757 // #define TCR_8(a) (*(volatile kmp_int64 *)(a))
758 // #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
759 //
760 // I'm fairly certain this is the correct thing to do, but I'm afraid
761 // of performance regressions.
762 //
763 
764 #define TCR_1(a) (a)
765 #define TCW_1(a,b) (a) = (b)
766 #define TCR_4(a) (a)
767 #define TCW_4(a,b) (a) = (b)
768 #define TCR_8(a) (a)
769 #define TCW_8(a,b) (a) = (b)
770 #define TCR_SYNC_4(a) (a)
771 #define TCW_SYNC_4(a,b) (a) = (b)
772 #define TCX_SYNC_4(a,b,c) KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), (kmp_int32)(b), (kmp_int32)(c))
773 #define TCR_SYNC_8(a) (a)
774 #define TCW_SYNC_8(a,b) (a) = (b)
775 #define TCX_SYNC_8(a,b,c) KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), (kmp_int64)(b), (kmp_int64)(c))
776 
777 #if KMP_ARCH_X86
778 // What about ARM?
779  #define TCR_PTR(a) ((void *)TCR_4(a))
780  #define TCW_PTR(a,b) TCW_4((a),(b))
781  #define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
782  #define TCW_SYNC_PTR(a,b) TCW_SYNC_4((a),(b))
783  #define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_4((a),(b),(c)))
784 
785 #else /* 64 bit pointers */
786 
787  #define TCR_PTR(a) ((void *)TCR_8(a))
788  #define TCW_PTR(a,b) TCW_8((a),(b))
789  #define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
790  #define TCW_SYNC_PTR(a,b) TCW_SYNC_8((a),(b))
791  #define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_8((a),(b),(c)))
792 
793 #endif /* KMP_ARCH_X86 */
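/*
 * A minimal sketch (not part of the original header): the TCR_* / TCW_*
 * wrappers funnel reads and writes of shared locations through one macro, so
 * the treatment (plain access today, or the volatile casts proposed in the
 * FIXME above) can change without touching call sites. Reading and
 * publishing a shared pointer looks like:
 *
 *   void * volatile shared_slot;                    // hypothetical shared variable
 *   ...
 *   void *snapshot = TCR_PTR( shared_slot );        // wrapped read
 *   if ( snapshot == NULL )
 *       TCW_PTR( shared_slot, some_new_value );     // wrapped write (value assumed)
 */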
794 
795 /*
796  * If these FTN_{TRUE,FALSE} values change, several places where they are
797  * used to check that the language is Fortran (not C) may also need
798  * to change.
799  */
800 
801 #ifndef FTN_TRUE
802 # define FTN_TRUE TRUE
803 #endif
804 
805 #ifndef FTN_FALSE
806 # define FTN_FALSE FALSE
807 #endif
808 
809 typedef void (*microtask_t)( int *gtid, int *npr, ... );
810 
811 #ifdef USE_VOLATILE_CAST
812 # define VOLATILE_CAST(x) (volatile x)
813 #else
814 # define VOLATILE_CAST(x) (x)
815 #endif
816 
817 #ifdef KMP_I8
818 # define KMP_WAIT_YIELD __kmp_wait_yield_8
819 # define KMP_EQ __kmp_eq_8
820 # define KMP_NEQ __kmp_neq_8
821 # define KMP_LT __kmp_lt_8
822 # define KMP_GE __kmp_ge_8
823 # define KMP_LE __kmp_le_8
824 #else
825 # define KMP_WAIT_YIELD __kmp_wait_yield_4
826 # define KMP_EQ __kmp_eq_4
827 # define KMP_NEQ __kmp_neq_4
828 # define KMP_LT __kmp_lt_4
829 # define KMP_GE __kmp_ge_4
830 # define KMP_LE __kmp_le_4
831 #endif /* KMP_I8 */
832 
833 /* Workaround for Intel(R) 64 code gen bug when taking address of static array (Intel(R) 64 Tracker #138) */
834 #if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
835 # define STATIC_EFI2_WORKAROUND
836 #else
837 # define STATIC_EFI2_WORKAROUND static
838 #endif
839 
840 // Support of BGET usage
841 #ifndef KMP_USE_BGET
842 #define KMP_USE_BGET 1
843 #endif
844 
845 
846 // Switches for OSS builds
847 #ifndef USE_SYSFS_INFO
848 # define USE_SYSFS_INFO 0
849 #endif
850 #ifndef USE_CMPXCHG_FIX
851 # define USE_CMPXCHG_FIX 1
852 #endif
853 
854 // Enable dynamic user lock
855 #ifndef KMP_USE_DYNAMIC_LOCK
856 # define KMP_USE_DYNAMIC_LOCK 0
857 #endif
858 
859 // Warning levels
860 enum kmp_warnings_level {
861  kmp_warnings_off = 0, /* No warnings */
862  kmp_warnings_low, /* Minimal warnings (default) */
863  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
864  kmp_warnings_verbose /* reserved */
865 };
866 
867 // Safe C API
868 #include "kmp_safe_c_api.h"
869 
870 #ifdef __cplusplus
871 } // extern "C"
872 #endif // __cplusplus
873 
874 #endif /* KMP_OS_H */