52 #if defined( __cplusplus ) && ( KMP_OS_WINDOWS )
60 #define _DEBUG_TEMPORARILY_UNSET_
// Mixed-precision complex division helper used by the Windows-only
// __kmp_cmplx* wrapper structs below: computes lhs / rhs from the component
// parts, (a+bi)/(c+di) = ((a*c + b*d) + (b*c - a*d)i) / (c*c + d*d).
// The result keeps the precision of the left-hand operand (type_lhs).
// NOTE(review): the trailing `return ret;` and closing brace of this template
// are not visible here — they appear to have been dropped by the extraction;
// confirm against the full header.
65 template<
typename type_lhs,
typename type_rhs >
66 std::complex< type_lhs > __kmp_lhs_div_rhs(
67 const std::complex< type_lhs >& lhs,
68 const std::complex< type_rhs >& rhs ) {
// Split both operands into real/imaginary parts.
69 type_lhs a = lhs.real();
70 type_lhs b = lhs.imag();
71 type_rhs c = rhs.real();
72 type_rhs d = rhs.imag();
// Common denominator |rhs|^2; no guard against a zero divisor here.
73 type_rhs den = c*c + d*d;
74 type_rhs r = ( a*c + b*d );
75 type_rhs i = ( b*c - a*d );
76 std::complex< type_lhs > ret( r/den, i/den );
// Thin wrapper around std::complex<double> that routes division through
// __kmp_lhs_div_rhs (see above).  Only /= and / are overridden; every other
// operation comes from the std::complex<double> base.
// NOTE(review): several closing braces of the member functions/struct are not
// visible in this extraction — confirm against the full header.
81 struct __kmp_cmplx64_t : std::complex< double > {
// Default-construct as (0, 0) via the base class.
83 __kmp_cmplx64_t() : std::complex< double > () {}
// Implicit conversion from the underlying std::complex<double>.
85 __kmp_cmplx64_t(
const std::complex< double >& cd )
86 : std::complex< double > ( cd ) {}
// In-place division delegated to the explicit helper.
88 void operator /= (
const __kmp_cmplx64_t& rhs ) {
89 std::complex< double > lhs = *
this;
90 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Binary division delegated to the explicit helper.
93 __kmp_cmplx64_t operator / (
const __kmp_cmplx64_t& rhs ) {
94 std::complex< double > lhs = *
this;
95 return __kmp_lhs_div_rhs( lhs, rhs );
// Runtime-facing alias for the wrapper type.
99 typedef struct __kmp_cmplx64_t kmp_cmplx64;
// Wrapper around std::complex<float> providing: (a) same-precision +,-,*
// spelled out through the base type, (b) mixed-precision +,-,* against
// kmp_cmplx64 (promote to double-complex, operate, then narrow back to
// float-complex), and (c) division routed through __kmp_lhs_div_rhs for both
// same- and mixed-precision right-hand sides.
// NOTE(review): closing braces and several `return r;` statements of the
// mixed-precision operators are not visible in this extraction — confirm
// against the full header.
102 struct __kmp_cmplx32_t : std::complex< float > {
// Default-construct as (0, 0).
104 __kmp_cmplx32_t() : std::complex< float > () {}
// Implicit conversion from the underlying std::complex<float>.
106 __kmp_cmplx32_t(
const std::complex<float>& cf )
107 : std::complex< float > ( cf ) {}
// Same-precision addition via the base type.
109 __kmp_cmplx32_t operator + (
const __kmp_cmplx32_t& b ) {
110 std::complex< float > lhs = *
this;
111 std::complex< float > rhs = b;
112 return ( lhs + rhs );
// Same-precision subtraction via the base type.
114 __kmp_cmplx32_t operator - (
const __kmp_cmplx32_t& b ) {
115 std::complex< float > lhs = *
this;
116 std::complex< float > rhs = b;
117 return ( lhs - rhs );
// Same-precision multiplication via the base type.
119 __kmp_cmplx32_t operator * (
const __kmp_cmplx32_t& b ) {
120 std::complex< float > lhs = *
this;
121 std::complex< float > rhs = b;
122 return ( lhs * rhs );
// Mixed-precision addition: promote to double-complex, add, narrow back.
125 __kmp_cmplx32_t operator + (
const kmp_cmplx64& b ) {
126 kmp_cmplx64 t = kmp_cmplx64( *
this ) + b;
127 std::complex< double > d( t );
128 std::complex< float > f( d );
129 __kmp_cmplx32_t r( f );
// Mixed-precision subtraction: promote, subtract, narrow back.
132 __kmp_cmplx32_t operator - (
const kmp_cmplx64& b ) {
133 kmp_cmplx64 t = kmp_cmplx64( *
this ) - b;
134 std::complex< double > d( t );
135 std::complex< float > f( d );
136 __kmp_cmplx32_t r( f );
// Mixed-precision multiplication: promote, multiply, narrow back.
139 __kmp_cmplx32_t operator * (
const kmp_cmplx64& b ) {
140 kmp_cmplx64 t = kmp_cmplx64( *
this ) * b;
141 std::complex< double > d( t );
142 std::complex< float > f( d );
143 __kmp_cmplx32_t r( f );
// Same-precision in-place division via the explicit helper.
147 void operator /= (
const __kmp_cmplx32_t& rhs ) {
148 std::complex< float > lhs = *
this;
149 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Same-precision binary division via the explicit helper.
152 __kmp_cmplx32_t operator / (
const __kmp_cmplx32_t& rhs ) {
153 std::complex< float > lhs = *
this;
154 return __kmp_lhs_div_rhs( lhs, rhs );
// Mixed-precision in-place division: float lhs, double-complex rhs.
157 void operator /= (
const kmp_cmplx64& rhs ) {
158 std::complex< float > lhs = *
this;
159 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Mixed-precision binary division.
162 __kmp_cmplx32_t operator / (
const kmp_cmplx64& rhs ) {
163 std::complex< float > lhs = *
this;
164 return __kmp_lhs_div_rhs( lhs, rhs );
// Runtime-facing alias for the wrapper type.
167 typedef struct __kmp_cmplx32_t kmp_cmplx32;
// 16-byte-aligned wrapper around std::complex<long double>; only division is
// overridden (routed through __kmp_lhs_div_rhs), everything else comes from
// the base class.  Mirrors __kmp_cmplx64_t above.
// NOTE(review): closing braces of the members/struct are not visible in this
// extraction — confirm against the full header.
170 struct KMP_DO_ALIGN( 16 ) __kmp_cmplx80_t : std::complex< long double > {
// Default-construct as (0, 0).
172 __kmp_cmplx80_t() : std::complex< long double > () {}
// Implicit conversion from the underlying std::complex<long double>.
174 __kmp_cmplx80_t(
const std::complex< long double >& cld )
175 : std::complex< long double > ( cld ) {}
// In-place division delegated to the explicit helper.
177 void operator /= (
const __kmp_cmplx80_t& rhs ) {
178 std::complex< long double > lhs = *
this;
179 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Binary division delegated to the explicit helper.
182 __kmp_cmplx80_t operator / (
const __kmp_cmplx80_t& rhs ) {
183 std::complex< long double > lhs = *
this;
184 return __kmp_lhs_div_rhs( lhs, rhs );
// Runtime-facing alias; alignment repeated on the typedef.
188 typedef KMP_DO_ALIGN( 16 ) struct __kmp_cmplx80_t kmp_cmplx80;
// Wrapper around std::complex<_Quad> (quad-precision complex); only division
// is overridden, routed through __kmp_lhs_div_rhs.  Mirrors the other
// __kmp_cmplx* wrappers above.
// NOTE(review): closing braces of the members/struct are not visible in this
// extraction — confirm against the full header.
192 struct __kmp_cmplx128_t : std::complex< _Quad > {
// Default-construct as (0, 0).
194 __kmp_cmplx128_t() : std::complex< _Quad > () {}
// Implicit conversion from the underlying std::complex<_Quad>.
196 __kmp_cmplx128_t(
const std::complex< _Quad >& cq )
197 : std::complex< _Quad > ( cq ) {}
// In-place division delegated to the explicit helper.
199 void operator /= (
const __kmp_cmplx128_t& rhs ) {
200 std::complex< _Quad > lhs = *
this;
201 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Binary division delegated to the explicit helper.
204 __kmp_cmplx128_t operator / (
const __kmp_cmplx128_t& rhs ) {
205 std::complex< _Quad > lhs = *
this;
206 return __kmp_lhs_div_rhs( lhs, rhs );
// Runtime-facing alias for the wrapper type.
210 typedef struct __kmp_cmplx128_t kmp_cmplx128;
213 #ifdef _DEBUG_TEMPORARILY_UNSET_
214 #undef _DEBUG_TEMPORARILY_UNSET_
221 typedef float _Complex kmp_cmplx32;
222 typedef double _Complex kmp_cmplx64;
223 typedef long double _Complex kmp_cmplx80;
225 typedef _Quad _Complex kmp_cmplx128;
233 #if KMP_ARCH_X86 && KMP_HAVE_QUAD
237 #pragma pack( push, 4 )
// 4-byte-aligned _Quad wrapper (inside a pack(4) region, per the #pragma just
// above) so quad-precision operands can live in legacy 4-byte-aligned
// layouts; arithmetic unwraps to the raw _Quad member `q`, operates, and
// rewraps.
// NOTE(review): the `_Quad rhs = b.q;` lines and the closing braces of each
// operator are not visible in this extraction — confirm against the full
// header.
240 struct KMP_DO_ALIGN( 4 ) Quad_a4_t {
// Value-initialized _Quad.
243 Quad_a4_t( ) : q( ) {}
// Implicit wrap of a raw _Quad.
244 Quad_a4_t(
const _Quad & cq ) : q ( cq ) {}
// Addition on the unwrapped values.
246 Quad_a4_t operator + (
const Quad_a4_t& b ) {
247 _Quad lhs = (*this).q;
249 return (Quad_a4_t)( lhs + rhs );
// Subtraction on the unwrapped values.
252 Quad_a4_t operator - (
const Quad_a4_t& b ) {
253 _Quad lhs = (*this).q;
255 return (Quad_a4_t)( lhs - rhs );
// Multiplication on the unwrapped values.
257 Quad_a4_t operator * (
const Quad_a4_t& b ) {
258 _Quad lhs = (*this).q;
260 return (Quad_a4_t)( lhs * rhs );
// Division on the unwrapped values; no zero-divisor guard.
263 Quad_a4_t operator / (
const Quad_a4_t& b ) {
264 _Quad lhs = (*this).q;
266 return (Quad_a4_t)( lhs / rhs );
// 4-byte-aligned kmp_cmplx128 wrapper (companion to Quad_a4_t above);
// arithmetic unwraps the raw kmp_cmplx128 member `q`, operates, and rewraps.
// NOTE(review): closing braces of the operators/struct are not visible in
// this extraction — confirm against the full header.
271 struct KMP_DO_ALIGN( 4 ) kmp_cmplx128_a4_t {
// Value-initialized complex.
274 kmp_cmplx128_a4_t() : q () {}
// Implicit wrap of a raw kmp_cmplx128.
276 kmp_cmplx128_a4_t(
const kmp_cmplx128 & c128 ) : q ( c128 ) {}
// Addition on the unwrapped values.
278 kmp_cmplx128_a4_t operator + (
const kmp_cmplx128_a4_t& b ) {
279 kmp_cmplx128 lhs = (*this).q;
280 kmp_cmplx128 rhs = b.q;
281 return (kmp_cmplx128_a4_t)( lhs + rhs );
// Subtraction on the unwrapped values.
283 kmp_cmplx128_a4_t operator - (
const kmp_cmplx128_a4_t& b ) {
284 kmp_cmplx128 lhs = (*this).q;
285 kmp_cmplx128 rhs = b.q;
286 return (kmp_cmplx128_a4_t)( lhs - rhs );
// Multiplication on the unwrapped values.
288 kmp_cmplx128_a4_t operator * (
const kmp_cmplx128_a4_t& b ) {
289 kmp_cmplx128 lhs = (*this).q;
290 kmp_cmplx128 rhs = b.q;
291 return (kmp_cmplx128_a4_t)( lhs * rhs );
// Division on the unwrapped values; no zero-divisor guard.
294 kmp_cmplx128_a4_t operator / (
const kmp_cmplx128_a4_t& b ) {
295 kmp_cmplx128 lhs = (*this).q;
296 kmp_cmplx128 rhs = b.q;
297 return (kmp_cmplx128_a4_t)( lhs / rhs );
// 16-byte-aligned _Quad wrapper — same shape as Quad_a4_t above but with the
// natural (16-byte) alignment; arithmetic unwraps the raw _Quad member `q`,
// operates, and rewraps.
// NOTE(review): the `_Quad rhs = b.q;` lines and closing braces of each
// operator are not visible in this extraction — confirm against the full
// header.
305 struct KMP_DO_ALIGN( 16 ) Quad_a16_t {
// Value-initialized _Quad.
308 Quad_a16_t( ) : q( ) {}
// Implicit wrap of a raw _Quad.
309 Quad_a16_t(
const _Quad & cq ) : q ( cq ) {}
// Addition on the unwrapped values.
311 Quad_a16_t operator + (
const Quad_a16_t& b ) {
312 _Quad lhs = (*this).q;
314 return (Quad_a16_t)( lhs + rhs );
// Subtraction on the unwrapped values.
317 Quad_a16_t operator - (
const Quad_a16_t& b ) {
318 _Quad lhs = (*this).q;
320 return (Quad_a16_t)( lhs - rhs );
// Multiplication on the unwrapped values.
322 Quad_a16_t operator * (
const Quad_a16_t& b ) {
323 _Quad lhs = (*this).q;
325 return (Quad_a16_t)( lhs * rhs );
// Division on the unwrapped values; no zero-divisor guard.
328 Quad_a16_t operator / (
const Quad_a16_t& b ) {
329 _Quad lhs = (*this).q;
331 return (Quad_a16_t)( lhs / rhs );
// 16-byte-aligned kmp_cmplx128 wrapper — companion to Quad_a16_t above;
// arithmetic unwraps the raw kmp_cmplx128 member `q`, operates, and rewraps.
// NOTE(review): closing braces of the operators/struct are not visible in
// this extraction — confirm against the full header.
335 struct KMP_DO_ALIGN( 16 ) kmp_cmplx128_a16_t {
// Value-initialized complex.
338 kmp_cmplx128_a16_t() : q () {}
// Implicit wrap of a raw kmp_cmplx128.
340 kmp_cmplx128_a16_t(
const kmp_cmplx128 & c128 ) : q ( c128 ) {}
// Addition on the unwrapped values.
342 kmp_cmplx128_a16_t operator + (
const kmp_cmplx128_a16_t& b ) {
343 kmp_cmplx128 lhs = (*this).q;
344 kmp_cmplx128 rhs = b.q;
345 return (kmp_cmplx128_a16_t)( lhs + rhs );
// Subtraction on the unwrapped values.
347 kmp_cmplx128_a16_t operator - (
const kmp_cmplx128_a16_t& b ) {
348 kmp_cmplx128 lhs = (*this).q;
349 kmp_cmplx128 rhs = b.q;
350 return (kmp_cmplx128_a16_t)( lhs - rhs );
// Multiplication on the unwrapped values.
352 kmp_cmplx128_a16_t operator * (
const kmp_cmplx128_a16_t& b ) {
353 kmp_cmplx128 lhs = (*this).q;
354 kmp_cmplx128 rhs = b.q;
355 return (kmp_cmplx128_a16_t)( lhs * rhs );
// Division on the unwrapped values; no zero-divisor guard.
358 kmp_cmplx128_a16_t operator / (
const kmp_cmplx128_a16_t& b ) {
359 kmp_cmplx128 lhs = (*this).q;
360 kmp_cmplx128 rhs = b.q;
361 return (kmp_cmplx128_a16_t)( lhs / rhs );
368 #define QUAD_LEGACY Quad_a4_t
369 #define CPLX128_LEG kmp_cmplx128_a4_t
371 #define QUAD_LEGACY _Quad
372 #define CPLX128_LEG kmp_cmplx128
// Runtime switch selecting the atomic implementation strategy (its exact
// values are defined elsewhere in the runtime).
379 extern int __kmp_atomic_mode;
// Atomic-section locks are queuing locks; the wrappers below just forward to
// the queuing-lock primitives so the lock flavor can be swapped in one place.
385 typedef kmp_queuing_lock_t kmp_atomic_lock_t;
// NOTE(review): the return-type lines (e.g. `static inline void`) and the
// braces of these five forwarding wrappers are not visible in this
// extraction — confirm against the full header.
// Acquire: blocks until the lock is owned by gtid.
388 __kmp_acquire_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
390 __kmp_acquire_queuing_lock( lck, gtid );
// Try-acquire: forwards the queuing lock's success/failure result.
394 __kmp_test_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
396 return __kmp_test_queuing_lock( lck, gtid );
// Release the lock owned by gtid.
400 __kmp_release_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
402 __kmp_release_queuing_lock( lck, gtid );
// One-time lock initialization.
406 __kmp_init_atomic_lock( kmp_atomic_lock_t *lck )
408 __kmp_init_queuing_lock( lck );
// Tear-down; the lock must not be destroyed while held.
412 __kmp_destroy_atomic_lock( kmp_atomic_lock_t *lck )
414 __kmp_destroy_queuing_lock( lck );
// Global locks guarding the critical-section fallback paths of the
// __kmpc_atomic_* entry points declared below.  The suffixes presumably
// encode the operand category and byte size (e.g. 4i = 4-byte integer,
// 8r = 8-byte real, 16c = 16-byte complex) — TODO confirm against the
// definitions in the corresponding .cpp file.
419 extern kmp_atomic_lock_t __kmp_atomic_lock;
420 extern kmp_atomic_lock_t __kmp_atomic_lock_1i;
421 extern kmp_atomic_lock_t __kmp_atomic_lock_2i;
422 extern kmp_atomic_lock_t __kmp_atomic_lock_4i;
423 extern kmp_atomic_lock_t __kmp_atomic_lock_4r;
424 extern kmp_atomic_lock_t __kmp_atomic_lock_8i;
425 extern kmp_atomic_lock_t __kmp_atomic_lock_8r;
426 extern kmp_atomic_lock_t __kmp_atomic_lock_8c;
427 extern kmp_atomic_lock_t __kmp_atomic_lock_10r;
428 extern kmp_atomic_lock_t __kmp_atomic_lock_16r;
429 extern kmp_atomic_lock_t __kmp_atomic_lock_16c;
430 extern kmp_atomic_lock_t __kmp_atomic_lock_20c;
431 extern kmp_atomic_lock_t __kmp_atomic_lock_32c;
438 void __kmpc_atomic_fixed1_add(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
439 void __kmpc_atomic_fixed1_andb(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
440 void __kmpc_atomic_fixed1_div(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
441 void __kmpc_atomic_fixed1u_div(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
442 void __kmpc_atomic_fixed1_mul(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
443 void __kmpc_atomic_fixed1_orb(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
444 void __kmpc_atomic_fixed1_shl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
445 void __kmpc_atomic_fixed1_shr(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
446 void __kmpc_atomic_fixed1u_shr(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
447 void __kmpc_atomic_fixed1_sub(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
448 void __kmpc_atomic_fixed1_xor(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
450 void __kmpc_atomic_fixed2_add(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
451 void __kmpc_atomic_fixed2_andb(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
452 void __kmpc_atomic_fixed2_div(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
453 void __kmpc_atomic_fixed2u_div(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
454 void __kmpc_atomic_fixed2_mul(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
455 void __kmpc_atomic_fixed2_orb(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
456 void __kmpc_atomic_fixed2_shl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
457 void __kmpc_atomic_fixed2_shr(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
458 void __kmpc_atomic_fixed2u_shr(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
459 void __kmpc_atomic_fixed2_sub(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
460 void __kmpc_atomic_fixed2_xor(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
462 void __kmpc_atomic_fixed4_add(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
463 void __kmpc_atomic_fixed4_sub(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
465 void __kmpc_atomic_float4_add(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
466 void __kmpc_atomic_float4_sub(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
468 void __kmpc_atomic_fixed8_add(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
469 void __kmpc_atomic_fixed8_sub(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
471 void __kmpc_atomic_float8_add(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
472 void __kmpc_atomic_float8_sub(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
474 void __kmpc_atomic_fixed4_andb(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
475 void __kmpc_atomic_fixed4_div(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
476 void __kmpc_atomic_fixed4u_div(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
477 void __kmpc_atomic_fixed4_mul(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
478 void __kmpc_atomic_fixed4_orb(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
479 void __kmpc_atomic_fixed4_shl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
480 void __kmpc_atomic_fixed4_shr(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
481 void __kmpc_atomic_fixed4u_shr(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
482 void __kmpc_atomic_fixed4_xor(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
484 void __kmpc_atomic_fixed8_andb(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
485 void __kmpc_atomic_fixed8_div(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
486 void __kmpc_atomic_fixed8u_div(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
487 void __kmpc_atomic_fixed8_mul(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
488 void __kmpc_atomic_fixed8_orb(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
489 void __kmpc_atomic_fixed8_shl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
490 void __kmpc_atomic_fixed8_shr(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
491 void __kmpc_atomic_fixed8u_shr(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
492 void __kmpc_atomic_fixed8_xor(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
494 void __kmpc_atomic_float4_div(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
495 void __kmpc_atomic_float4_mul(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
497 void __kmpc_atomic_float8_div(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
498 void __kmpc_atomic_float8_mul(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
500 void __kmpc_atomic_fixed1_andl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
501 void __kmpc_atomic_fixed1_orl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
502 void __kmpc_atomic_fixed2_andl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
503 void __kmpc_atomic_fixed2_orl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
504 void __kmpc_atomic_fixed4_andl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
505 void __kmpc_atomic_fixed4_orl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
506 void __kmpc_atomic_fixed8_andl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
507 void __kmpc_atomic_fixed8_orl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
509 void __kmpc_atomic_fixed1_max(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
510 void __kmpc_atomic_fixed1_min(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
511 void __kmpc_atomic_fixed2_max(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
512 void __kmpc_atomic_fixed2_min(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
513 void __kmpc_atomic_fixed4_max(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
514 void __kmpc_atomic_fixed4_min(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
515 void __kmpc_atomic_fixed8_max(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
516 void __kmpc_atomic_fixed8_min(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
517 void __kmpc_atomic_float4_max(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
518 void __kmpc_atomic_float4_min(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
519 void __kmpc_atomic_float8_max(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
520 void __kmpc_atomic_float8_min(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
522 void __kmpc_atomic_float16_max(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
523 void __kmpc_atomic_float16_min(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
526 void __kmpc_atomic_float16_max_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
527 void __kmpc_atomic_float16_min_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
531 void __kmpc_atomic_fixed1_neqv(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
532 void __kmpc_atomic_fixed2_neqv(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
533 void __kmpc_atomic_fixed4_neqv(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
534 void __kmpc_atomic_fixed8_neqv(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
536 void __kmpc_atomic_fixed1_eqv(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
537 void __kmpc_atomic_fixed2_eqv(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
538 void __kmpc_atomic_fixed4_eqv(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
539 void __kmpc_atomic_fixed8_eqv(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
541 void __kmpc_atomic_float10_add(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
542 void __kmpc_atomic_float10_sub(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
543 void __kmpc_atomic_float10_mul(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
544 void __kmpc_atomic_float10_div(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
547 void __kmpc_atomic_float16_add(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
548 void __kmpc_atomic_float16_sub(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
549 void __kmpc_atomic_float16_mul(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
550 void __kmpc_atomic_float16_div(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
553 void __kmpc_atomic_float16_add_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
554 void __kmpc_atomic_float16_sub_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
555 void __kmpc_atomic_float16_mul_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
556 void __kmpc_atomic_float16_div_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
560 void __kmpc_atomic_cmplx4_add(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
561 void __kmpc_atomic_cmplx4_sub(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
562 void __kmpc_atomic_cmplx4_mul(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
563 void __kmpc_atomic_cmplx4_div(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
564 void __kmpc_atomic_cmplx8_add(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
565 void __kmpc_atomic_cmplx8_sub(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
566 void __kmpc_atomic_cmplx8_mul(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
567 void __kmpc_atomic_cmplx8_div(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
568 void __kmpc_atomic_cmplx10_add(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
569 void __kmpc_atomic_cmplx10_sub(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
570 void __kmpc_atomic_cmplx10_mul(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
571 void __kmpc_atomic_cmplx10_div(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
573 void __kmpc_atomic_cmplx16_add(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
574 void __kmpc_atomic_cmplx16_sub(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
575 void __kmpc_atomic_cmplx16_mul(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
576 void __kmpc_atomic_cmplx16_div(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
579 void __kmpc_atomic_cmplx16_add_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
580 void __kmpc_atomic_cmplx16_sub_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
581 void __kmpc_atomic_cmplx16_mul_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
582 void __kmpc_atomic_cmplx16_div_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
590 #if KMP_ARCH_X86 || KMP_ARCH_X86_64
592 void __kmpc_atomic_fixed1_sub_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
593 void __kmpc_atomic_fixed1_div_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
594 void __kmpc_atomic_fixed1u_div_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
595 void __kmpc_atomic_fixed1_shl_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
596 void __kmpc_atomic_fixed1_shr_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
597 void __kmpc_atomic_fixed1u_shr_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
598 void __kmpc_atomic_fixed2_sub_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
599 void __kmpc_atomic_fixed2_div_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
600 void __kmpc_atomic_fixed2u_div_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
601 void __kmpc_atomic_fixed2_shl_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
602 void __kmpc_atomic_fixed2_shr_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
603 void __kmpc_atomic_fixed2u_shr_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
604 void __kmpc_atomic_fixed4_sub_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
605 void __kmpc_atomic_fixed4_div_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
606 void __kmpc_atomic_fixed4u_div_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
607 void __kmpc_atomic_fixed4_shl_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
608 void __kmpc_atomic_fixed4_shr_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
609 void __kmpc_atomic_fixed4u_shr_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
610 void __kmpc_atomic_fixed8_sub_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
611 void __kmpc_atomic_fixed8_div_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
612 void __kmpc_atomic_fixed8u_div_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
613 void __kmpc_atomic_fixed8_shl_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
614 void __kmpc_atomic_fixed8_shr_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
615 void __kmpc_atomic_fixed8u_shr_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
616 void __kmpc_atomic_float4_sub_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
617 void __kmpc_atomic_float4_div_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
618 void __kmpc_atomic_float8_sub_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
619 void __kmpc_atomic_float8_div_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
620 void __kmpc_atomic_float10_sub_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
621 void __kmpc_atomic_float10_div_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
623 void __kmpc_atomic_float16_sub_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
624 void __kmpc_atomic_float16_div_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
626 void __kmpc_atomic_cmplx4_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
627 void __kmpc_atomic_cmplx4_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
628 void __kmpc_atomic_cmplx8_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
629 void __kmpc_atomic_cmplx8_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
630 void __kmpc_atomic_cmplx10_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
631 void __kmpc_atomic_cmplx10_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
633 void __kmpc_atomic_cmplx16_sub_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
634 void __kmpc_atomic_cmplx16_div_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
637 void __kmpc_atomic_float16_sub_a16_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
638 void __kmpc_atomic_float16_div_a16_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
639 void __kmpc_atomic_cmplx16_sub_a16_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
640 void __kmpc_atomic_cmplx16_div_a16_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
642 #endif // KMP_HAVE_QUAD
644 #endif //KMP_ARCH_X86 || KMP_ARCH_X86_64
646 #endif //OMP_40_ENABLED
651 void __kmpc_atomic_fixed1_mul_float8(
ident_t *id_ref,
int gtid,
char * lhs, kmp_real64 rhs );
652 void __kmpc_atomic_fixed1_div_float8(
ident_t *id_ref,
int gtid,
char * lhs, kmp_real64 rhs );
653 void __kmpc_atomic_fixed2_mul_float8(
ident_t *id_ref,
int gtid,
short * lhs, kmp_real64 rhs );
654 void __kmpc_atomic_fixed2_div_float8(
ident_t *id_ref,
int gtid,
short * lhs, kmp_real64 rhs );
655 void __kmpc_atomic_fixed4_mul_float8(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_real64 rhs );
656 void __kmpc_atomic_fixed4_div_float8(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_real64 rhs );
657 void __kmpc_atomic_fixed8_mul_float8(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_real64 rhs );
658 void __kmpc_atomic_fixed8_div_float8(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_real64 rhs );
659 void __kmpc_atomic_float4_add_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
660 void __kmpc_atomic_float4_sub_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
661 void __kmpc_atomic_float4_mul_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
662 void __kmpc_atomic_float4_div_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
666 void __kmpc_atomic_fixed1_add_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
667 void __kmpc_atomic_fixed1_sub_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
668 void __kmpc_atomic_fixed1_mul_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
669 void __kmpc_atomic_fixed1_div_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
670 void __kmpc_atomic_fixed1u_div_fp(
ident_t *id_ref,
int gtid,
unsigned char * lhs, _Quad rhs );
672 void __kmpc_atomic_fixed2_add_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
673 void __kmpc_atomic_fixed2_sub_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
674 void __kmpc_atomic_fixed2_mul_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
675 void __kmpc_atomic_fixed2_div_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
676 void __kmpc_atomic_fixed2u_div_fp(
ident_t *id_ref,
int gtid,
unsigned short * lhs, _Quad rhs );
678 void __kmpc_atomic_fixed4_add_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
679 void __kmpc_atomic_fixed4_sub_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
680 void __kmpc_atomic_fixed4_mul_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
681 void __kmpc_atomic_fixed4_div_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
682 void __kmpc_atomic_fixed4u_div_fp(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, _Quad rhs );
684 void __kmpc_atomic_fixed8_add_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
685 void __kmpc_atomic_fixed8_sub_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
686 void __kmpc_atomic_fixed8_mul_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
687 void __kmpc_atomic_fixed8_div_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
688 void __kmpc_atomic_fixed8u_div_fp(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, _Quad rhs );
690 void __kmpc_atomic_float4_add_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
691 void __kmpc_atomic_float4_sub_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
692 void __kmpc_atomic_float4_mul_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
693 void __kmpc_atomic_float4_div_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
695 void __kmpc_atomic_float8_add_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
696 void __kmpc_atomic_float8_sub_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
697 void __kmpc_atomic_float8_mul_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
698 void __kmpc_atomic_float8_div_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
700 void __kmpc_atomic_float10_add_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
701 void __kmpc_atomic_float10_sub_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
702 void __kmpc_atomic_float10_mul_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
703 void __kmpc_atomic_float10_div_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
704 #endif // KMP_HAVE_QUAD
707 void __kmpc_atomic_cmplx4_add_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
708 void __kmpc_atomic_cmplx4_sub_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
709 void __kmpc_atomic_cmplx4_mul_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
710 void __kmpc_atomic_cmplx4_div_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
713 void __kmpc_atomic_1(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
714 void __kmpc_atomic_2(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
715 void __kmpc_atomic_4(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
716 void __kmpc_atomic_8(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
717 void __kmpc_atomic_10(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
718 void __kmpc_atomic_16(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
719 void __kmpc_atomic_20(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
720 void __kmpc_atomic_32(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
723 #if KMP_ARCH_X86 || KMP_ARCH_X86_64
729 char __kmpc_atomic_fixed1_rd(
ident_t *id_ref,
int gtid,
char * loc );
730 short __kmpc_atomic_fixed2_rd(
ident_t *id_ref,
int gtid,
short * loc );
731 kmp_int32 __kmpc_atomic_fixed4_rd(
ident_t *id_ref,
int gtid, kmp_int32 * loc );
732 kmp_int64 __kmpc_atomic_fixed8_rd(
ident_t *id_ref,
int gtid, kmp_int64 * loc );
733 kmp_real32 __kmpc_atomic_float4_rd(
ident_t *id_ref,
int gtid, kmp_real32 * loc );
734 kmp_real64 __kmpc_atomic_float8_rd(
ident_t *id_ref,
int gtid, kmp_real64 * loc );
735 long double __kmpc_atomic_float10_rd(
ident_t *id_ref,
int gtid,
long double * loc );
737 QUAD_LEGACY __kmpc_atomic_float16_rd(
ident_t *id_ref,
int gtid, QUAD_LEGACY * loc );
741 #if ( KMP_OS_WINDOWS )
742 void __kmpc_atomic_cmplx4_rd( kmp_cmplx32 * out,
ident_t *id_ref,
int gtid, kmp_cmplx32 * loc );
744 kmp_cmplx32 __kmpc_atomic_cmplx4_rd(
ident_t *id_ref,
int gtid, kmp_cmplx32 * loc );
746 kmp_cmplx64 __kmpc_atomic_cmplx8_rd(
ident_t *id_ref,
int gtid, kmp_cmplx64 * loc );
747 kmp_cmplx80 __kmpc_atomic_cmplx10_rd(
ident_t *id_ref,
int gtid, kmp_cmplx80 * loc );
749 CPLX128_LEG __kmpc_atomic_cmplx16_rd(
ident_t *id_ref,
int gtid, CPLX128_LEG * loc );
752 Quad_a16_t __kmpc_atomic_float16_a16_rd(
ident_t * id_ref,
int gtid, Quad_a16_t * loc );
753 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_rd(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * loc );
762 void __kmpc_atomic_fixed1_wr(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
763 void __kmpc_atomic_fixed2_wr(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
764 void __kmpc_atomic_fixed4_wr(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
765 void __kmpc_atomic_fixed8_wr(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
766 void __kmpc_atomic_float4_wr(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
767 void __kmpc_atomic_float8_wr(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
768 void __kmpc_atomic_float10_wr(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
770 void __kmpc_atomic_float16_wr(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
772 void __kmpc_atomic_cmplx4_wr(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
773 void __kmpc_atomic_cmplx8_wr(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
774 void __kmpc_atomic_cmplx10_wr(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
776 void __kmpc_atomic_cmplx16_wr(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
779 void __kmpc_atomic_float16_a16_wr(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
780 void __kmpc_atomic_cmplx16_a16_wr(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
789 char __kmpc_atomic_fixed1_add_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
790 char __kmpc_atomic_fixed1_andb_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
791 char __kmpc_atomic_fixed1_div_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
792 unsigned char __kmpc_atomic_fixed1u_div_cpt(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag);
793 char __kmpc_atomic_fixed1_mul_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
794 char __kmpc_atomic_fixed1_orb_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
795 char __kmpc_atomic_fixed1_shl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
796 char __kmpc_atomic_fixed1_shr_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
797 unsigned char __kmpc_atomic_fixed1u_shr_cpt(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag);
798 char __kmpc_atomic_fixed1_sub_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
799 char __kmpc_atomic_fixed1_xor_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
801 short __kmpc_atomic_fixed2_add_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
802 short __kmpc_atomic_fixed2_andb_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
803 short __kmpc_atomic_fixed2_div_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
804 unsigned short __kmpc_atomic_fixed2u_div_cpt(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag);
805 short __kmpc_atomic_fixed2_mul_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
806 short __kmpc_atomic_fixed2_orb_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
807 short __kmpc_atomic_fixed2_shl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
808 short __kmpc_atomic_fixed2_shr_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
809 unsigned short __kmpc_atomic_fixed2u_shr_cpt(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag);
810 short __kmpc_atomic_fixed2_sub_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
811 short __kmpc_atomic_fixed2_xor_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
813 kmp_int32 __kmpc_atomic_fixed4_add_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
814 kmp_int32 __kmpc_atomic_fixed4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
816 kmp_real32 __kmpc_atomic_float4_add_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
817 kmp_real32 __kmpc_atomic_float4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
819 kmp_int64 __kmpc_atomic_fixed8_add_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
820 kmp_int64 __kmpc_atomic_fixed8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
822 kmp_real64 __kmpc_atomic_float8_add_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
823 kmp_real64 __kmpc_atomic_float8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
825 kmp_int32 __kmpc_atomic_fixed4_andb_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
826 kmp_int32 __kmpc_atomic_fixed4_div_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
827 kmp_uint32 __kmpc_atomic_fixed4u_div_cpt(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag);
828 kmp_int32 __kmpc_atomic_fixed4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
829 kmp_int32 __kmpc_atomic_fixed4_orb_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
830 kmp_int32 __kmpc_atomic_fixed4_shl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
831 kmp_int32 __kmpc_atomic_fixed4_shr_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
832 kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag);
833 kmp_int32 __kmpc_atomic_fixed4_xor_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
835 kmp_int64 __kmpc_atomic_fixed8_andb_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
836 kmp_int64 __kmpc_atomic_fixed8_div_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
837 kmp_uint64 __kmpc_atomic_fixed8u_div_cpt(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag);
838 kmp_int64 __kmpc_atomic_fixed8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
839 kmp_int64 __kmpc_atomic_fixed8_orb_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
840 kmp_int64 __kmpc_atomic_fixed8_shl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
841 kmp_int64 __kmpc_atomic_fixed8_shr_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
842 kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag);
843 kmp_int64 __kmpc_atomic_fixed8_xor_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
845 kmp_real32 __kmpc_atomic_float4_div_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
846 kmp_real32 __kmpc_atomic_float4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
848 kmp_real64 __kmpc_atomic_float8_div_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
849 kmp_real64 __kmpc_atomic_float8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
851 char __kmpc_atomic_fixed1_andl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
852 char __kmpc_atomic_fixed1_orl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
853 short __kmpc_atomic_fixed2_andl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
854 short __kmpc_atomic_fixed2_orl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
855 kmp_int32 __kmpc_atomic_fixed4_andl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
856 kmp_int32 __kmpc_atomic_fixed4_orl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
857 kmp_int64 __kmpc_atomic_fixed8_andl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
858 kmp_int64 __kmpc_atomic_fixed8_orl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
860 char __kmpc_atomic_fixed1_max_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
861 char __kmpc_atomic_fixed1_min_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
862 short __kmpc_atomic_fixed2_max_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
863 short __kmpc_atomic_fixed2_min_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
864 kmp_int32 __kmpc_atomic_fixed4_max_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
865 kmp_int32 __kmpc_atomic_fixed4_min_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
866 kmp_int64 __kmpc_atomic_fixed8_max_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
867 kmp_int64 __kmpc_atomic_fixed8_min_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
868 kmp_real32 __kmpc_atomic_float4_max_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
869 kmp_real32 __kmpc_atomic_float4_min_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
870 kmp_real64 __kmpc_atomic_float8_max_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
871 kmp_real64 __kmpc_atomic_float8_min_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
873 QUAD_LEGACY __kmpc_atomic_float16_max_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
874 QUAD_LEGACY __kmpc_atomic_float16_min_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
877 char __kmpc_atomic_fixed1_neqv_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
878 short __kmpc_atomic_fixed2_neqv_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
879 kmp_int32 __kmpc_atomic_fixed4_neqv_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
880 kmp_int64 __kmpc_atomic_fixed8_neqv_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
882 char __kmpc_atomic_fixed1_eqv_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
883 short __kmpc_atomic_fixed2_eqv_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
884 kmp_int32 __kmpc_atomic_fixed4_eqv_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
885 kmp_int64 __kmpc_atomic_fixed8_eqv_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
887 long double __kmpc_atomic_float10_add_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
888 long double __kmpc_atomic_float10_sub_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
889 long double __kmpc_atomic_float10_mul_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
890 long double __kmpc_atomic_float10_div_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
893 QUAD_LEGACY __kmpc_atomic_float16_add_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
894 QUAD_LEGACY __kmpc_atomic_float16_sub_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
895 QUAD_LEGACY __kmpc_atomic_float16_mul_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
896 QUAD_LEGACY __kmpc_atomic_float16_div_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
900 void __kmpc_atomic_cmplx4_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
901 void __kmpc_atomic_cmplx4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
902 void __kmpc_atomic_cmplx4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
903 void __kmpc_atomic_cmplx4_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
905 kmp_cmplx64 __kmpc_atomic_cmplx8_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
906 kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
907 kmp_cmplx64 __kmpc_atomic_cmplx8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
908 kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
909 kmp_cmplx80 __kmpc_atomic_cmplx10_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
910 kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
911 kmp_cmplx80 __kmpc_atomic_cmplx10_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
912 kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
914 CPLX128_LEG __kmpc_atomic_cmplx16_add_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
915 CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
916 CPLX128_LEG __kmpc_atomic_cmplx16_mul_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
917 CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
920 Quad_a16_t __kmpc_atomic_float16_add_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
921 Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
922 Quad_a16_t __kmpc_atomic_float16_mul_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
923 Quad_a16_t __kmpc_atomic_float16_div_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
924 Quad_a16_t __kmpc_atomic_float16_max_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
925 Quad_a16_t __kmpc_atomic_float16_min_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
926 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_add_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
927 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
928 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_mul_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
929 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
// Bracketing entry points for a generic atomic region (no arguments).
void __kmpc_atomic_start( void );
void __kmpc_atomic_end( void );
940 char __kmpc_atomic_fixed1_sub_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
941 char __kmpc_atomic_fixed1_div_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
942 unsigned char __kmpc_atomic_fixed1u_div_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag );
943 char __kmpc_atomic_fixed1_shl_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs ,
int flag);
944 char __kmpc_atomic_fixed1_shr_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
945 unsigned char __kmpc_atomic_fixed1u_shr_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag );
946 short __kmpc_atomic_fixed2_sub_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
947 short __kmpc_atomic_fixed2_div_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
948 unsigned short __kmpc_atomic_fixed2u_div_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag );
949 short __kmpc_atomic_fixed2_shl_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
950 short __kmpc_atomic_fixed2_shr_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
951 unsigned short __kmpc_atomic_fixed2u_shr_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag );
952 kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
953 kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
954 kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag );
955 kmp_int32 __kmpc_atomic_fixed4_shl_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
956 kmp_int32 __kmpc_atomic_fixed4_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
957 kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag );
958 kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
959 kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
960 kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag );
961 kmp_int64 __kmpc_atomic_fixed8_shl_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
962 kmp_int64 __kmpc_atomic_fixed8_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
963 kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag );
964 float __kmpc_atomic_float4_sub_cpt_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs,
int flag );
965 float __kmpc_atomic_float4_div_cpt_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs,
int flag );
966 double __kmpc_atomic_float8_sub_cpt_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs,
int flag );
967 double __kmpc_atomic_float8_div_cpt_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs,
int flag );
968 long double __kmpc_atomic_float10_sub_cpt_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag );
969 long double __kmpc_atomic_float10_div_cpt_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag );
971 QUAD_LEGACY __kmpc_atomic_float16_sub_cpt_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag );
972 QUAD_LEGACY __kmpc_atomic_float16_div_cpt_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag );
975 void __kmpc_atomic_cmplx4_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag );
976 void __kmpc_atomic_cmplx4_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag );
977 kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag );
978 kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag );
979 kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag );
980 kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag );
982 CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag );
983 CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag );
985 Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag );
986 Quad_a16_t __kmpc_atomic_float16_div_a16_cpt_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag );
987 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag );
988 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag );
993 char __kmpc_atomic_fixed1_swp(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
994 short __kmpc_atomic_fixed2_swp(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
995 kmp_int32 __kmpc_atomic_fixed4_swp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
996 kmp_int64 __kmpc_atomic_fixed8_swp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
997 float __kmpc_atomic_float4_swp(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
998 double __kmpc_atomic_float8_swp(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
999 long double __kmpc_atomic_float10_swp(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
1001 QUAD_LEGACY __kmpc_atomic_float16_swp(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
1004 void __kmpc_atomic_cmplx4_swp(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out );
1007 kmp_cmplx64 __kmpc_atomic_cmplx8_swp(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
1008 kmp_cmplx80 __kmpc_atomic_cmplx10_swp(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
1010 CPLX128_LEG __kmpc_atomic_cmplx16_swp(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
1011 #if ( KMP_ARCH_X86 )
1012 Quad_a16_t __kmpc_atomic_float16_a16_swp(
ident_t *id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
1013 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_swp(
ident_t *id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
1019 #endif //OMP_40_ENABLED
1021 #endif //KMP_ARCH_X86 || KMP_ARCH_X86_64