// GNU compilers (excluding the Intel compiler, which also defines __GNUC__)
// have no native _Quad type, so map it onto GCC's __float128.
52 #if defined( __GNUC__ ) && !defined( __INTEL_COMPILER )
53 typedef __float128 _Quad;
// The C++ complex wrapper structs below are compiled only for C++ builds on
// Windows (elsewhere the native C99 _Complex types further down are used).
56 #if defined( __cplusplus ) && ( KMP_OS_WINDOWS )
// NOTE(review): marker macro — _DEBUG appears to be temporarily unset around
// this region and restored where the matching #ifdef/#undef appears below;
// the lines that unset/restore _DEBUG itself are elided from this view.
64 #define _DEBUG_TEMPORARILY_UNSET_
// Divides two complex numbers using the textbook formula
//   (a + b*i) / (c + d*i) = ((a*c + b*d) + (b*c - a*d)*i) / (c*c + d*d)
// The lhs and rhs element types may differ; the result keeps the lhs type.
// NOTE(review): no Smith-style scaling is applied, so den = c*c + d*d can
// overflow/underflow for very large or very small operands, and den == 0
// produces inf/nan components.
69 template<
typename type_lhs,
typename type_rhs >
70 std::complex< type_lhs > __kmp_lhs_div_rhs(
71 const std::complex< type_lhs >& lhs,
72 const std::complex< type_rhs >& rhs ) {
73 type_lhs a = lhs.real(); // real part of the dividend
74 type_lhs b = lhs.imag(); // imaginary part of the dividend
75 type_rhs c = rhs.real(); // real part of the divisor
76 type_rhs d = rhs.imag(); // imaginary part of the divisor
77 type_rhs den = c*c + d*d; // |rhs|^2 -- shared denominator
78 type_rhs r = ( a*c + b*d ); // numerator of the real part
79 type_rhs i = ( b*c - a*d ); // numerator of the imaginary part
// The `return ret;` and closing brace are elided from this view.
80 std::complex< type_lhs > ret( r/den, i/den );
// std::complex< double > wrapper whose division operators are routed through
// the hand-rolled __kmp_lhs_div_rhs() above instead of the library divide,
// keeping complex division results consistent across compilers/runtimes.
// NOTE(review): closing braces of the operator bodies and of the struct are
// elided from this view.
85 struct __kmp_cmplx64_t : std::complex< double > {
// Default-construct as (0, 0).
87 __kmp_cmplx64_t() : std::complex< double > () {}
// Implicit conversion from the underlying std::complex< double >.
89 __kmp_cmplx64_t(
const std::complex< double >& cd )
90 : std::complex< double > ( cd ) {}
// In-place division via __kmp_lhs_div_rhs().
92 void operator /= (
const __kmp_cmplx64_t& rhs ) {
93 std::complex< double > lhs = *
this;
94 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Binary division, also via __kmp_lhs_div_rhs().
97 __kmp_cmplx64_t operator / (
const __kmp_cmplx64_t& rhs ) {
98 std::complex< double > lhs = *
this;
99 return __kmp_lhs_div_rhs( lhs, rhs );
// Name used by the rest of the runtime for the 64-bit (2 x double) complex.
103 typedef struct __kmp_cmplx64_t kmp_cmplx64;
// std::complex< float > wrapper. Besides routing division through
// __kmp_lhs_div_rhs(), it defines mixed-precision +,-,* with kmp_cmplx64:
// the operation is carried out in double precision and narrowed back to
// float, matching Fortran mixed complex(4)/complex(8) semantics.
// NOTE(review): `return r;` statements and closing braces are elided from
// this view at several points below.
106 struct __kmp_cmplx32_t : std::complex< float > {
// Default-construct as (0, 0).
108 __kmp_cmplx32_t() : std::complex< float > () {}
// Implicit conversion from the underlying std::complex< float >.
110 __kmp_cmplx32_t(
const std::complex<float>& cf )
111 : std::complex< float > ( cf ) {}
// Same-precision addition, delegated to std::complex.
113 __kmp_cmplx32_t operator + (
const __kmp_cmplx32_t& b ) {
114 std::complex< float > lhs = *
this;
115 std::complex< float > rhs = b;
116 return ( lhs + rhs );
// Same-precision subtraction, delegated to std::complex.
118 __kmp_cmplx32_t operator - (
const __kmp_cmplx32_t& b ) {
119 std::complex< float > lhs = *
this;
120 std::complex< float > rhs = b;
121 return ( lhs - rhs );
// Same-precision multiplication, delegated to std::complex.
123 __kmp_cmplx32_t operator * (
const __kmp_cmplx32_t& b ) {
124 std::complex< float > lhs = *
this;
125 std::complex< float > rhs = b;
126 return ( lhs * rhs );
// Mixed-precision addition: compute in double, narrow to float.
129 __kmp_cmplx32_t operator + (
const kmp_cmplx64& b ) {
130 kmp_cmplx64 t = kmp_cmplx64( *
this ) + b;
131 std::complex< double > d( t );
132 std::complex< float > f( d );
133 __kmp_cmplx32_t r( f );
// Mixed-precision subtraction: compute in double, narrow to float.
136 __kmp_cmplx32_t operator - (
const kmp_cmplx64& b ) {
137 kmp_cmplx64 t = kmp_cmplx64( *
this ) - b;
138 std::complex< double > d( t );
139 std::complex< float > f( d );
140 __kmp_cmplx32_t r( f );
// Mixed-precision multiplication: compute in double, narrow to float.
143 __kmp_cmplx32_t operator * (
const kmp_cmplx64& b ) {
144 kmp_cmplx64 t = kmp_cmplx64( *
this ) * b;
145 std::complex< double > d( t );
146 std::complex< float > f( d );
147 __kmp_cmplx32_t r( f );
// In-place same-precision division via __kmp_lhs_div_rhs().
151 void operator /= (
const __kmp_cmplx32_t& rhs ) {
152 std::complex< float > lhs = *
this;
153 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Binary same-precision division via __kmp_lhs_div_rhs().
156 __kmp_cmplx32_t operator / (
const __kmp_cmplx32_t& rhs ) {
157 std::complex< float > lhs = *
this;
158 return __kmp_lhs_div_rhs( lhs, rhs );
// In-place mixed-precision division (float lhs, double rhs).
161 void operator /= (
const kmp_cmplx64& rhs ) {
162 std::complex< float > lhs = *
this;
163 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Binary mixed-precision division (float lhs, double rhs).
166 __kmp_cmplx32_t operator / (
const kmp_cmplx64& rhs ) {
167 std::complex< float > lhs = *
this;
168 return __kmp_lhs_div_rhs( lhs, rhs );
// Name used by the rest of the runtime for the 32-bit (2 x float) complex.
171 typedef struct __kmp_cmplx32_t kmp_cmplx32;
// 16-byte-aligned std::complex< long double > wrapper; division is routed
// through __kmp_lhs_div_rhs() like the other wrappers. The explicit
// KMP_DO_ALIGN(16) pins the layout of the 80-bit-extended complex type.
// NOTE(review): closing braces are elided from this view.
174 struct KMP_DO_ALIGN( 16 ) __kmp_cmplx80_t : std::complex< long double > {
// Default-construct as (0, 0).
176 __kmp_cmplx80_t() : std::complex< long double > () {}
// Implicit conversion from the underlying std::complex< long double >.
178 __kmp_cmplx80_t(
const std::complex< long double >& cld )
179 : std::complex< long double > ( cld ) {}
// In-place division via __kmp_lhs_div_rhs().
181 void operator /= (
const __kmp_cmplx80_t& rhs ) {
182 std::complex< long double > lhs = *
this;
183 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Binary division via __kmp_lhs_div_rhs().
186 __kmp_cmplx80_t operator / (
const __kmp_cmplx80_t& rhs ) {
187 std::complex< long double > lhs = *
this;
188 return __kmp_lhs_div_rhs( lhs, rhs );
// Runtime-facing alias; alignment repeated on the typedef for MSVC-style use.
192 typedef KMP_DO_ALIGN( 16 ) struct __kmp_cmplx80_t kmp_cmplx80;
// std::complex< _Quad > (128-bit) wrapper; division routed through
// __kmp_lhs_div_rhs() like the other wrappers above.
// NOTE(review): closing braces are elided from this view.
195 struct __kmp_cmplx128_t : std::complex< _Quad > {
// Default-construct as (0, 0).
197 __kmp_cmplx128_t() : std::complex< _Quad > () {}
// Implicit conversion from the underlying std::complex< _Quad >.
199 __kmp_cmplx128_t(
const std::complex< _Quad >& cq )
200 : std::complex< _Quad > ( cq ) {}
// In-place division via __kmp_lhs_div_rhs().
202 void operator /= (
const __kmp_cmplx128_t& rhs ) {
203 std::complex< _Quad > lhs = *
this;
204 *
this = __kmp_lhs_div_rhs( lhs, rhs );
// Binary division via __kmp_lhs_div_rhs().
207 __kmp_cmplx128_t operator / (
const __kmp_cmplx128_t& rhs ) {
208 std::complex< _Quad > lhs = *
this;
209 return __kmp_lhs_div_rhs( lhs, rhs );
// Runtime-facing alias for the 128-bit complex type.
213 typedef struct __kmp_cmplx128_t kmp_cmplx128;
// Clean up the marker macro set near the top of this region (its companion
// logic restoring _DEBUG is elided from this view).
215 #ifdef _DEBUG_TEMPORARILY_UNSET_
216 #undef _DEBUG_TEMPORARILY_UNSET_
// Non-Windows/C branch: use the native C99 _Complex types directly instead
// of the std::complex wrapper structs defined above.
223 typedef float _Complex kmp_cmplx32;
224 typedef double _Complex kmp_cmplx64;
225 typedef long double _Complex kmp_cmplx80;
226 typedef _Quad _Complex kmp_cmplx128;
// 4-byte-aligned _Quad wrapper. #pragma pack(4) plus KMP_DO_ALIGN(4) force
// the quad value to the legacy 4-byte alignment some ABIs/older compilers
// expect, while the operators provide ordinary arithmetic on the wrapped q.
// NOTE(review): the `_Quad rhs = b.q;` declarations (and the operator/struct
// closing braces) are elided from this view between the lines shown.
237 #pragma pack( push, 4 )
239 struct KMP_DO_ALIGN( 4 ) Quad_a4_t {
// Value-initialize q to zero.
242 Quad_a4_t( ) : q( ) {}
// Wrap an existing _Quad value.
243 Quad_a4_t(
const _Quad & cq ) : q ( cq ) {}
// Addition on the wrapped quad values.
245 Quad_a4_t operator + (
const Quad_a4_t& b ) {
246 _Quad lhs = (*this).q;
248 return (Quad_a4_t)( lhs + rhs );
// Subtraction on the wrapped quad values.
251 Quad_a4_t operator - (
const Quad_a4_t& b ) {
252 _Quad lhs = (*this).q;
254 return (Quad_a4_t)( lhs - rhs );
// Multiplication on the wrapped quad values.
256 Quad_a4_t operator * (
const Quad_a4_t& b ) {
257 _Quad lhs = (*this).q;
259 return (Quad_a4_t)( lhs * rhs );
// Division on the wrapped quad values.
262 Quad_a4_t operator / (
const Quad_a4_t& b ) {
263 _Quad lhs = (*this).q;
265 return (Quad_a4_t)( lhs / rhs );
// 4-byte-aligned kmp_cmplx128 wrapper (legacy alignment), with arithmetic
// forwarded to the wrapped complex value q.
// NOTE(review): operator/struct closing braces are elided from this view.
270 struct KMP_DO_ALIGN( 4 ) kmp_cmplx128_a4_t {
// Value-initialize q to zero.
273 kmp_cmplx128_a4_t() : q () {}
// Wrap an existing kmp_cmplx128 value.
275 kmp_cmplx128_a4_t(
const kmp_cmplx128 & c128 ) : q ( c128 ) {}
// Addition on the wrapped complex values.
277 kmp_cmplx128_a4_t operator + (
const kmp_cmplx128_a4_t& b ) {
278 kmp_cmplx128 lhs = (*this).q;
279 kmp_cmplx128 rhs = b.q;
280 return (kmp_cmplx128_a4_t)( lhs + rhs );
// Subtraction on the wrapped complex values.
282 kmp_cmplx128_a4_t operator - (
const kmp_cmplx128_a4_t& b ) {
283 kmp_cmplx128 lhs = (*this).q;
284 kmp_cmplx128 rhs = b.q;
285 return (kmp_cmplx128_a4_t)( lhs - rhs );
// Multiplication on the wrapped complex values.
287 kmp_cmplx128_a4_t operator * (
const kmp_cmplx128_a4_t& b ) {
288 kmp_cmplx128 lhs = (*this).q;
289 kmp_cmplx128 rhs = b.q;
290 return (kmp_cmplx128_a4_t)( lhs * rhs );
// Division on the wrapped complex values.
293 kmp_cmplx128_a4_t operator / (
const kmp_cmplx128_a4_t& b ) {
294 kmp_cmplx128 lhs = (*this).q;
295 kmp_cmplx128 rhs = b.q;
296 return (kmp_cmplx128_a4_t)( lhs / rhs );
// 16-byte-aligned _Quad wrapper — the natural-alignment counterpart of
// Quad_a4_t, with identical operator semantics.
// NOTE(review): the `_Quad rhs = b.q;` declarations (and the operator/struct
// closing braces) are elided from this view between the lines shown.
304 struct KMP_DO_ALIGN( 16 ) Quad_a16_t {
// Value-initialize q to zero.
307 Quad_a16_t( ) : q( ) {}
// Wrap an existing _Quad value.
308 Quad_a16_t(
const _Quad & cq ) : q ( cq ) {}
// Addition on the wrapped quad values.
310 Quad_a16_t operator + (
const Quad_a16_t& b ) {
311 _Quad lhs = (*this).q;
313 return (Quad_a16_t)( lhs + rhs );
// Subtraction on the wrapped quad values.
316 Quad_a16_t operator - (
const Quad_a16_t& b ) {
317 _Quad lhs = (*this).q;
319 return (Quad_a16_t)( lhs - rhs );
// Multiplication on the wrapped quad values.
321 Quad_a16_t operator * (
const Quad_a16_t& b ) {
322 _Quad lhs = (*this).q;
324 return (Quad_a16_t)( lhs * rhs );
// Division on the wrapped quad values.
327 Quad_a16_t operator / (
const Quad_a16_t& b ) {
328 _Quad lhs = (*this).q;
330 return (Quad_a16_t)( lhs / rhs );
// 16-byte-aligned kmp_cmplx128 wrapper — natural-alignment counterpart of
// kmp_cmplx128_a4_t, with identical operator semantics.
// NOTE(review): operator/struct closing braces are elided from this view.
334 struct KMP_DO_ALIGN( 16 ) kmp_cmplx128_a16_t {
// Value-initialize q to zero.
337 kmp_cmplx128_a16_t() : q () {}
// Wrap an existing kmp_cmplx128 value.
339 kmp_cmplx128_a16_t(
const kmp_cmplx128 & c128 ) : q ( c128 ) {}
// Addition on the wrapped complex values.
341 kmp_cmplx128_a16_t operator + (
const kmp_cmplx128_a16_t& b ) {
342 kmp_cmplx128 lhs = (*this).q;
343 kmp_cmplx128 rhs = b.q;
344 return (kmp_cmplx128_a16_t)( lhs + rhs );
// Subtraction on the wrapped complex values.
346 kmp_cmplx128_a16_t operator - (
const kmp_cmplx128_a16_t& b ) {
347 kmp_cmplx128 lhs = (*this).q;
348 kmp_cmplx128 rhs = b.q;
349 return (kmp_cmplx128_a16_t)( lhs - rhs );
// Multiplication on the wrapped complex values.
351 kmp_cmplx128_a16_t operator * (
const kmp_cmplx128_a16_t& b ) {
352 kmp_cmplx128 lhs = (*this).q;
353 kmp_cmplx128 rhs = b.q;
354 return (kmp_cmplx128_a16_t)( lhs * rhs );
// Division on the wrapped complex values.
357 kmp_cmplx128_a16_t operator / (
const kmp_cmplx128_a16_t& b ) {
358 kmp_cmplx128 lhs = (*this).q;
359 kmp_cmplx128 rhs = b.q;
360 return (kmp_cmplx128_a16_t)( lhs / rhs );
// QUAD_LEGACY / CPLX128_LEG select which 128-bit types the prototypes below
// use. The two #define pairs are alternative branches of an #if/#else whose
// condition lines are elided from this view: the first pair picks the
// 4-byte-aligned wrapper structs, the second the plain types.
367 #define QUAD_LEGACY Quad_a4_t
368 #define CPLX128_LEG kmp_cmplx128_a4_t
370 #define QUAD_LEGACY _Quad
371 #define CPLX128_LEG kmp_cmplx128
// Runtime-wide switch controlling how atomics are implemented
// (defined in the corresponding .cpp file).
378 extern int __kmp_atomic_mode;
// Atomic-operation locks are implemented on top of the queuing lock.
384 typedef kmp_queuing_lock_t kmp_atomic_lock_t;
// Thin wrappers mapping the atomic-lock API onto the queuing-lock
// primitives (kmp_atomic_lock_t is a typedef of kmp_queuing_lock_t above).
// NOTE(review): the `static inline <ret>` specifiers and function braces are
// elided from this view between the lines shown.
// Block until the lock is acquired by thread gtid.
387 __kmp_acquire_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
389 __kmp_acquire_queuing_lock( lck, gtid );
// Non-blocking acquire attempt; forwards the queuing-lock result.
393 __kmp_test_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
395 return __kmp_test_queuing_lock( lck, gtid );
// Release a lock previously acquired by thread gtid.
399 __kmp_release_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
401 __kmp_release_queuing_lock( lck, gtid );
// One-time initialization of the lock object.
405 __kmp_init_atomic_lock( kmp_atomic_lock_t *lck )
407 __kmp_init_queuing_lock( lck );
// Tear down the lock object.
411 __kmp_destroy_atomic_lock( kmp_atomic_lock_t *lck )
413 __kmp_destroy_queuing_lock( lck );
// Global locks guarding atomic operations, one per operand kind/size: the
// suffix encodes the operand ("4i" = 4-byte integer, "8r" = 8-byte real,
// "16c" = 16-byte complex, etc.); __kmp_atomic_lock is the generic fallback.
// All are defined in the corresponding .cpp file.
418 extern kmp_atomic_lock_t __kmp_atomic_lock;
419 extern kmp_atomic_lock_t __kmp_atomic_lock_1i;
420 extern kmp_atomic_lock_t __kmp_atomic_lock_2i;
421 extern kmp_atomic_lock_t __kmp_atomic_lock_4i;
422 extern kmp_atomic_lock_t __kmp_atomic_lock_4r;
423 extern kmp_atomic_lock_t __kmp_atomic_lock_8i;
424 extern kmp_atomic_lock_t __kmp_atomic_lock_8r;
425 extern kmp_atomic_lock_t __kmp_atomic_lock_8c;
426 extern kmp_atomic_lock_t __kmp_atomic_lock_10r;
427 extern kmp_atomic_lock_t __kmp_atomic_lock_16r;
428 extern kmp_atomic_lock_t __kmp_atomic_lock_16c;
429 extern kmp_atomic_lock_t __kmp_atomic_lock_20c;
430 extern kmp_atomic_lock_t __kmp_atomic_lock_32c;
437 void __kmpc_atomic_fixed1_add(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
438 void __kmpc_atomic_fixed1_andb(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
439 void __kmpc_atomic_fixed1_div(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
440 void __kmpc_atomic_fixed1u_div(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
441 void __kmpc_atomic_fixed1_mul(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
442 void __kmpc_atomic_fixed1_orb(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
443 void __kmpc_atomic_fixed1_shl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
444 void __kmpc_atomic_fixed1_shr(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
445 void __kmpc_atomic_fixed1u_shr(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
446 void __kmpc_atomic_fixed1_sub(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
447 void __kmpc_atomic_fixed1_xor(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
449 void __kmpc_atomic_fixed2_add(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
450 void __kmpc_atomic_fixed2_andb(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
451 void __kmpc_atomic_fixed2_div(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
452 void __kmpc_atomic_fixed2u_div(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
453 void __kmpc_atomic_fixed2_mul(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
454 void __kmpc_atomic_fixed2_orb(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
455 void __kmpc_atomic_fixed2_shl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
456 void __kmpc_atomic_fixed2_shr(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
457 void __kmpc_atomic_fixed2u_shr(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
458 void __kmpc_atomic_fixed2_sub(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
459 void __kmpc_atomic_fixed2_xor(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
461 void __kmpc_atomic_fixed4_add(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
462 void __kmpc_atomic_fixed4_sub(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
464 void __kmpc_atomic_float4_add(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
465 void __kmpc_atomic_float4_sub(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
467 void __kmpc_atomic_fixed8_add(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
468 void __kmpc_atomic_fixed8_sub(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
470 void __kmpc_atomic_float8_add(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
471 void __kmpc_atomic_float8_sub(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
473 void __kmpc_atomic_fixed4_andb(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
474 void __kmpc_atomic_fixed4_div(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
475 void __kmpc_atomic_fixed4u_div(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
476 void __kmpc_atomic_fixed4_mul(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
477 void __kmpc_atomic_fixed4_orb(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
478 void __kmpc_atomic_fixed4_shl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
479 void __kmpc_atomic_fixed4_shr(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
480 void __kmpc_atomic_fixed4u_shr(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
481 void __kmpc_atomic_fixed4_xor(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
483 void __kmpc_atomic_fixed8_andb(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
484 void __kmpc_atomic_fixed8_div(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
485 void __kmpc_atomic_fixed8u_div(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
486 void __kmpc_atomic_fixed8_mul(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
487 void __kmpc_atomic_fixed8_orb(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
488 void __kmpc_atomic_fixed8_shl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
489 void __kmpc_atomic_fixed8_shr(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
490 void __kmpc_atomic_fixed8u_shr(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
491 void __kmpc_atomic_fixed8_xor(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
493 void __kmpc_atomic_float4_div(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
494 void __kmpc_atomic_float4_mul(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
496 void __kmpc_atomic_float8_div(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
497 void __kmpc_atomic_float8_mul(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
499 void __kmpc_atomic_fixed1_andl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
500 void __kmpc_atomic_fixed1_orl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
501 void __kmpc_atomic_fixed2_andl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
502 void __kmpc_atomic_fixed2_orl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
503 void __kmpc_atomic_fixed4_andl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
504 void __kmpc_atomic_fixed4_orl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
505 void __kmpc_atomic_fixed8_andl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
506 void __kmpc_atomic_fixed8_orl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
508 void __kmpc_atomic_fixed1_max(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
509 void __kmpc_atomic_fixed1_min(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
510 void __kmpc_atomic_fixed2_max(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
511 void __kmpc_atomic_fixed2_min(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
512 void __kmpc_atomic_fixed4_max(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
513 void __kmpc_atomic_fixed4_min(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
514 void __kmpc_atomic_fixed8_max(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
515 void __kmpc_atomic_fixed8_min(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
516 void __kmpc_atomic_float4_max(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
517 void __kmpc_atomic_float4_min(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
518 void __kmpc_atomic_float8_max(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
519 void __kmpc_atomic_float8_min(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
520 void __kmpc_atomic_float16_max(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
521 void __kmpc_atomic_float16_min(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
524 void __kmpc_atomic_float16_max_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
525 void __kmpc_atomic_float16_min_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
528 void __kmpc_atomic_fixed1_neqv(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
529 void __kmpc_atomic_fixed2_neqv(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
530 void __kmpc_atomic_fixed4_neqv(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
531 void __kmpc_atomic_fixed8_neqv(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
533 void __kmpc_atomic_fixed1_eqv(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
534 void __kmpc_atomic_fixed2_eqv(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
535 void __kmpc_atomic_fixed4_eqv(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
536 void __kmpc_atomic_fixed8_eqv(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
538 void __kmpc_atomic_float10_add(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
539 void __kmpc_atomic_float10_sub(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
540 void __kmpc_atomic_float10_mul(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
541 void __kmpc_atomic_float10_div(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
543 void __kmpc_atomic_float16_add(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
544 void __kmpc_atomic_float16_sub(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
545 void __kmpc_atomic_float16_mul(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
546 void __kmpc_atomic_float16_div(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
549 void __kmpc_atomic_float16_add_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
550 void __kmpc_atomic_float16_sub_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
551 void __kmpc_atomic_float16_mul_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
552 void __kmpc_atomic_float16_div_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
555 void __kmpc_atomic_cmplx4_add(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
556 void __kmpc_atomic_cmplx4_sub(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
557 void __kmpc_atomic_cmplx4_mul(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
558 void __kmpc_atomic_cmplx4_div(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
559 void __kmpc_atomic_cmplx8_add(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
560 void __kmpc_atomic_cmplx8_sub(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
561 void __kmpc_atomic_cmplx8_mul(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
562 void __kmpc_atomic_cmplx8_div(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
563 void __kmpc_atomic_cmplx10_add(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
564 void __kmpc_atomic_cmplx10_sub(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
565 void __kmpc_atomic_cmplx10_mul(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
566 void __kmpc_atomic_cmplx10_div(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
567 void __kmpc_atomic_cmplx16_add(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
568 void __kmpc_atomic_cmplx16_sub(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
569 void __kmpc_atomic_cmplx16_mul(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
570 void __kmpc_atomic_cmplx16_div(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
573 void __kmpc_atomic_cmplx16_add_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
574 void __kmpc_atomic_cmplx16_sub_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
575 void __kmpc_atomic_cmplx16_mul_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
576 void __kmpc_atomic_cmplx16_div_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
583 #if KMP_ARCH_X86 || KMP_ARCH_X86_64
585 void __kmpc_atomic_fixed1_sub_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
586 void __kmpc_atomic_fixed1_div_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
587 void __kmpc_atomic_fixed1u_div_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
588 void __kmpc_atomic_fixed1_shl_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
589 void __kmpc_atomic_fixed1_shr_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
590 void __kmpc_atomic_fixed1u_shr_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
591 void __kmpc_atomic_fixed2_sub_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
592 void __kmpc_atomic_fixed2_div_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
593 void __kmpc_atomic_fixed2u_div_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
594 void __kmpc_atomic_fixed2_shl_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
595 void __kmpc_atomic_fixed2_shr_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
596 void __kmpc_atomic_fixed2u_shr_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
597 void __kmpc_atomic_fixed4_sub_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
598 void __kmpc_atomic_fixed4_div_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
599 void __kmpc_atomic_fixed4u_div_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
600 void __kmpc_atomic_fixed4_shl_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
601 void __kmpc_atomic_fixed4_shr_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
602 void __kmpc_atomic_fixed4u_shr_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
603 void __kmpc_atomic_fixed8_sub_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
604 void __kmpc_atomic_fixed8_div_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
605 void __kmpc_atomic_fixed8u_div_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
606 void __kmpc_atomic_fixed8_shl_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
607 void __kmpc_atomic_fixed8_shr_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
608 void __kmpc_atomic_fixed8u_shr_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
609 void __kmpc_atomic_float4_sub_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
610 void __kmpc_atomic_float4_div_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
611 void __kmpc_atomic_float8_sub_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
612 void __kmpc_atomic_float8_div_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
613 void __kmpc_atomic_float10_sub_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
614 void __kmpc_atomic_float10_div_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
615 void __kmpc_atomic_float16_sub_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
616 void __kmpc_atomic_float16_div_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
617 void __kmpc_atomic_cmplx4_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
618 void __kmpc_atomic_cmplx4_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
619 void __kmpc_atomic_cmplx8_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
620 void __kmpc_atomic_cmplx8_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
621 void __kmpc_atomic_cmplx10_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
622 void __kmpc_atomic_cmplx10_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
623 void __kmpc_atomic_cmplx16_sub_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
624 void __kmpc_atomic_cmplx16_div_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
627 void __kmpc_atomic_float16_sub_a16_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
628 void __kmpc_atomic_float16_div_a16_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
629 void __kmpc_atomic_cmplx16_sub_a16_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
630 void __kmpc_atomic_cmplx16_div_a16_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
633 #endif //KMP_ARCH_X86 || KMP_ARCH_X86_64
635 #endif //OMP_40_ENABLED
640 void __kmpc_atomic_fixed1_mul_float8(
ident_t *id_ref,
int gtid,
char * lhs, kmp_real64 rhs );
641 void __kmpc_atomic_fixed1_div_float8(
ident_t *id_ref,
int gtid,
char * lhs, kmp_real64 rhs );
642 void __kmpc_atomic_fixed2_mul_float8(
ident_t *id_ref,
int gtid,
short * lhs, kmp_real64 rhs );
643 void __kmpc_atomic_fixed2_div_float8(
ident_t *id_ref,
int gtid,
short * lhs, kmp_real64 rhs );
644 void __kmpc_atomic_fixed4_mul_float8(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_real64 rhs );
645 void __kmpc_atomic_fixed4_div_float8(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_real64 rhs );
646 void __kmpc_atomic_fixed8_mul_float8(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_real64 rhs );
647 void __kmpc_atomic_fixed8_div_float8(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_real64 rhs );
648 void __kmpc_atomic_float4_add_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
649 void __kmpc_atomic_float4_sub_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
650 void __kmpc_atomic_float4_mul_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
651 void __kmpc_atomic_float4_div_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
654 void __kmpc_atomic_fixed1_add_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
655 void __kmpc_atomic_fixed1_sub_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
656 void __kmpc_atomic_fixed1_mul_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
657 void __kmpc_atomic_fixed1_div_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
658 void __kmpc_atomic_fixed1u_div_fp(
ident_t *id_ref,
int gtid,
unsigned char * lhs, _Quad rhs );
660 void __kmpc_atomic_fixed2_add_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
661 void __kmpc_atomic_fixed2_sub_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
662 void __kmpc_atomic_fixed2_mul_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
663 void __kmpc_atomic_fixed2_div_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
664 void __kmpc_atomic_fixed2u_div_fp(
ident_t *id_ref,
int gtid,
unsigned short * lhs, _Quad rhs );
666 void __kmpc_atomic_fixed4_add_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
667 void __kmpc_atomic_fixed4_sub_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
668 void __kmpc_atomic_fixed4_mul_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
669 void __kmpc_atomic_fixed4_div_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
670 void __kmpc_atomic_fixed4u_div_fp(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, _Quad rhs );
672 void __kmpc_atomic_fixed8_add_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
673 void __kmpc_atomic_fixed8_sub_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
674 void __kmpc_atomic_fixed8_mul_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
675 void __kmpc_atomic_fixed8_div_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
676 void __kmpc_atomic_fixed8u_div_fp(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, _Quad rhs );
678 void __kmpc_atomic_float4_add_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
679 void __kmpc_atomic_float4_sub_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
680 void __kmpc_atomic_float4_mul_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
681 void __kmpc_atomic_float4_div_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
683 void __kmpc_atomic_float8_add_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
684 void __kmpc_atomic_float8_sub_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
685 void __kmpc_atomic_float8_mul_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
686 void __kmpc_atomic_float8_div_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
688 void __kmpc_atomic_float10_add_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
689 void __kmpc_atomic_float10_sub_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
690 void __kmpc_atomic_float10_mul_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
691 void __kmpc_atomic_float10_div_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
694 void __kmpc_atomic_cmplx4_add_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
695 void __kmpc_atomic_cmplx4_sub_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
696 void __kmpc_atomic_cmplx4_mul_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
697 void __kmpc_atomic_cmplx4_div_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
700 void __kmpc_atomic_1(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
701 void __kmpc_atomic_2(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
702 void __kmpc_atomic_4(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
703 void __kmpc_atomic_8(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
704 void __kmpc_atomic_10(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
705 void __kmpc_atomic_16(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
706 void __kmpc_atomic_20(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
707 void __kmpc_atomic_32(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
710 #if KMP_ARCH_X86 || KMP_ARCH_X86_64
716 char __kmpc_atomic_fixed1_rd(
ident_t *id_ref,
int gtid,
char * loc );
717 short __kmpc_atomic_fixed2_rd(
ident_t *id_ref,
int gtid,
short * loc );
718 kmp_int32 __kmpc_atomic_fixed4_rd(
ident_t *id_ref,
int gtid, kmp_int32 * loc );
719 kmp_int64 __kmpc_atomic_fixed8_rd(
ident_t *id_ref,
int gtid, kmp_int64 * loc );
720 kmp_real32 __kmpc_atomic_float4_rd(
ident_t *id_ref,
int gtid, kmp_real32 * loc );
721 kmp_real64 __kmpc_atomic_float8_rd(
ident_t *id_ref,
int gtid, kmp_real64 * loc );
722 long double __kmpc_atomic_float10_rd(
ident_t *id_ref,
int gtid,
long double * loc );
723 QUAD_LEGACY __kmpc_atomic_float16_rd(
ident_t *id_ref,
int gtid, QUAD_LEGACY * loc );
726 #if ( KMP_OS_WINDOWS )
727 void __kmpc_atomic_cmplx4_rd( kmp_cmplx32 * out,
ident_t *id_ref,
int gtid, kmp_cmplx32 * loc );
729 kmp_cmplx32 __kmpc_atomic_cmplx4_rd(
ident_t *id_ref,
int gtid, kmp_cmplx32 * loc );
731 kmp_cmplx64 __kmpc_atomic_cmplx8_rd(
ident_t *id_ref,
int gtid, kmp_cmplx64 * loc );
732 kmp_cmplx80 __kmpc_atomic_cmplx10_rd(
ident_t *id_ref,
int gtid, kmp_cmplx80 * loc );
733 CPLX128_LEG __kmpc_atomic_cmplx16_rd(
ident_t *id_ref,
int gtid, CPLX128_LEG * loc );
736 Quad_a16_t __kmpc_atomic_float16_a16_rd(
ident_t * id_ref,
int gtid, Quad_a16_t * loc );
737 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_rd(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * loc );
745 void __kmpc_atomic_fixed1_wr(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
746 void __kmpc_atomic_fixed2_wr(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
747 void __kmpc_atomic_fixed4_wr(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
748 void __kmpc_atomic_fixed8_wr(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
749 void __kmpc_atomic_float4_wr(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
750 void __kmpc_atomic_float8_wr(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
751 void __kmpc_atomic_float10_wr(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
752 void __kmpc_atomic_float16_wr(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
753 void __kmpc_atomic_cmplx4_wr(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
754 void __kmpc_atomic_cmplx8_wr(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
755 void __kmpc_atomic_cmplx10_wr(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
756 void __kmpc_atomic_cmplx16_wr(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
759 void __kmpc_atomic_float16_a16_wr(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
760 void __kmpc_atomic_cmplx16_a16_wr(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
769 char __kmpc_atomic_fixed1_add_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
770 char __kmpc_atomic_fixed1_andb_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
771 char __kmpc_atomic_fixed1_div_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
772 unsigned char __kmpc_atomic_fixed1u_div_cpt(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag);
773 char __kmpc_atomic_fixed1_mul_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
774 char __kmpc_atomic_fixed1_orb_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
775 char __kmpc_atomic_fixed1_shl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
776 char __kmpc_atomic_fixed1_shr_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
777 unsigned char __kmpc_atomic_fixed1u_shr_cpt(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag);
778 char __kmpc_atomic_fixed1_sub_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
779 char __kmpc_atomic_fixed1_xor_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
781 short __kmpc_atomic_fixed2_add_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
782 short __kmpc_atomic_fixed2_andb_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
783 short __kmpc_atomic_fixed2_div_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
784 unsigned short __kmpc_atomic_fixed2u_div_cpt(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag);
785 short __kmpc_atomic_fixed2_mul_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
786 short __kmpc_atomic_fixed2_orb_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
787 short __kmpc_atomic_fixed2_shl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
788 short __kmpc_atomic_fixed2_shr_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
789 unsigned short __kmpc_atomic_fixed2u_shr_cpt(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag);
790 short __kmpc_atomic_fixed2_sub_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
791 short __kmpc_atomic_fixed2_xor_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
793 kmp_int32 __kmpc_atomic_fixed4_add_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
794 kmp_int32 __kmpc_atomic_fixed4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
796 kmp_real32 __kmpc_atomic_float4_add_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
797 kmp_real32 __kmpc_atomic_float4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
799 kmp_int64 __kmpc_atomic_fixed8_add_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
800 kmp_int64 __kmpc_atomic_fixed8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
802 kmp_real64 __kmpc_atomic_float8_add_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
803 kmp_real64 __kmpc_atomic_float8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
805 kmp_int32 __kmpc_atomic_fixed4_andb_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
806 kmp_int32 __kmpc_atomic_fixed4_div_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
807 kmp_uint32 __kmpc_atomic_fixed4u_div_cpt(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag);
808 kmp_int32 __kmpc_atomic_fixed4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
809 kmp_int32 __kmpc_atomic_fixed4_orb_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
810 kmp_int32 __kmpc_atomic_fixed4_shl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
811 kmp_int32 __kmpc_atomic_fixed4_shr_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
812 kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag);
813 kmp_int32 __kmpc_atomic_fixed4_xor_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
815 kmp_int64 __kmpc_atomic_fixed8_andb_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
816 kmp_int64 __kmpc_atomic_fixed8_div_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
817 kmp_uint64 __kmpc_atomic_fixed8u_div_cpt(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag);
818 kmp_int64 __kmpc_atomic_fixed8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
819 kmp_int64 __kmpc_atomic_fixed8_orb_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
820 kmp_int64 __kmpc_atomic_fixed8_shl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
821 kmp_int64 __kmpc_atomic_fixed8_shr_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
822 kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag);
823 kmp_int64 __kmpc_atomic_fixed8_xor_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
825 kmp_real32 __kmpc_atomic_float4_div_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
826 kmp_real32 __kmpc_atomic_float4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
828 kmp_real64 __kmpc_atomic_float8_div_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
829 kmp_real64 __kmpc_atomic_float8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
831 char __kmpc_atomic_fixed1_andl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
832 char __kmpc_atomic_fixed1_orl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
833 short __kmpc_atomic_fixed2_andl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
834 short __kmpc_atomic_fixed2_orl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
835 kmp_int32 __kmpc_atomic_fixed4_andl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
836 kmp_int32 __kmpc_atomic_fixed4_orl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
837 kmp_int64 __kmpc_atomic_fixed8_andl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
838 kmp_int64 __kmpc_atomic_fixed8_orl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
840 char __kmpc_atomic_fixed1_max_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
841 char __kmpc_atomic_fixed1_min_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
842 short __kmpc_atomic_fixed2_max_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
843 short __kmpc_atomic_fixed2_min_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
844 kmp_int32 __kmpc_atomic_fixed4_max_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
845 kmp_int32 __kmpc_atomic_fixed4_min_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
846 kmp_int64 __kmpc_atomic_fixed8_max_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
847 kmp_int64 __kmpc_atomic_fixed8_min_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
848 kmp_real32 __kmpc_atomic_float4_max_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
849 kmp_real32 __kmpc_atomic_float4_min_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
850 kmp_real64 __kmpc_atomic_float8_max_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
851 kmp_real64 __kmpc_atomic_float8_min_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
852 QUAD_LEGACY __kmpc_atomic_float16_max_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
853 QUAD_LEGACY __kmpc_atomic_float16_min_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
855 char __kmpc_atomic_fixed1_neqv_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
856 short __kmpc_atomic_fixed2_neqv_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
857 kmp_int32 __kmpc_atomic_fixed4_neqv_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
858 kmp_int64 __kmpc_atomic_fixed8_neqv_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
860 char __kmpc_atomic_fixed1_eqv_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
861 short __kmpc_atomic_fixed2_eqv_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
862 kmp_int32 __kmpc_atomic_fixed4_eqv_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
863 kmp_int64 __kmpc_atomic_fixed8_eqv_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
865 long double __kmpc_atomic_float10_add_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
866 long double __kmpc_atomic_float10_sub_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
867 long double __kmpc_atomic_float10_mul_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
868 long double __kmpc_atomic_float10_div_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
870 QUAD_LEGACY __kmpc_atomic_float16_add_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
871 QUAD_LEGACY __kmpc_atomic_float16_sub_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
872 QUAD_LEGACY __kmpc_atomic_float16_mul_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
873 QUAD_LEGACY __kmpc_atomic_float16_div_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
876 void __kmpc_atomic_cmplx4_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
877 void __kmpc_atomic_cmplx4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
878 void __kmpc_atomic_cmplx4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
879 void __kmpc_atomic_cmplx4_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
881 kmp_cmplx64 __kmpc_atomic_cmplx8_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
882 kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
883 kmp_cmplx64 __kmpc_atomic_cmplx8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
884 kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
885 kmp_cmplx80 __kmpc_atomic_cmplx10_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
886 kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
887 kmp_cmplx80 __kmpc_atomic_cmplx10_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
888 kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
889 CPLX128_LEG __kmpc_atomic_cmplx16_add_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
890 CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
891 CPLX128_LEG __kmpc_atomic_cmplx16_mul_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
892 CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
895 Quad_a16_t __kmpc_atomic_float16_add_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
896 Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
897 Quad_a16_t __kmpc_atomic_float16_mul_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
898 Quad_a16_t __kmpc_atomic_float16_div_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
899 Quad_a16_t __kmpc_atomic_float16_max_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
900 Quad_a16_t __kmpc_atomic_float16_min_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
901 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_add_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
902 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
903 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_mul_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
904 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
// Enter/exit the runtime's serialized atomic region.
// NOTE(review): semantics inferred from naming — presumably the fallback
// used when no specialized __kmpc_atomic_* entry point applies; confirm
// against the implementation.
void __kmpc_atomic_start( void );
void __kmpc_atomic_end  ( void );
914 char __kmpc_atomic_fixed1_sub_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
915 char __kmpc_atomic_fixed1_div_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
916 unsigned char __kmpc_atomic_fixed1u_div_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag );
917 char __kmpc_atomic_fixed1_shl_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs ,
int flag);
918 char __kmpc_atomic_fixed1_shr_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
919 unsigned char __kmpc_atomic_fixed1u_shr_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag );
920 short __kmpc_atomic_fixed2_sub_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
921 short __kmpc_atomic_fixed2_div_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
922 unsigned short __kmpc_atomic_fixed2u_div_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag );
923 short __kmpc_atomic_fixed2_shl_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
924 short __kmpc_atomic_fixed2_shr_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
925 unsigned short __kmpc_atomic_fixed2u_shr_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag );
926 kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
927 kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
928 kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag );
929 kmp_int32 __kmpc_atomic_fixed4_shl_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
930 kmp_int32 __kmpc_atomic_fixed4_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
931 kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag );
932 kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
933 kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
934 kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag );
935 kmp_int64 __kmpc_atomic_fixed8_shl_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
936 kmp_int64 __kmpc_atomic_fixed8_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
937 kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag );
938 float __kmpc_atomic_float4_sub_cpt_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs,
int flag );
939 float __kmpc_atomic_float4_div_cpt_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs,
int flag );
940 double __kmpc_atomic_float8_sub_cpt_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs,
int flag );
941 double __kmpc_atomic_float8_div_cpt_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs,
int flag );
942 long double __kmpc_atomic_float10_sub_cpt_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag );
943 long double __kmpc_atomic_float10_div_cpt_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag );
944 QUAD_LEGACY __kmpc_atomic_float16_sub_cpt_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag );
945 QUAD_LEGACY __kmpc_atomic_float16_div_cpt_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag );
947 void __kmpc_atomic_cmplx4_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag );
948 void __kmpc_atomic_cmplx4_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag );
949 kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag );
950 kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag );
951 kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag );
952 kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag );
953 CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag );
954 CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag );
956 Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag );
957 Quad_a16_t __kmpc_atomic_float16_div_a16_cpt_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag );
958 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag );
959 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag );
963 char __kmpc_atomic_fixed1_swp(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
964 short __kmpc_atomic_fixed2_swp(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
965 kmp_int32 __kmpc_atomic_fixed4_swp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
966 kmp_int64 __kmpc_atomic_fixed8_swp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
967 float __kmpc_atomic_float4_swp(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
968 double __kmpc_atomic_float8_swp(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
969 long double __kmpc_atomic_float10_swp(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
970 QUAD_LEGACY __kmpc_atomic_float16_swp(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
972 void __kmpc_atomic_cmplx4_swp(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out );
975 kmp_cmplx64 __kmpc_atomic_cmplx8_swp(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
976 kmp_cmplx80 __kmpc_atomic_cmplx10_swp(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
977 CPLX128_LEG __kmpc_atomic_cmplx16_swp(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
979 Quad_a16_t __kmpc_atomic_float16_a16_swp(
ident_t *id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
980 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_swp(
ident_t *id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
985 #endif //OMP_40_ENABLED
987 #endif //KMP_ARCH_X86 || KMP_ARCH_X86_64