23 #include "ompt-specific.h" 35 #if defined( __cplusplus ) && ( KMP_OS_WINDOWS ) 38 #if (_MSC_VER < 1600) && defined(_DEBUG) 43 #define _DEBUG_TEMPORARILY_UNSET_ 48 template<
typename type_lhs,
typename type_rhs >
49 std::complex< type_lhs > __kmp_lhs_div_rhs(
50 const std::complex< type_lhs >& lhs,
51 const std::complex< type_rhs >& rhs ) {
52 type_lhs a = lhs.real();
53 type_lhs b = lhs.imag();
54 type_rhs c = rhs.real();
55 type_rhs d = rhs.imag();
56 type_rhs den = c*c + d*d;
57 type_rhs r = ( a*c + b*d );
58 type_rhs i = ( b*c - a*d );
59 std::complex< type_lhs > ret( r/den, i/den );
64 struct __kmp_cmplx64_t : std::complex< double > {
66 __kmp_cmplx64_t() :
std::complex< double > () {}
68 __kmp_cmplx64_t(
const std::complex< double >& cd )
69 :
std::complex< double > ( cd ) {}
71 void operator /= (
const __kmp_cmplx64_t& rhs ) {
72 std::complex< double > lhs = *
this;
73 *
this = __kmp_lhs_div_rhs( lhs, rhs );
76 __kmp_cmplx64_t operator / (
const __kmp_cmplx64_t& rhs ) {
77 std::complex< double > lhs = *
this;
78 return __kmp_lhs_div_rhs( lhs, rhs );
82 typedef struct __kmp_cmplx64_t kmp_cmplx64;
85 struct __kmp_cmplx32_t : std::complex< float > {
87 __kmp_cmplx32_t() :
std::complex< float > () {}
89 __kmp_cmplx32_t(
const std::complex<float>& cf )
90 :
std::complex< float > ( cf ) {}
92 __kmp_cmplx32_t operator + (
const __kmp_cmplx32_t& b ) {
93 std::complex< float > lhs = *
this;
94 std::complex< float > rhs = b;
97 __kmp_cmplx32_t operator - (
const __kmp_cmplx32_t& b ) {
98 std::complex< float > lhs = *
this;
99 std::complex< float > rhs = b;
100 return ( lhs - rhs );
102 __kmp_cmplx32_t operator * (
const __kmp_cmplx32_t& b ) {
103 std::complex< float > lhs = *
this;
104 std::complex< float > rhs = b;
105 return ( lhs * rhs );
108 __kmp_cmplx32_t operator + (
const kmp_cmplx64& b ) {
109 kmp_cmplx64 t = kmp_cmplx64( *
this ) + b;
110 std::complex< double > d( t );
111 std::complex< float > f( d );
112 __kmp_cmplx32_t r( f );
115 __kmp_cmplx32_t operator - (
const kmp_cmplx64& b ) {
116 kmp_cmplx64 t = kmp_cmplx64( *
this ) - b;
117 std::complex< double > d( t );
118 std::complex< float > f( d );
119 __kmp_cmplx32_t r( f );
122 __kmp_cmplx32_t operator * (
const kmp_cmplx64& b ) {
123 kmp_cmplx64 t = kmp_cmplx64( *
this ) * b;
124 std::complex< double > d( t );
125 std::complex< float > f( d );
126 __kmp_cmplx32_t r( f );
130 void operator /= (
const __kmp_cmplx32_t& rhs ) {
131 std::complex< float > lhs = *
this;
132 *
this = __kmp_lhs_div_rhs( lhs, rhs );
135 __kmp_cmplx32_t operator / (
const __kmp_cmplx32_t& rhs ) {
136 std::complex< float > lhs = *
this;
137 return __kmp_lhs_div_rhs( lhs, rhs );
140 void operator /= (
const kmp_cmplx64& rhs ) {
141 std::complex< float > lhs = *
this;
142 *
this = __kmp_lhs_div_rhs( lhs, rhs );
145 __kmp_cmplx32_t operator / (
const kmp_cmplx64& rhs ) {
146 std::complex< float > lhs = *
this;
147 return __kmp_lhs_div_rhs( lhs, rhs );
150 typedef struct __kmp_cmplx32_t kmp_cmplx32;
153 struct KMP_DO_ALIGN( 16 ) __kmp_cmplx80_t : std::complex< long double > {
155 __kmp_cmplx80_t() :
std::complex< long double > () {}
157 __kmp_cmplx80_t(
const std::complex< long double >& cld )
158 :
std::complex< long double > ( cld ) {}
160 void operator /= (
const __kmp_cmplx80_t& rhs ) {
161 std::complex< long double > lhs = *
this;
162 *
this = __kmp_lhs_div_rhs( lhs, rhs );
165 __kmp_cmplx80_t operator / (
const __kmp_cmplx80_t& rhs ) {
166 std::complex< long double > lhs = *
this;
167 return __kmp_lhs_div_rhs( lhs, rhs );
171 typedef KMP_DO_ALIGN( 16 ) struct __kmp_cmplx80_t kmp_cmplx80;
175 struct __kmp_cmplx128_t : std::complex< _Quad > {
177 __kmp_cmplx128_t() :
std::complex< _Quad > () {}
179 __kmp_cmplx128_t(
const std::complex< _Quad >& cq )
180 :
std::complex< _Quad > ( cq ) {}
182 void operator /= (
const __kmp_cmplx128_t& rhs ) {
183 std::complex< _Quad > lhs = *
this;
184 *
this = __kmp_lhs_div_rhs( lhs, rhs );
187 __kmp_cmplx128_t operator / (
const __kmp_cmplx128_t& rhs ) {
188 std::complex< _Quad > lhs = *
this;
189 return __kmp_lhs_div_rhs( lhs, rhs );
193 typedef struct __kmp_cmplx128_t kmp_cmplx128;
196 #ifdef _DEBUG_TEMPORARILY_UNSET_ 197 #undef _DEBUG_TEMPORARILY_UNSET_ 204 typedef float _Complex kmp_cmplx32;
205 typedef double _Complex kmp_cmplx64;
206 typedef long double _Complex kmp_cmplx80;
208 typedef _Quad _Complex kmp_cmplx128;
216 #if KMP_ARCH_X86 && KMP_HAVE_QUAD 220 #pragma pack( push, 4 ) 223 struct KMP_DO_ALIGN( 4 ) Quad_a4_t {
226 Quad_a4_t( ) : q( ) {}
227 Quad_a4_t(
const _Quad & cq ) : q ( cq ) {}
229 Quad_a4_t operator + (
const Quad_a4_t& b ) {
230 _Quad lhs = (*this).q;
232 return (Quad_a4_t)( lhs + rhs );
235 Quad_a4_t operator - (
const Quad_a4_t& b ) {
236 _Quad lhs = (*this).q;
238 return (Quad_a4_t)( lhs - rhs );
240 Quad_a4_t operator * (
const Quad_a4_t& b ) {
241 _Quad lhs = (*this).q;
243 return (Quad_a4_t)( lhs * rhs );
246 Quad_a4_t operator / (
const Quad_a4_t& b ) {
247 _Quad lhs = (*this).q;
249 return (Quad_a4_t)( lhs / rhs );
254 struct KMP_DO_ALIGN( 4 ) kmp_cmplx128_a4_t {
257 kmp_cmplx128_a4_t() : q () {}
259 kmp_cmplx128_a4_t(
const kmp_cmplx128 & c128 ) : q ( c128 ) {}
261 kmp_cmplx128_a4_t operator + (
const kmp_cmplx128_a4_t& b ) {
262 kmp_cmplx128 lhs = (*this).q;
263 kmp_cmplx128 rhs = b.q;
264 return (kmp_cmplx128_a4_t)( lhs + rhs );
266 kmp_cmplx128_a4_t operator - (
const kmp_cmplx128_a4_t& b ) {
267 kmp_cmplx128 lhs = (*this).q;
268 kmp_cmplx128 rhs = b.q;
269 return (kmp_cmplx128_a4_t)( lhs - rhs );
271 kmp_cmplx128_a4_t operator * (
const kmp_cmplx128_a4_t& b ) {
272 kmp_cmplx128 lhs = (*this).q;
273 kmp_cmplx128 rhs = b.q;
274 return (kmp_cmplx128_a4_t)( lhs * rhs );
277 kmp_cmplx128_a4_t operator / (
const kmp_cmplx128_a4_t& b ) {
278 kmp_cmplx128 lhs = (*this).q;
279 kmp_cmplx128 rhs = b.q;
280 return (kmp_cmplx128_a4_t)( lhs / rhs );
288 struct KMP_DO_ALIGN( 16 ) Quad_a16_t {
291 Quad_a16_t( ) : q( ) {}
292 Quad_a16_t(
const _Quad & cq ) : q ( cq ) {}
294 Quad_a16_t operator + (
const Quad_a16_t& b ) {
295 _Quad lhs = (*this).q;
297 return (Quad_a16_t)( lhs + rhs );
300 Quad_a16_t operator - (
const Quad_a16_t& b ) {
301 _Quad lhs = (*this).q;
303 return (Quad_a16_t)( lhs - rhs );
305 Quad_a16_t operator * (
const Quad_a16_t& b ) {
306 _Quad lhs = (*this).q;
308 return (Quad_a16_t)( lhs * rhs );
311 Quad_a16_t operator / (
const Quad_a16_t& b ) {
312 _Quad lhs = (*this).q;
314 return (Quad_a16_t)( lhs / rhs );
318 struct KMP_DO_ALIGN( 16 ) kmp_cmplx128_a16_t {
321 kmp_cmplx128_a16_t() : q () {}
323 kmp_cmplx128_a16_t(
const kmp_cmplx128 & c128 ) : q ( c128 ) {}
325 kmp_cmplx128_a16_t operator + (
const kmp_cmplx128_a16_t& b ) {
326 kmp_cmplx128 lhs = (*this).q;
327 kmp_cmplx128 rhs = b.q;
328 return (kmp_cmplx128_a16_t)( lhs + rhs );
330 kmp_cmplx128_a16_t operator - (
const kmp_cmplx128_a16_t& b ) {
331 kmp_cmplx128 lhs = (*this).q;
332 kmp_cmplx128 rhs = b.q;
333 return (kmp_cmplx128_a16_t)( lhs - rhs );
335 kmp_cmplx128_a16_t operator * (
const kmp_cmplx128_a16_t& b ) {
336 kmp_cmplx128 lhs = (*this).q;
337 kmp_cmplx128 rhs = b.q;
338 return (kmp_cmplx128_a16_t)( lhs * rhs );
341 kmp_cmplx128_a16_t operator / (
const kmp_cmplx128_a16_t& b ) {
342 kmp_cmplx128 lhs = (*this).q;
343 kmp_cmplx128 rhs = b.q;
344 return (kmp_cmplx128_a16_t)( lhs / rhs );
351 #define QUAD_LEGACY Quad_a4_t 352 #define CPLX128_LEG kmp_cmplx128_a4_t 354 #define QUAD_LEGACY _Quad 355 #define CPLX128_LEG kmp_cmplx128 362 extern int __kmp_atomic_mode;
368 typedef kmp_queuing_lock_t kmp_atomic_lock_t;
371 __kmp_acquire_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
373 #if OMPT_SUPPORT && OMPT_TRACE 375 ompt_callbacks.ompt_callback(ompt_event_wait_atomic)) {
376 ompt_callbacks.ompt_callback(ompt_event_wait_atomic)(
377 (ompt_wait_id_t) lck);
381 __kmp_acquire_queuing_lock( lck, gtid );
383 #if OMPT_SUPPORT && OMPT_TRACE 385 ompt_callbacks.ompt_callback(ompt_event_acquired_atomic)) {
386 ompt_callbacks.ompt_callback(ompt_event_acquired_atomic)(
387 (ompt_wait_id_t) lck);
393 __kmp_test_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
395 return __kmp_test_queuing_lock( lck, gtid );
399 __kmp_release_atomic_lock( kmp_atomic_lock_t *lck, kmp_int32 gtid )
401 __kmp_release_queuing_lock( lck, gtid );
402 #if OMPT_SUPPORT && OMPT_BLAME 404 ompt_callbacks.ompt_callback(ompt_event_release_atomic)) {
405 ompt_callbacks.ompt_callback(ompt_event_release_atomic)(
406 (ompt_wait_id_t) lck);
412 __kmp_init_atomic_lock( kmp_atomic_lock_t *lck )
414 __kmp_init_queuing_lock( lck );
418 __kmp_destroy_atomic_lock( kmp_atomic_lock_t *lck )
420 __kmp_destroy_queuing_lock( lck );
425 extern kmp_atomic_lock_t __kmp_atomic_lock;
426 extern kmp_atomic_lock_t __kmp_atomic_lock_1i;
427 extern kmp_atomic_lock_t __kmp_atomic_lock_2i;
428 extern kmp_atomic_lock_t __kmp_atomic_lock_4i;
429 extern kmp_atomic_lock_t __kmp_atomic_lock_4r;
430 extern kmp_atomic_lock_t __kmp_atomic_lock_8i;
431 extern kmp_atomic_lock_t __kmp_atomic_lock_8r;
432 extern kmp_atomic_lock_t __kmp_atomic_lock_8c;
433 extern kmp_atomic_lock_t __kmp_atomic_lock_10r;
434 extern kmp_atomic_lock_t __kmp_atomic_lock_16r;
435 extern kmp_atomic_lock_t __kmp_atomic_lock_16c;
436 extern kmp_atomic_lock_t __kmp_atomic_lock_20c;
437 extern kmp_atomic_lock_t __kmp_atomic_lock_32c;
444 void __kmpc_atomic_fixed1_add(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
445 void __kmpc_atomic_fixed1_andb(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
446 void __kmpc_atomic_fixed1_div(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
447 void __kmpc_atomic_fixed1u_div(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
448 void __kmpc_atomic_fixed1_mul(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
449 void __kmpc_atomic_fixed1_orb(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
450 void __kmpc_atomic_fixed1_shl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
451 void __kmpc_atomic_fixed1_shr(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
452 void __kmpc_atomic_fixed1u_shr(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
453 void __kmpc_atomic_fixed1_sub(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
454 void __kmpc_atomic_fixed1_xor(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
456 void __kmpc_atomic_fixed2_add(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
457 void __kmpc_atomic_fixed2_andb(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
458 void __kmpc_atomic_fixed2_div(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
459 void __kmpc_atomic_fixed2u_div(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
460 void __kmpc_atomic_fixed2_mul(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
461 void __kmpc_atomic_fixed2_orb(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
462 void __kmpc_atomic_fixed2_shl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
463 void __kmpc_atomic_fixed2_shr(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
464 void __kmpc_atomic_fixed2u_shr(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
465 void __kmpc_atomic_fixed2_sub(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
466 void __kmpc_atomic_fixed2_xor(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
468 void __kmpc_atomic_fixed4_add(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
469 void __kmpc_atomic_fixed4_sub(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
471 void __kmpc_atomic_float4_add(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
472 void __kmpc_atomic_float4_sub(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
474 void __kmpc_atomic_fixed8_add(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
475 void __kmpc_atomic_fixed8_sub(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
477 void __kmpc_atomic_float8_add(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
478 void __kmpc_atomic_float8_sub(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
480 void __kmpc_atomic_fixed4_andb(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
481 void __kmpc_atomic_fixed4_div(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
482 void __kmpc_atomic_fixed4u_div(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
483 void __kmpc_atomic_fixed4_mul(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
484 void __kmpc_atomic_fixed4_orb(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
485 void __kmpc_atomic_fixed4_shl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
486 void __kmpc_atomic_fixed4_shr(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
487 void __kmpc_atomic_fixed4u_shr(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
488 void __kmpc_atomic_fixed4_xor(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
490 void __kmpc_atomic_fixed8_andb(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
491 void __kmpc_atomic_fixed8_div(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
492 void __kmpc_atomic_fixed8u_div(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
493 void __kmpc_atomic_fixed8_mul(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
494 void __kmpc_atomic_fixed8_orb(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
495 void __kmpc_atomic_fixed8_shl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
496 void __kmpc_atomic_fixed8_shr(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
497 void __kmpc_atomic_fixed8u_shr(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
498 void __kmpc_atomic_fixed8_xor(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
500 void __kmpc_atomic_float4_div(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
501 void __kmpc_atomic_float4_mul(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
503 void __kmpc_atomic_float8_div(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
504 void __kmpc_atomic_float8_mul(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
506 void __kmpc_atomic_fixed1_andl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
507 void __kmpc_atomic_fixed1_orl(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
508 void __kmpc_atomic_fixed2_andl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
509 void __kmpc_atomic_fixed2_orl(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
510 void __kmpc_atomic_fixed4_andl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
511 void __kmpc_atomic_fixed4_orl(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
512 void __kmpc_atomic_fixed8_andl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
513 void __kmpc_atomic_fixed8_orl(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
515 void __kmpc_atomic_fixed1_max(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
516 void __kmpc_atomic_fixed1_min(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
517 void __kmpc_atomic_fixed2_max(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
518 void __kmpc_atomic_fixed2_min(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
519 void __kmpc_atomic_fixed4_max(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
520 void __kmpc_atomic_fixed4_min(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
521 void __kmpc_atomic_fixed8_max(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
522 void __kmpc_atomic_fixed8_min(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
523 void __kmpc_atomic_float4_max(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
524 void __kmpc_atomic_float4_min(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
525 void __kmpc_atomic_float8_max(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
526 void __kmpc_atomic_float8_min(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
528 void __kmpc_atomic_float16_max(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
529 void __kmpc_atomic_float16_min(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
532 void __kmpc_atomic_float16_max_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
533 void __kmpc_atomic_float16_min_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
537 void __kmpc_atomic_fixed1_neqv(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
538 void __kmpc_atomic_fixed2_neqv(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
539 void __kmpc_atomic_fixed4_neqv(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
540 void __kmpc_atomic_fixed8_neqv(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
542 void __kmpc_atomic_fixed1_eqv(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
543 void __kmpc_atomic_fixed2_eqv(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
544 void __kmpc_atomic_fixed4_eqv(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
545 void __kmpc_atomic_fixed8_eqv(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
547 void __kmpc_atomic_float10_add(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
548 void __kmpc_atomic_float10_sub(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
549 void __kmpc_atomic_float10_mul(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
550 void __kmpc_atomic_float10_div(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
553 void __kmpc_atomic_float16_add(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
554 void __kmpc_atomic_float16_sub(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
555 void __kmpc_atomic_float16_mul(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
556 void __kmpc_atomic_float16_div(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
559 void __kmpc_atomic_float16_add_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
560 void __kmpc_atomic_float16_sub_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
561 void __kmpc_atomic_float16_mul_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
562 void __kmpc_atomic_float16_div_a16(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
566 void __kmpc_atomic_cmplx4_add(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
567 void __kmpc_atomic_cmplx4_sub(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
568 void __kmpc_atomic_cmplx4_mul(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
569 void __kmpc_atomic_cmplx4_div(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
570 void __kmpc_atomic_cmplx8_add(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
571 void __kmpc_atomic_cmplx8_sub(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
572 void __kmpc_atomic_cmplx8_mul(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
573 void __kmpc_atomic_cmplx8_div(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
574 void __kmpc_atomic_cmplx10_add(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
575 void __kmpc_atomic_cmplx10_sub(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
576 void __kmpc_atomic_cmplx10_mul(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
577 void __kmpc_atomic_cmplx10_div(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
579 void __kmpc_atomic_cmplx16_add(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
580 void __kmpc_atomic_cmplx16_sub(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
581 void __kmpc_atomic_cmplx16_mul(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
582 void __kmpc_atomic_cmplx16_div(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
585 void __kmpc_atomic_cmplx16_add_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
586 void __kmpc_atomic_cmplx16_sub_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
587 void __kmpc_atomic_cmplx16_mul_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
588 void __kmpc_atomic_cmplx16_div_a16(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
596 #if KMP_ARCH_X86 || KMP_ARCH_X86_64 598 void __kmpc_atomic_fixed1_sub_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
599 void __kmpc_atomic_fixed1_div_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
600 void __kmpc_atomic_fixed1u_div_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
601 void __kmpc_atomic_fixed1_shl_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
602 void __kmpc_atomic_fixed1_shr_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
603 void __kmpc_atomic_fixed1u_shr_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs );
604 void __kmpc_atomic_fixed2_sub_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
605 void __kmpc_atomic_fixed2_div_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
606 void __kmpc_atomic_fixed2u_div_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
607 void __kmpc_atomic_fixed2_shl_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
608 void __kmpc_atomic_fixed2_shr_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
609 void __kmpc_atomic_fixed2u_shr_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs );
610 void __kmpc_atomic_fixed4_sub_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
611 void __kmpc_atomic_fixed4_div_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
612 void __kmpc_atomic_fixed4u_div_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
613 void __kmpc_atomic_fixed4_shl_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
614 void __kmpc_atomic_fixed4_shr_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
615 void __kmpc_atomic_fixed4u_shr_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs );
616 void __kmpc_atomic_fixed8_sub_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
617 void __kmpc_atomic_fixed8_div_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
618 void __kmpc_atomic_fixed8u_div_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
619 void __kmpc_atomic_fixed8_shl_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
620 void __kmpc_atomic_fixed8_shr_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
621 void __kmpc_atomic_fixed8u_shr_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs );
622 void __kmpc_atomic_float4_sub_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
623 void __kmpc_atomic_float4_div_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
624 void __kmpc_atomic_float8_sub_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
625 void __kmpc_atomic_float8_div_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
626 void __kmpc_atomic_float10_sub_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
627 void __kmpc_atomic_float10_div_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
629 void __kmpc_atomic_float16_sub_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
630 void __kmpc_atomic_float16_div_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
632 void __kmpc_atomic_cmplx4_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
633 void __kmpc_atomic_cmplx4_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
634 void __kmpc_atomic_cmplx8_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
635 void __kmpc_atomic_cmplx8_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
636 void __kmpc_atomic_cmplx10_sub_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
637 void __kmpc_atomic_cmplx10_div_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
639 void __kmpc_atomic_cmplx16_sub_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
640 void __kmpc_atomic_cmplx16_div_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
643 void __kmpc_atomic_float16_sub_a16_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
644 void __kmpc_atomic_float16_div_a16_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
645 void __kmpc_atomic_cmplx16_sub_a16_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
646 void __kmpc_atomic_cmplx16_div_a16_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
648 #endif // KMP_HAVE_QUAD 650 #endif //KMP_ARCH_X86 || KMP_ARCH_X86_64 652 #endif //OMP_40_ENABLED 657 void __kmpc_atomic_fixed1_mul_float8(
ident_t *id_ref,
int gtid,
char * lhs, kmp_real64 rhs );
658 void __kmpc_atomic_fixed1_div_float8(
ident_t *id_ref,
int gtid,
char * lhs, kmp_real64 rhs );
659 void __kmpc_atomic_fixed2_mul_float8(
ident_t *id_ref,
int gtid,
short * lhs, kmp_real64 rhs );
660 void __kmpc_atomic_fixed2_div_float8(
ident_t *id_ref,
int gtid,
short * lhs, kmp_real64 rhs );
661 void __kmpc_atomic_fixed4_mul_float8(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_real64 rhs );
662 void __kmpc_atomic_fixed4_div_float8(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_real64 rhs );
663 void __kmpc_atomic_fixed8_mul_float8(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_real64 rhs );
664 void __kmpc_atomic_fixed8_div_float8(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_real64 rhs );
665 void __kmpc_atomic_float4_add_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
666 void __kmpc_atomic_float4_sub_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
667 void __kmpc_atomic_float4_mul_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
668 void __kmpc_atomic_float4_div_float8(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real64 rhs );
672 void __kmpc_atomic_fixed1_add_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
673 void __kmpc_atomic_fixed1_sub_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
674 void __kmpc_atomic_fixed1_mul_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
675 void __kmpc_atomic_fixed1_div_fp(
ident_t *id_ref,
int gtid,
char * lhs, _Quad rhs );
676 void __kmpc_atomic_fixed1u_div_fp(
ident_t *id_ref,
int gtid,
unsigned char * lhs, _Quad rhs );
678 void __kmpc_atomic_fixed2_add_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
679 void __kmpc_atomic_fixed2_sub_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
680 void __kmpc_atomic_fixed2_mul_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
681 void __kmpc_atomic_fixed2_div_fp(
ident_t *id_ref,
int gtid,
short * lhs, _Quad rhs );
682 void __kmpc_atomic_fixed2u_div_fp(
ident_t *id_ref,
int gtid,
unsigned short * lhs, _Quad rhs );
684 void __kmpc_atomic_fixed4_add_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
685 void __kmpc_atomic_fixed4_sub_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
686 void __kmpc_atomic_fixed4_mul_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
687 void __kmpc_atomic_fixed4_div_fp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, _Quad rhs );
688 void __kmpc_atomic_fixed4u_div_fp(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, _Quad rhs );
690 void __kmpc_atomic_fixed8_add_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
691 void __kmpc_atomic_fixed8_sub_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
692 void __kmpc_atomic_fixed8_mul_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
693 void __kmpc_atomic_fixed8_div_fp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, _Quad rhs );
694 void __kmpc_atomic_fixed8u_div_fp(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, _Quad rhs );
696 void __kmpc_atomic_float4_add_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
697 void __kmpc_atomic_float4_sub_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
698 void __kmpc_atomic_float4_mul_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
699 void __kmpc_atomic_float4_div_fp(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, _Quad rhs );
701 void __kmpc_atomic_float8_add_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
702 void __kmpc_atomic_float8_sub_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
703 void __kmpc_atomic_float8_mul_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
704 void __kmpc_atomic_float8_div_fp(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, _Quad rhs );
706 void __kmpc_atomic_float10_add_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
707 void __kmpc_atomic_float10_sub_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
708 void __kmpc_atomic_float10_mul_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
709 void __kmpc_atomic_float10_div_fp(
ident_t *id_ref,
int gtid,
long double * lhs, _Quad rhs );
710 #endif // KMP_HAVE_QUAD 713 void __kmpc_atomic_cmplx4_add_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
714 void __kmpc_atomic_cmplx4_sub_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
715 void __kmpc_atomic_cmplx4_mul_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
716 void __kmpc_atomic_cmplx4_div_cmplx8(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx64 rhs );
719 void __kmpc_atomic_1(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
720 void __kmpc_atomic_2(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
721 void __kmpc_atomic_4(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
722 void __kmpc_atomic_8(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
723 void __kmpc_atomic_10(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
724 void __kmpc_atomic_16(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
725 void __kmpc_atomic_20(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
726 void __kmpc_atomic_32(
ident_t *id_ref,
int gtid,
void* lhs,
void* rhs,
void (*f)(
void *,
void *,
void * ) );
729 #if KMP_ARCH_X86 || KMP_ARCH_X86_64 735 char __kmpc_atomic_fixed1_rd(
ident_t *id_ref,
int gtid,
char * loc );
736 short __kmpc_atomic_fixed2_rd(
ident_t *id_ref,
int gtid,
short * loc );
737 kmp_int32 __kmpc_atomic_fixed4_rd(
ident_t *id_ref,
int gtid, kmp_int32 * loc );
738 kmp_int64 __kmpc_atomic_fixed8_rd(
ident_t *id_ref,
int gtid, kmp_int64 * loc );
739 kmp_real32 __kmpc_atomic_float4_rd(
ident_t *id_ref,
int gtid, kmp_real32 * loc );
740 kmp_real64 __kmpc_atomic_float8_rd(
ident_t *id_ref,
int gtid, kmp_real64 * loc );
741 long double __kmpc_atomic_float10_rd(
ident_t *id_ref,
int gtid,
long double * loc );
743 QUAD_LEGACY __kmpc_atomic_float16_rd(
ident_t *id_ref,
int gtid, QUAD_LEGACY * loc );
747 #if ( KMP_OS_WINDOWS ) 748 void __kmpc_atomic_cmplx4_rd( kmp_cmplx32 * out,
ident_t *id_ref,
int gtid, kmp_cmplx32 * loc );
750 kmp_cmplx32 __kmpc_atomic_cmplx4_rd(
ident_t *id_ref,
int gtid, kmp_cmplx32 * loc );
752 kmp_cmplx64 __kmpc_atomic_cmplx8_rd(
ident_t *id_ref,
int gtid, kmp_cmplx64 * loc );
753 kmp_cmplx80 __kmpc_atomic_cmplx10_rd(
ident_t *id_ref,
int gtid, kmp_cmplx80 * loc );
755 CPLX128_LEG __kmpc_atomic_cmplx16_rd(
ident_t *id_ref,
int gtid, CPLX128_LEG * loc );
758 Quad_a16_t __kmpc_atomic_float16_a16_rd(
ident_t * id_ref,
int gtid, Quad_a16_t * loc );
759 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_rd(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * loc );
768 void __kmpc_atomic_fixed1_wr(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
769 void __kmpc_atomic_fixed2_wr(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
770 void __kmpc_atomic_fixed4_wr(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
771 void __kmpc_atomic_fixed8_wr(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
772 void __kmpc_atomic_float4_wr(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs );
773 void __kmpc_atomic_float8_wr(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs );
774 void __kmpc_atomic_float10_wr(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
776 void __kmpc_atomic_float16_wr(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
778 void __kmpc_atomic_cmplx4_wr(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs );
779 void __kmpc_atomic_cmplx8_wr(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
780 void __kmpc_atomic_cmplx10_wr(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
782 void __kmpc_atomic_cmplx16_wr(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
785 void __kmpc_atomic_float16_a16_wr(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
786 void __kmpc_atomic_cmplx16_a16_wr(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
795 char __kmpc_atomic_fixed1_add_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
796 char __kmpc_atomic_fixed1_andb_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
797 char __kmpc_atomic_fixed1_div_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
798 unsigned char __kmpc_atomic_fixed1u_div_cpt(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag);
799 char __kmpc_atomic_fixed1_mul_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
800 char __kmpc_atomic_fixed1_orb_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
801 char __kmpc_atomic_fixed1_shl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
802 char __kmpc_atomic_fixed1_shr_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
803 unsigned char __kmpc_atomic_fixed1u_shr_cpt(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag);
804 char __kmpc_atomic_fixed1_sub_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
805 char __kmpc_atomic_fixed1_xor_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
807 short __kmpc_atomic_fixed2_add_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
808 short __kmpc_atomic_fixed2_andb_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
809 short __kmpc_atomic_fixed2_div_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
810 unsigned short __kmpc_atomic_fixed2u_div_cpt(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag);
811 short __kmpc_atomic_fixed2_mul_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
812 short __kmpc_atomic_fixed2_orb_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
813 short __kmpc_atomic_fixed2_shl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
814 short __kmpc_atomic_fixed2_shr_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
815 unsigned short __kmpc_atomic_fixed2u_shr_cpt(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag);
816 short __kmpc_atomic_fixed2_sub_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
817 short __kmpc_atomic_fixed2_xor_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
819 kmp_int32 __kmpc_atomic_fixed4_add_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
820 kmp_int32 __kmpc_atomic_fixed4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
822 kmp_real32 __kmpc_atomic_float4_add_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
823 kmp_real32 __kmpc_atomic_float4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
825 kmp_int64 __kmpc_atomic_fixed8_add_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
826 kmp_int64 __kmpc_atomic_fixed8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
828 kmp_real64 __kmpc_atomic_float8_add_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
829 kmp_real64 __kmpc_atomic_float8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
831 kmp_int32 __kmpc_atomic_fixed4_andb_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
832 kmp_int32 __kmpc_atomic_fixed4_div_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
833 kmp_uint32 __kmpc_atomic_fixed4u_div_cpt(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag);
834 kmp_int32 __kmpc_atomic_fixed4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
835 kmp_int32 __kmpc_atomic_fixed4_orb_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
836 kmp_int32 __kmpc_atomic_fixed4_shl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
837 kmp_int32 __kmpc_atomic_fixed4_shr_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
838 kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag);
839 kmp_int32 __kmpc_atomic_fixed4_xor_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
841 kmp_int64 __kmpc_atomic_fixed8_andb_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
842 kmp_int64 __kmpc_atomic_fixed8_div_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
843 kmp_uint64 __kmpc_atomic_fixed8u_div_cpt(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag);
844 kmp_int64 __kmpc_atomic_fixed8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
845 kmp_int64 __kmpc_atomic_fixed8_orb_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
846 kmp_int64 __kmpc_atomic_fixed8_shl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
847 kmp_int64 __kmpc_atomic_fixed8_shr_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
848 kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag);
849 kmp_int64 __kmpc_atomic_fixed8_xor_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
851 kmp_real32 __kmpc_atomic_float4_div_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
852 kmp_real32 __kmpc_atomic_float4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
854 kmp_real64 __kmpc_atomic_float8_div_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
855 kmp_real64 __kmpc_atomic_float8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
857 char __kmpc_atomic_fixed1_andl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
858 char __kmpc_atomic_fixed1_orl_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
859 short __kmpc_atomic_fixed2_andl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
860 short __kmpc_atomic_fixed2_orl_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
861 kmp_int32 __kmpc_atomic_fixed4_andl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
862 kmp_int32 __kmpc_atomic_fixed4_orl_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
863 kmp_int64 __kmpc_atomic_fixed8_andl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
864 kmp_int64 __kmpc_atomic_fixed8_orl_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
866 char __kmpc_atomic_fixed1_max_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
867 char __kmpc_atomic_fixed1_min_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
868 short __kmpc_atomic_fixed2_max_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
869 short __kmpc_atomic_fixed2_min_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
870 kmp_int32 __kmpc_atomic_fixed4_max_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
871 kmp_int32 __kmpc_atomic_fixed4_min_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
872 kmp_int64 __kmpc_atomic_fixed8_max_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
873 kmp_int64 __kmpc_atomic_fixed8_min_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
874 kmp_real32 __kmpc_atomic_float4_max_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
875 kmp_real32 __kmpc_atomic_float4_min_cpt(
ident_t *id_ref,
int gtid, kmp_real32 * lhs, kmp_real32 rhs,
int flag);
876 kmp_real64 __kmpc_atomic_float8_max_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
877 kmp_real64 __kmpc_atomic_float8_min_cpt(
ident_t *id_ref,
int gtid, kmp_real64 * lhs, kmp_real64 rhs,
int flag);
879 QUAD_LEGACY __kmpc_atomic_float16_max_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
880 QUAD_LEGACY __kmpc_atomic_float16_min_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
883 char __kmpc_atomic_fixed1_neqv_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
884 short __kmpc_atomic_fixed2_neqv_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
885 kmp_int32 __kmpc_atomic_fixed4_neqv_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
886 kmp_int64 __kmpc_atomic_fixed8_neqv_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
888 char __kmpc_atomic_fixed1_eqv_cpt(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag);
889 short __kmpc_atomic_fixed2_eqv_cpt(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag);
890 kmp_int32 __kmpc_atomic_fixed4_eqv_cpt(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag);
891 kmp_int64 __kmpc_atomic_fixed8_eqv_cpt(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag);
893 long double __kmpc_atomic_float10_add_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
894 long double __kmpc_atomic_float10_sub_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
895 long double __kmpc_atomic_float10_mul_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
896 long double __kmpc_atomic_float10_div_cpt(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag);
899 QUAD_LEGACY __kmpc_atomic_float16_add_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
900 QUAD_LEGACY __kmpc_atomic_float16_sub_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
901 QUAD_LEGACY __kmpc_atomic_float16_mul_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
902 QUAD_LEGACY __kmpc_atomic_float16_div_cpt(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag);
906 void __kmpc_atomic_cmplx4_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
907 void __kmpc_atomic_cmplx4_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
908 void __kmpc_atomic_cmplx4_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
909 void __kmpc_atomic_cmplx4_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag);
911 kmp_cmplx64 __kmpc_atomic_cmplx8_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
912 kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
913 kmp_cmplx64 __kmpc_atomic_cmplx8_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
914 kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag);
915 kmp_cmplx80 __kmpc_atomic_cmplx10_add_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
916 kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
917 kmp_cmplx80 __kmpc_atomic_cmplx10_mul_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
918 kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag);
920 CPLX128_LEG __kmpc_atomic_cmplx16_add_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
921 CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
922 CPLX128_LEG __kmpc_atomic_cmplx16_mul_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
923 CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag);
926 Quad_a16_t __kmpc_atomic_float16_add_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
927 Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
928 Quad_a16_t __kmpc_atomic_float16_mul_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
929 Quad_a16_t __kmpc_atomic_float16_div_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
930 Quad_a16_t __kmpc_atomic_float16_max_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
931 Quad_a16_t __kmpc_atomic_float16_min_a16_cpt(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag);
932 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_add_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
933 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
934 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_mul_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
935 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag);
// Begin/end a critical region used as the fallback implementation of
// atomics (no location/thread arguments).
void __kmpc_atomic_start( void );
void __kmpc_atomic_end( void );
946 char __kmpc_atomic_fixed1_sub_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
947 char __kmpc_atomic_fixed1_div_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
948 unsigned char __kmpc_atomic_fixed1u_div_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag );
949 char __kmpc_atomic_fixed1_shl_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs ,
int flag);
950 char __kmpc_atomic_fixed1_shr_cpt_rev(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs,
int flag );
951 unsigned char __kmpc_atomic_fixed1u_shr_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned char * lhs,
unsigned char rhs,
int flag );
952 short __kmpc_atomic_fixed2_sub_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
953 short __kmpc_atomic_fixed2_div_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
954 unsigned short __kmpc_atomic_fixed2u_div_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag );
955 short __kmpc_atomic_fixed2_shl_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
956 short __kmpc_atomic_fixed2_shr_cpt_rev(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs,
int flag );
957 unsigned short __kmpc_atomic_fixed2u_shr_cpt_rev(
ident_t *id_ref,
int gtid,
unsigned short * lhs,
unsigned short rhs,
int flag );
958 kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
959 kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
960 kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag );
961 kmp_int32 __kmpc_atomic_fixed4_shl_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
962 kmp_int32 __kmpc_atomic_fixed4_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs,
int flag );
963 kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint32 * lhs, kmp_uint32 rhs,
int flag );
964 kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
965 kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
966 kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag );
967 kmp_int64 __kmpc_atomic_fixed8_shl_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
968 kmp_int64 __kmpc_atomic_fixed8_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs,
int flag );
969 kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt_rev(
ident_t *id_ref,
int gtid, kmp_uint64 * lhs, kmp_uint64 rhs,
int flag );
970 float __kmpc_atomic_float4_sub_cpt_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs,
int flag );
971 float __kmpc_atomic_float4_div_cpt_rev(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs,
int flag );
972 double __kmpc_atomic_float8_sub_cpt_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs,
int flag );
973 double __kmpc_atomic_float8_div_cpt_rev(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs,
int flag );
974 long double __kmpc_atomic_float10_sub_cpt_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag );
975 long double __kmpc_atomic_float10_div_cpt_rev(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs,
int flag );
977 QUAD_LEGACY __kmpc_atomic_float16_sub_cpt_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag );
978 QUAD_LEGACY __kmpc_atomic_float16_div_cpt_rev(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs,
int flag );
981 void __kmpc_atomic_cmplx4_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag );
982 void __kmpc_atomic_cmplx4_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out,
int flag );
983 kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag );
984 kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs,
int flag );
985 kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag );
986 kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt_rev(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs,
int flag );
988 CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag );
989 CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt_rev(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs,
int flag );
991 Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag );
992 Quad_a16_t __kmpc_atomic_float16_div_a16_cpt_rev(
ident_t * id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs,
int flag );
993 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag );
994 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt_rev(
ident_t * id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs,
int flag );
999 char __kmpc_atomic_fixed1_swp(
ident_t *id_ref,
int gtid,
char * lhs,
char rhs );
1000 short __kmpc_atomic_fixed2_swp(
ident_t *id_ref,
int gtid,
short * lhs,
short rhs );
1001 kmp_int32 __kmpc_atomic_fixed4_swp(
ident_t *id_ref,
int gtid, kmp_int32 * lhs, kmp_int32 rhs );
1002 kmp_int64 __kmpc_atomic_fixed8_swp(
ident_t *id_ref,
int gtid, kmp_int64 * lhs, kmp_int64 rhs );
1003 float __kmpc_atomic_float4_swp(
ident_t *id_ref,
int gtid,
float * lhs,
float rhs );
1004 double __kmpc_atomic_float8_swp(
ident_t *id_ref,
int gtid,
double * lhs,
double rhs );
1005 long double __kmpc_atomic_float10_swp(
ident_t *id_ref,
int gtid,
long double * lhs,
long double rhs );
1007 QUAD_LEGACY __kmpc_atomic_float16_swp(
ident_t *id_ref,
int gtid, QUAD_LEGACY * lhs, QUAD_LEGACY rhs );
1010 void __kmpc_atomic_cmplx4_swp(
ident_t *id_ref,
int gtid, kmp_cmplx32 * lhs, kmp_cmplx32 rhs, kmp_cmplx32 * out );
1013 kmp_cmplx64 __kmpc_atomic_cmplx8_swp(
ident_t *id_ref,
int gtid, kmp_cmplx64 * lhs, kmp_cmplx64 rhs );
1014 kmp_cmplx80 __kmpc_atomic_cmplx10_swp(
ident_t *id_ref,
int gtid, kmp_cmplx80 * lhs, kmp_cmplx80 rhs );
1016 CPLX128_LEG __kmpc_atomic_cmplx16_swp(
ident_t *id_ref,
int gtid, CPLX128_LEG * lhs, CPLX128_LEG rhs );
1017 #if ( KMP_ARCH_X86 ) 1018 Quad_a16_t __kmpc_atomic_float16_a16_swp(
ident_t *id_ref,
int gtid, Quad_a16_t * lhs, Quad_a16_t rhs );
1019 kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_swp(
ident_t *id_ref,
int gtid, kmp_cmplx128_a16_t * lhs, kmp_cmplx128_a16_t rhs );
1025 #endif //OMP_40_ENABLED 1027 #endif //KMP_ARCH_X86 || KMP_ARCH_X86_64