21 #include "ompt-specific.h" 33 #if defined(__cplusplus) && (KMP_OS_WINDOWS) 40 #define KMP_DO_ALIGN(alignment) 43 #if (_MSC_VER < 1600) && defined(_DEBUG) 48 #define _DEBUG_TEMPORARILY_UNSET_ 53 template <
typename type_lhs,
typename type_rhs>
54 std::complex<type_lhs> __kmp_lhs_div_rhs(
const std::complex<type_lhs> &lhs,
55 const std::complex<type_rhs> &rhs) {
56 type_lhs a = lhs.real();
57 type_lhs b = lhs.imag();
58 type_rhs c = rhs.real();
59 type_rhs d = rhs.imag();
60 type_rhs den = c * c + d * d;
61 type_rhs r = (a * c + b * d);
62 type_rhs i = (b * c - a * d);
63 std::complex<type_lhs> ret(r / den, i / den);
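// Usage sketch (illustrative only, not part of this header's interface):
// mixed-precision complex division routes through __kmp_lhs_div_rhs so the
// result keeps the precision of the left-hand side.
//
//   std::complex<float> lhs(1.0f, 2.0f);
//   std::complex<double> rhs(3.0, -4.0);
//   std::complex<float> q = __kmp_lhs_div_rhs(lhs, rhs); // float result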
// New wrappers around C++ std::complex (Windows-only path).
struct __kmp_cmplx64_t : std::complex<double> {

  __kmp_cmplx64_t() : std::complex<double>() {}

  __kmp_cmplx64_t(const std::complex<double> &cd) : std::complex<double>(cd) {}

  void operator/=(const __kmp_cmplx64_t &rhs) {
    std::complex<double> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx64_t operator/(const __kmp_cmplx64_t &rhs) {
    std::complex<double> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx64_t kmp_cmplx64;
struct __kmp_cmplx32_t : std::complex<float> {

  __kmp_cmplx32_t() : std::complex<float>() {}

  __kmp_cmplx32_t(const std::complex<float> &cf) : std::complex<float>(cf) {}

  __kmp_cmplx32_t operator+(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs + rhs);
  }
  __kmp_cmplx32_t operator-(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs - rhs);
  }
  __kmp_cmplx32_t operator*(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs * rhs);
  }

  __kmp_cmplx32_t operator+(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) + b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }
  __kmp_cmplx32_t operator-(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) - b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }
  __kmp_cmplx32_t operator*(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) * b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }

  void operator/=(const __kmp_cmplx32_t &rhs) {
    std::complex<float> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx32_t operator/(const __kmp_cmplx32_t &rhs) {
    std::complex<float> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }

  void operator/=(const kmp_cmplx64 &rhs) {
    std::complex<float> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx32_t operator/(const kmp_cmplx64 &rhs) {
    std::complex<float> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx32_t kmp_cmplx32;
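// Usage sketch (illustrative only): the extra operators above let a 4-byte
// complex LHS combine with an 8-byte complex RHS, which the mixed-type
// atomic entry points further below rely on.
//
//   kmp_cmplx32 x(1.0f, 1.0f);
//   kmp_cmplx64 y(2.0, 3.0);
//   x = x * y; // promoted to double internally, truncated back to float
//   x /= y;    // routed through __kmp_lhs_div_rhs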
struct KMP_DO_ALIGN(16) __kmp_cmplx80_t : std::complex<long double> {

  __kmp_cmplx80_t() : std::complex<long double>() {}

  __kmp_cmplx80_t(const std::complex<long double> &cld)
      : std::complex<long double>(cld) {}

  void operator/=(const __kmp_cmplx80_t &rhs) {
    std::complex<long double> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx80_t operator/(const __kmp_cmplx80_t &rhs) {
    std::complex<long double> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef KMP_DO_ALIGN(16) struct __kmp_cmplx80_t kmp_cmplx80;
#if KMP_HAVE_QUAD
struct __kmp_cmplx128_t : std::complex<_Quad> {

  __kmp_cmplx128_t() : std::complex<_Quad>() {}

  __kmp_cmplx128_t(const std::complex<_Quad> &cq) : std::complex<_Quad>(cq) {}

  void operator/=(const __kmp_cmplx128_t &rhs) {
    std::complex<_Quad> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx128_t operator/(const __kmp_cmplx128_t &rhs) {
    std::complex<_Quad> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx128_t kmp_cmplx128;
#endif // KMP_HAVE_QUAD

#ifdef _DEBUG_TEMPORARILY_UNSET_
#undef _DEBUG_TEMPORARILY_UNSET_
// Restore the _DEBUG setting that was unset above.
#define _DEBUG
#endif

#else
// Create shortcuts for the C99 complex types.
typedef float _Complex kmp_cmplx32;
typedef double _Complex kmp_cmplx64;
typedef long double _Complex kmp_cmplx80;
#if KMP_HAVE_QUAD
typedef _Quad _Complex kmp_cmplx128;
#endif
#endif // defined(__cplusplus) && (KMP_OS_WINDOWS)
// Wrappers that keep the 16-byte types usable when the ABI only guarantees
// 4-byte alignment (32-bit x86).
#if KMP_ARCH_X86 && KMP_HAVE_QUAD

#pragma pack(push, 4)

struct KMP_DO_ALIGN(4) Quad_a4_t {
  _Quad q;

  Quad_a4_t() : q() {}
  Quad_a4_t(const _Quad &cq) : q(cq) {}

  Quad_a4_t operator+(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs + rhs);
  }

  Quad_a4_t operator-(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs - rhs);
  }
  Quad_a4_t operator*(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs * rhs);
  }

  Quad_a4_t operator/(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs / rhs);
  }
};

struct KMP_DO_ALIGN(4) kmp_cmplx128_a4_t {
  kmp_cmplx128 q;

  kmp_cmplx128_a4_t() : q() {}
  kmp_cmplx128_a4_t(const kmp_cmplx128 &c128) : q(c128) {}

  kmp_cmplx128_a4_t operator+(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs + rhs);
  }
  kmp_cmplx128_a4_t operator-(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs - rhs);
  }
  kmp_cmplx128_a4_t operator*(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs * rhs);
  }
  kmp_cmplx128_a4_t operator/(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs / rhs);
  }
};

#pragma pack(pop)
struct KMP_DO_ALIGN(16) Quad_a16_t {
  _Quad q;

  Quad_a16_t() : q() {}
  Quad_a16_t(const _Quad &cq) : q(cq) {}

  Quad_a16_t operator+(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs + rhs);
  }

  Quad_a16_t operator-(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs - rhs);
  }
  Quad_a16_t operator*(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs * rhs);
  }

  Quad_a16_t operator/(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs / rhs);
  }
};

struct KMP_DO_ALIGN(16) kmp_cmplx128_a16_t {
  kmp_cmplx128 q;

  kmp_cmplx128_a16_t() : q() {}
  kmp_cmplx128_a16_t(const kmp_cmplx128 &c128) : q(c128) {}

  kmp_cmplx128_a16_t operator+(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs + rhs);
  }
  kmp_cmplx128_a16_t operator-(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs - rhs);
  }
  kmp_cmplx128_a16_t operator*(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs * rhs);
  }
  kmp_cmplx128_a16_t operator/(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs / rhs);
  }
};

#endif // KMP_ARCH_X86 && KMP_HAVE_QUAD

#if (KMP_ARCH_X86)
#define QUAD_LEGACY Quad_a4_t
#define CPLX128_LEG kmp_cmplx128_a4_t
#else
#define QUAD_LEGACY _Quad
#define CPLX128_LEG kmp_cmplx128
#endif

// Runtime-selected atomic implementation mode.
extern int __kmp_atomic_mode;
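// Note (illustrative, not from the original header): QUAD_LEGACY and
// CPLX128_LEG pick the packed, 4-byte-aligned wrappers only where those are
// defined (32-bit x86 with _Quad support); elsewhere they are plain aliases.
// A declaration written against the macros therefore compiles to either form:
//
//   void example_decl(QUAD_LEGACY *lhs, QUAD_LEGACY rhs); // hypothetical name
//   // 32-bit x86:  void example_decl(Quad_a4_t *lhs, Quad_a4_t rhs);
//   // elsewhere :  void example_decl(_Quad *lhs, _Quad rhs);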
// Atomic locks are queuing locks; the wrappers below also raise the OMPT
// mutex callbacks when a tool has enabled them.
typedef kmp_queuing_lock_t kmp_atomic_lock_t;

static inline void __kmp_acquire_atomic_lock(kmp_atomic_lock_t *lck,
                                             kmp_int32 gtid) {
#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_acquire) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_acquire)(
        ompt_mutex_atomic, 0, kmp_mutex_impl_queuing, (ompt_wait_id_t)lck,
        OMPT_GET_RETURN_ADDRESS(0));
  }
#endif

  __kmp_acquire_queuing_lock(lck, gtid);

#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_acquired) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_acquired)(
        ompt_mutex_atomic, (ompt_wait_id_t)lck, OMPT_GET_RETURN_ADDRESS(0));
  }
#endif
}

static inline int __kmp_test_atomic_lock(kmp_atomic_lock_t *lck,
                                         kmp_int32 gtid) {
  return __kmp_test_queuing_lock(lck, gtid);
}

static inline void __kmp_release_atomic_lock(kmp_atomic_lock_t *lck,
                                             kmp_int32 gtid) {
  __kmp_release_queuing_lock(lck, gtid);
#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_released) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_released)(
        ompt_mutex_atomic, (ompt_wait_id_t)lck, OMPT_GET_RETURN_ADDRESS(0));
  }
#endif
}

static inline void __kmp_init_atomic_lock(kmp_atomic_lock_t *lck) {
  __kmp_init_queuing_lock(lck);
}

static inline void __kmp_destroy_atomic_lock(kmp_atomic_lock_t *lck) {
  __kmp_destroy_queuing_lock(lck);
}
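// Usage sketch (illustrative only): the implementation brackets a lock-based
// fallback for an atomic update roughly like this, assuming a shared location
// guarded by one of the global locks declared below.
//
//   __kmp_acquire_atomic_lock(&__kmp_atomic_lock_4i, gtid);
//   *lhs += rhs; // non-atomic update, now protected by the lock
//   __kmp_release_atomic_lock(&__kmp_atomic_lock_4i, gtid);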
// Global locks used when an operation has no lock-free implementation,
// one per operand size/kind.
extern kmp_atomic_lock_t __kmp_atomic_lock; // GNU-compatible mode
extern kmp_atomic_lock_t __kmp_atomic_lock_1i; // 1-byte fixed-point types
extern kmp_atomic_lock_t __kmp_atomic_lock_2i; // 2-byte fixed-point types
extern kmp_atomic_lock_t __kmp_atomic_lock_4i; // 4-byte fixed-point types
extern kmp_atomic_lock_t __kmp_atomic_lock_4r; // 4-byte floating-point types
extern kmp_atomic_lock_t __kmp_atomic_lock_8i; // 8-byte fixed-point types
extern kmp_atomic_lock_t __kmp_atomic_lock_8r; // 8-byte floating-point types
extern kmp_atomic_lock_t __kmp_atomic_lock_8c; // 8-byte complex types
extern kmp_atomic_lock_t __kmp_atomic_lock_10r; // long double types
extern kmp_atomic_lock_t __kmp_atomic_lock_16r; // 16-byte floating-point types
extern kmp_atomic_lock_t __kmp_atomic_lock_16c; // 16-byte complex types
extern kmp_atomic_lock_t __kmp_atomic_lock_20c; // 20-byte complex types
extern kmp_atomic_lock_t __kmp_atomic_lock_32c; // 32-byte complex types
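// Naming scheme (summary added for orientation; the exact calls a compiler
// emits are compiler-specific): the entry points below are
// __kmpc_atomic_<type>_<op>, where <type> encodes the LHS ("fixed4" = 4-byte
// integer, "float8" = 8-byte real, "cmplx4" = single-precision complex, ...)
// and <op> the operation. A directive such as
//
//   #pragma omp atomic
//   x += y;            // x, y: kmp_int32
//
// may be lowered to something like
//
//   __kmpc_atomic_fixed4_add(&loc, gtid, &x, y);
//
// where loc/gtid identify the source location and the calling thread.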
// ATOMIC entry points for the update form: *lhs = *lhs <op> rhs.

// 1-byte
void __kmpc_atomic_fixed1_add(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_andb(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_div(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_div(ident_t *id_ref, int gtid, unsigned char *lhs,
                               unsigned char rhs);
void __kmpc_atomic_fixed1_mul(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_orb(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shr(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_shr(ident_t *id_ref, int gtid, unsigned char *lhs,
                               unsigned char rhs);
void __kmpc_atomic_fixed1_sub(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_xor(ident_t *id_ref, int gtid, char *lhs, char rhs);
// 2-byte
void __kmpc_atomic_fixed2_add(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_andb(ident_t *id_ref, int gtid, short *lhs,
                               short rhs);
void __kmpc_atomic_fixed2_div(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_div(ident_t *id_ref, int gtid, unsigned short *lhs,
                               unsigned short rhs);
void __kmpc_atomic_fixed2_mul(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_orb(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shr(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_shr(ident_t *id_ref, int gtid, unsigned short *lhs,
                               unsigned short rhs);
void __kmpc_atomic_fixed2_sub(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_xor(ident_t *id_ref, int gtid, short *lhs, short rhs);
// 4-byte add / sub
void __kmpc_atomic_fixed4_add(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed4_sub(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_float4_add(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                              kmp_real32 rhs);
void __kmpc_atomic_float4_sub(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                              kmp_real32 rhs);
// 8-byte add / sub
void __kmpc_atomic_fixed8_add(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_fixed8_sub(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_float8_add(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                              kmp_real64 rhs);
void __kmpc_atomic_float8_sub(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                              kmp_real64 rhs);
// 4-byte fixed-point
void __kmpc_atomic_fixed4_andb(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                               kmp_int32 rhs);
void __kmpc_atomic_fixed4_div(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed4u_div(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
                               kmp_uint32 rhs);
void __kmpc_atomic_fixed4_mul(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed4_orb(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed4_shl(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed4_shr(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed4u_shr(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
                               kmp_uint32 rhs);
void __kmpc_atomic_fixed4_xor(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
// 8-byte fixed-point
void __kmpc_atomic_fixed8_andb(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                               kmp_int64 rhs);
void __kmpc_atomic_fixed8_div(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_fixed8u_div(ident_t *id_ref, int gtid, kmp_uint64 *lhs,
                               kmp_uint64 rhs);
void __kmpc_atomic_fixed8_mul(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_fixed8_orb(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_fixed8_shl(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_fixed8_shr(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_fixed8u_shr(ident_t *id_ref, int gtid, kmp_uint64 *lhs,
                               kmp_uint64 rhs);
void __kmpc_atomic_fixed8_xor(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
// 4-byte floating-point mul / div
void __kmpc_atomic_float4_div(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                              kmp_real32 rhs);
void __kmpc_atomic_float4_mul(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                              kmp_real32 rhs);
// 8-byte floating-point mul / div
void __kmpc_atomic_float8_div(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                              kmp_real64 rhs);
void __kmpc_atomic_float8_mul(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                              kmp_real64 rhs);
// logical .AND. and .OR.
void __kmpc_atomic_fixed1_andl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_orl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_andl(ident_t *id_ref, int gtid, short *lhs,
                               short rhs);
void __kmpc_atomic_fixed2_orl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_andl(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                               kmp_int32 rhs);
void __kmpc_atomic_fixed4_orl(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed8_andl(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                               kmp_int64 rhs);
void __kmpc_atomic_fixed8_orl(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
// MIN / MAX
void __kmpc_atomic_fixed1_max(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_min(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_max(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_min(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_max(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed4_min(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed8_max(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_fixed8_min(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
void __kmpc_atomic_float4_max(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                              kmp_real32 rhs);
void __kmpc_atomic_float4_min(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                              kmp_real32 rhs);
void __kmpc_atomic_float8_max(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                              kmp_real64 rhs);
void __kmpc_atomic_float8_min(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                              kmp_real64 rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_float16_max(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                               QUAD_LEGACY rhs);
void __kmpc_atomic_float16_min(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                               QUAD_LEGACY rhs);
#if (KMP_ARCH_X86)
// Routines with 16-byte arguments aligned to a 16-byte boundary.
void __kmpc_atomic_float16_max_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs,
                                   Quad_a16_t rhs);
void __kmpc_atomic_float16_min_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs,
                                   Quad_a16_t rhs);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
// .NEQV. (same as xor)
void __kmpc_atomic_fixed1_neqv(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_neqv(ident_t *id_ref, int gtid, short *lhs,
                               short rhs);
void __kmpc_atomic_fixed4_neqv(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                               kmp_int32 rhs);
void __kmpc_atomic_fixed8_neqv(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                               kmp_int64 rhs);
// .EQV.
void __kmpc_atomic_fixed1_eqv(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_eqv(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_eqv(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                              kmp_int32 rhs);
void __kmpc_atomic_fixed8_eqv(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                              kmp_int64 rhs);
// long double
void __kmpc_atomic_float10_add(ident_t *id_ref, int gtid, long double *lhs,
                               long double rhs);
void __kmpc_atomic_float10_sub(ident_t *id_ref, int gtid, long double *lhs,
                               long double rhs);
void __kmpc_atomic_float10_mul(ident_t *id_ref, int gtid, long double *lhs,
                               long double rhs);
void __kmpc_atomic_float10_div(ident_t *id_ref, int gtid, long double *lhs,
                               long double rhs);
#if KMP_HAVE_QUAD
// _Quad type
void __kmpc_atomic_float16_add(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                               QUAD_LEGACY rhs);
void __kmpc_atomic_float16_sub(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                               QUAD_LEGACY rhs);
void __kmpc_atomic_float16_mul(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                               QUAD_LEGACY rhs);
void __kmpc_atomic_float16_div(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                               QUAD_LEGACY rhs);
#if (KMP_ARCH_X86)
// _Quad aligned to a 16-byte boundary
void __kmpc_atomic_float16_add_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs,
                                   Quad_a16_t rhs);
void __kmpc_atomic_float16_sub_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs,
                                   Quad_a16_t rhs);
void __kmpc_atomic_float16_mul_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs,
                                   Quad_a16_t rhs);
void __kmpc_atomic_float16_div_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs,
                                   Quad_a16_t rhs);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
// routines for complex types
void __kmpc_atomic_cmplx4_add(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                              kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_sub(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                              kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_mul(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                              kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_div(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                              kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_add(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs,
                              kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_sub(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs,
                              kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_mul(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs,
                              kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_div(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs,
                              kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_add(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs,
                               kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_sub(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs,
                               kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_mul(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs,
                               kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_div(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs,
                               kmp_cmplx80 rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_cmplx16_add(ident_t *id_ref, int gtid, CPLX128_LEG *lhs,
                               CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_sub(ident_t *id_ref, int gtid, CPLX128_LEG *lhs,
                               CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_mul(ident_t *id_ref, int gtid, CPLX128_LEG *lhs,
                               CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_div(ident_t *id_ref, int gtid, CPLX128_LEG *lhs,
                               CPLX128_LEG rhs);
#if (KMP_ARCH_X86)
// _Quad complex aligned to a 16-byte boundary
void __kmpc_atomic_cmplx16_add_a16(ident_t *id_ref, int gtid,
                                   kmp_cmplx128_a16_t *lhs,
                                   kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_sub_a16(ident_t *id_ref, int gtid,
                                   kmp_cmplx128_a16_t *lhs,
                                   kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_mul_a16(ident_t *id_ref, int gtid,
                                   kmp_cmplx128_a16_t *lhs,
                                   kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_div_a16(ident_t *id_ref, int gtid,
                                   kmp_cmplx128_a16_t *lhs,
                                   kmp_cmplx128_a16_t rhs);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
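// Orientation note (hedged; the exact lowering is compiler-specific): the
// "_rev" entry points that follow implement the reversed update
// *lhs = rhs <op> *lhs, which matters only for non-commutative operators.
// For example,
//
//   #pragma omp atomic
//   x = 10 / x;        // x: kmp_int32
//
// may be lowered to a call such as
//
//   __kmpc_atomic_fixed4_div_rev(&loc, gtid, &x, 10);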
#if OMP_40_ENABLED
// OpenMP 4.0: x = expr binop x for non-commutative operations.
// Supported only on IA-32 architecture and Intel(R) 64.
#if KMP_ARCH_X86 || KMP_ARCH_X86_64

void __kmpc_atomic_fixed1_sub_rev(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs);
void __kmpc_atomic_fixed1_div_rev(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs);
void __kmpc_atomic_fixed1u_div_rev(ident_t *id_ref, int gtid,
                                   unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_shl_rev(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs);
void __kmpc_atomic_fixed1_shr_rev(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs);
void __kmpc_atomic_fixed1u_shr_rev(ident_t *id_ref, int gtid,
                                   unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed2_sub_rev(ident_t *id_ref, int gtid, short *lhs,
                                  short rhs);
void __kmpc_atomic_fixed2_div_rev(ident_t *id_ref, int gtid, short *lhs,
                                  short rhs);
void __kmpc_atomic_fixed2u_div_rev(ident_t *id_ref, int gtid,
                                   unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_shl_rev(ident_t *id_ref, int gtid, short *lhs,
                                  short rhs);
void __kmpc_atomic_fixed2_shr_rev(ident_t *id_ref, int gtid, short *lhs,
                                  short rhs);
void __kmpc_atomic_fixed2u_shr_rev(ident_t *id_ref, int gtid,
                                   unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed4_sub_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                  kmp_int32 rhs);
void __kmpc_atomic_fixed4_div_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                  kmp_int32 rhs);
void __kmpc_atomic_fixed4u_div_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
                                   kmp_uint32 rhs);
void __kmpc_atomic_fixed4_shl_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                  kmp_int32 rhs);
void __kmpc_atomic_fixed4_shr_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                  kmp_int32 rhs);
void __kmpc_atomic_fixed4u_shr_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
                                   kmp_uint32 rhs);
void __kmpc_atomic_fixed8_sub_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                  kmp_int64 rhs);
void __kmpc_atomic_fixed8_div_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                  kmp_int64 rhs);
void __kmpc_atomic_fixed8u_div_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs,
                                   kmp_uint64 rhs);
void __kmpc_atomic_fixed8_shl_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                  kmp_int64 rhs);
void __kmpc_atomic_fixed8_shr_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                  kmp_int64 rhs);
void __kmpc_atomic_fixed8u_shr_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs,
                                   kmp_uint64 rhs);
void __kmpc_atomic_float4_sub_rev(ident_t *id_ref, int gtid, float *lhs,
                                  float rhs);
void __kmpc_atomic_float4_div_rev(ident_t *id_ref, int gtid, float *lhs,
                                  float rhs);
void __kmpc_atomic_float8_sub_rev(ident_t *id_ref, int gtid, double *lhs,
                                  double rhs);
void __kmpc_atomic_float8_div_rev(ident_t *id_ref, int gtid, double *lhs,
                                  double rhs);
void __kmpc_atomic_float10_sub_rev(ident_t *id_ref, int gtid, long double *lhs,
                                   long double rhs);
void __kmpc_atomic_float10_div_rev(ident_t *id_ref, int gtid, long double *lhs,
                                   long double rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_float16_sub_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                                   QUAD_LEGACY rhs);
void __kmpc_atomic_float16_div_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                                   QUAD_LEGACY rhs);
#endif // KMP_HAVE_QUAD
void __kmpc_atomic_cmplx4_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_div_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs,
                                  kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_div_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs,
                                  kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs,
                                   kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_div_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs,
                                   kmp_cmplx80 rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_cmplx16_sub_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs,
                                   CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_div_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs,
                                   CPLX128_LEG rhs);
#if (KMP_ARCH_X86)
void __kmpc_atomic_float16_sub_a16_rev(ident_t *id_ref, int gtid,
                                       Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_div_a16_rev(ident_t *id_ref, int gtid,
                                       Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_cmplx16_sub_a16_rev(ident_t *id_ref, int gtid,
                                       kmp_cmplx128_a16_t *lhs,
                                       kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_div_a16_rev(ident_t *id_ref, int gtid,
                                       kmp_cmplx128_a16_t *lhs,
                                       kmp_cmplx128_a16_t rhs);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
#endif // KMP_ARCH_X86 || KMP_ARCH_X86_64
#endif // OMP_40_ENABLED
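// Orientation note (hedged): the "_float8" entry points that follow handle an
// update whose RHS is an 8-byte real while the LHS keeps its own type, e.g.
//
//   float x; double d;
//   #pragma omp atomic
//   x *= d;
//
// may become a call along the lines of
//
//   __kmpc_atomic_float4_mul_float8(&loc, gtid, &x, d);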
// routines for mixed types: RHS is an 8-byte real
void __kmpc_atomic_fixed1_mul_float8(ident_t *id_ref, int gtid, char *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_fixed1_div_float8(ident_t *id_ref, int gtid, char *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_fixed2_mul_float8(ident_t *id_ref, int gtid, short *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_fixed2_div_float8(ident_t *id_ref, int gtid, short *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_fixed4_mul_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_fixed4_div_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_fixed8_mul_float8(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_fixed8_div_float8(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_float4_add_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_float4_sub_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_float4_mul_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                                     kmp_real64 rhs);
void __kmpc_atomic_float4_div_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                                     kmp_real64 rhs);
// routines used when the RHS has _Quad type
#if KMP_HAVE_QUAD
void __kmpc_atomic_fixed1_add_fp(ident_t *id_ref, int gtid, char *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed1u_add_fp(ident_t *id_ref, int gtid,
                                  unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_sub_fp(ident_t *id_ref, int gtid, char *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed1u_sub_fp(ident_t *id_ref, int gtid,
                                  unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_mul_fp(ident_t *id_ref, int gtid, char *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed1u_mul_fp(ident_t *id_ref, int gtid,
                                  unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_div_fp(ident_t *id_ref, int gtid, char *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed1u_div_fp(ident_t *id_ref, int gtid,
                                  unsigned char *lhs, _Quad rhs);

void __kmpc_atomic_fixed2_add_fp(ident_t *id_ref, int gtid, short *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed2u_add_fp(ident_t *id_ref, int gtid,
                                  unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_sub_fp(ident_t *id_ref, int gtid, short *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed2u_sub_fp(ident_t *id_ref, int gtid,
                                  unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_mul_fp(ident_t *id_ref, int gtid, short *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed2u_mul_fp(ident_t *id_ref, int gtid,
                                  unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_div_fp(ident_t *id_ref, int gtid, short *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed2u_div_fp(ident_t *id_ref, int gtid,
                                  unsigned short *lhs, _Quad rhs);

void __kmpc_atomic_fixed4_add_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed4u_add_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
                                  _Quad rhs);
void __kmpc_atomic_fixed4_sub_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed4u_sub_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
                                  _Quad rhs);
void __kmpc_atomic_fixed4_mul_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed4u_mul_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
                                  _Quad rhs);
void __kmpc_atomic_fixed4_div_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed4u_div_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs,
                                  _Quad rhs);

void __kmpc_atomic_fixed8_add_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed8u_add_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs,
                                  _Quad rhs);
void __kmpc_atomic_fixed8_sub_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed8u_sub_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs,
                                  _Quad rhs);
void __kmpc_atomic_fixed8_mul_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed8u_mul_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs,
                                  _Quad rhs);
void __kmpc_atomic_fixed8_div_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_fixed8u_div_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs,
                                  _Quad rhs);

void __kmpc_atomic_float4_add_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_float4_sub_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_float4_mul_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_float4_div_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                                 _Quad rhs);

void __kmpc_atomic_float8_add_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_float8_sub_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_float8_mul_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                                 _Quad rhs);
void __kmpc_atomic_float8_div_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                                 _Quad rhs);

void __kmpc_atomic_float10_add_fp(ident_t *id_ref, int gtid, long double *lhs,
                                  _Quad rhs);
void __kmpc_atomic_float10_sub_fp(ident_t *id_ref, int gtid, long double *lhs,
                                  _Quad rhs);
void __kmpc_atomic_float10_mul_fp(ident_t *id_ref, int gtid, long double *lhs,
                                  _Quad rhs);
void __kmpc_atomic_float10_div_fp(ident_t *id_ref, int gtid, long double *lhs,
                                  _Quad rhs);

// Reverse operations with a _Quad RHS.
void __kmpc_atomic_fixed1_sub_rev_fp(ident_t *id_ref, int gtid, char *lhs,
                                     _Quad rhs);
void __kmpc_atomic_fixed1u_sub_rev_fp(ident_t *id_ref, int gtid,
                                      unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_div_rev_fp(ident_t *id_ref, int gtid, char *lhs,
                                     _Quad rhs);
void __kmpc_atomic_fixed1u_div_rev_fp(ident_t *id_ref, int gtid,
                                      unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_sub_rev_fp(ident_t *id_ref, int gtid, short *lhs,
                                     _Quad rhs);
void __kmpc_atomic_fixed2u_sub_rev_fp(ident_t *id_ref, int gtid,
                                      unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_div_rev_fp(ident_t *id_ref, int gtid, short *lhs,
                                     _Quad rhs);
void __kmpc_atomic_fixed2u_div_rev_fp(ident_t *id_ref, int gtid,
                                      unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_sub_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                     _Quad rhs);
void __kmpc_atomic_fixed4u_sub_rev_fp(ident_t *id_ref, int gtid,
                                      kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_div_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                     _Quad rhs);
void __kmpc_atomic_fixed4u_div_rev_fp(ident_t *id_ref, int gtid,
                                      kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_sub_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                     _Quad rhs);
void __kmpc_atomic_fixed8u_sub_rev_fp(ident_t *id_ref, int gtid,
                                      kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_div_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                     _Quad rhs);
void __kmpc_atomic_fixed8u_div_rev_fp(ident_t *id_ref, int gtid,
                                      kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_float4_sub_rev_fp(ident_t *id_ref, int gtid, float *lhs,
                                     _Quad rhs);
void __kmpc_atomic_float4_div_rev_fp(ident_t *id_ref, int gtid, float *lhs,
                                     _Quad rhs);
void __kmpc_atomic_float8_sub_rev_fp(ident_t *id_ref, int gtid, double *lhs,
                                     _Quad rhs);
void __kmpc_atomic_float8_div_rev_fp(ident_t *id_ref, int gtid, double *lhs,
                                     _Quad rhs);
void __kmpc_atomic_float10_sub_rev_fp(ident_t *id_ref, int gtid,
                                      long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_div_rev_fp(ident_t *id_ref, int gtid,
                                      long double *lhs, _Quad rhs);

#endif // KMP_HAVE_QUAD
// RHS=cmplx8
void __kmpc_atomic_cmplx4_add_cmplx8(ident_t *id_ref, int gtid,
                                     kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_sub_cmplx8(ident_t *id_ref, int gtid,
                                     kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_mul_cmplx8(ident_t *id_ref, int gtid,
                                     kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_div_cmplx8(ident_t *id_ref, int gtid,
                                     kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_1(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                     void (*f)(void *, void *, void *));
void __kmpc_atomic_2(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                     void (*f)(void *, void *, void *));
void __kmpc_atomic_4(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                     void (*f)(void *, void *, void *));
void __kmpc_atomic_8(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                     void (*f)(void *, void *, void *));
void __kmpc_atomic_10(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                      void (*f)(void *, void *, void *));
void __kmpc_atomic_16(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                      void (*f)(void *, void *, void *));
void __kmpc_atomic_20(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                      void (*f)(void *, void *, void *));
void __kmpc_atomic_32(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                      void (*f)(void *, void *, void *));
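// Sketch of how the generic N-byte entry points above are meant to be used
// (hedged: the callback convention shown is an assumption based on the usual
// f(result, lhs_value, rhs) pattern, and "my_add_i64" is a hypothetical name,
// not part of the runtime):
//
//   static void my_add_i64(void *out, void *a, void *b) {
//     *(kmp_int64 *)out = *(kmp_int64 *)a + *(kmp_int64 *)b;
//   }
//   ...
//   __kmpc_atomic_8(&loc, gtid, &shared_i64, &rhs_i64, my_add_i64);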
// atomic read, write, capture and swap routines
#if KMP_ARCH_X86 || KMP_ARCH_X86_64

// atomic read: return the current value of *loc
char __kmpc_atomic_fixed1_rd(ident_t *id_ref, int gtid, char *loc);
short __kmpc_atomic_fixed2_rd(ident_t *id_ref, int gtid, short *loc);
kmp_int32 __kmpc_atomic_fixed4_rd(ident_t *id_ref, int gtid, kmp_int32 *loc);
kmp_int64 __kmpc_atomic_fixed8_rd(ident_t *id_ref, int gtid, kmp_int64 *loc);
kmp_real32 __kmpc_atomic_float4_rd(ident_t *id_ref, int gtid, kmp_real32 *loc);
kmp_real64 __kmpc_atomic_float8_rd(ident_t *id_ref, int gtid, kmp_real64 *loc);
long double __kmpc_atomic_float10_rd(ident_t *id_ref, int gtid,
                                     long double *loc);
#if KMP_HAVE_QUAD
QUAD_LEGACY __kmpc_atomic_float16_rd(ident_t *id_ref, int gtid,
                                     QUAD_LEGACY *loc);
#endif // KMP_HAVE_QUAD
// On Windows the cmplx4 result is returned through an extra output parameter
// instead of by value.
#if (KMP_OS_WINDOWS)
void __kmpc_atomic_cmplx4_rd(kmp_cmplx32 *out, ident_t *id_ref, int gtid,
                             kmp_cmplx32 *loc);
#else
kmp_cmplx32 __kmpc_atomic_cmplx4_rd(ident_t *id_ref, int gtid,
                                    kmp_cmplx32 *loc);
#endif
kmp_cmplx64 __kmpc_atomic_cmplx8_rd(ident_t *id_ref, int gtid,
                                    kmp_cmplx64 *loc);
kmp_cmplx80 __kmpc_atomic_cmplx10_rd(ident_t *id_ref, int gtid,
                                     kmp_cmplx80 *loc);
#if KMP_HAVE_QUAD
CPLX128_LEG __kmpc_atomic_cmplx16_rd(ident_t *id_ref, int gtid,
                                     CPLX128_LEG *loc);
#if (KMP_ARCH_X86)
Quad_a16_t __kmpc_atomic_float16_a16_rd(ident_t *id_ref, int gtid,
                                        Quad_a16_t *loc);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_rd(ident_t *id_ref, int gtid,
                                                kmp_cmplx128_a16_t *loc);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
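// Lowering sketch for an atomic read (hedged; exact codegen is
// compiler-specific):
//
//   #pragma omp atomic read
//   v = x;             // x, v: kmp_int32
//
// may become
//
//   v = __kmpc_atomic_fixed4_rd(&loc, gtid, &x);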
// atomic write: store rhs into *lhs atomically
void __kmpc_atomic_fixed1_wr(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_wr(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_wr(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                             kmp_int32 rhs);
void __kmpc_atomic_fixed8_wr(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                             kmp_int64 rhs);
void __kmpc_atomic_float4_wr(ident_t *id_ref, int gtid, kmp_real32 *lhs,
                             kmp_real32 rhs);
void __kmpc_atomic_float8_wr(ident_t *id_ref, int gtid, kmp_real64 *lhs,
                             kmp_real64 rhs);
void __kmpc_atomic_float10_wr(ident_t *id_ref, int gtid, long double *lhs,
                              long double rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_float16_wr(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs,
                              QUAD_LEGACY rhs);
#endif // KMP_HAVE_QUAD
void __kmpc_atomic_cmplx4_wr(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                             kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_wr(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs,
                             kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_wr(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs,
                              kmp_cmplx80 rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_cmplx16_wr(ident_t *id_ref, int gtid, CPLX128_LEG *lhs,
                              CPLX128_LEG rhs);
#if (KMP_ARCH_X86)
void __kmpc_atomic_float16_a16_wr(ident_t *id_ref, int gtid, Quad_a16_t *lhs,
                                  Quad_a16_t rhs);
void __kmpc_atomic_cmplx16_a16_wr(ident_t *id_ref, int gtid,
                                  kmp_cmplx128_a16_t *lhs,
                                  kmp_cmplx128_a16_t rhs);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
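// The "_cpt" (capture) routines that follow perform the update and also
// return a captured value of *lhs. The trailing "int flag" selects which
// value is captured; in this sketch it is assumed (not guaranteed by this
// header alone) that a non-zero flag captures the value after the update and
// zero captures the value before it:
//
//   #pragma omp atomic capture
//   { v = x; x += y; }   // x, y, v: kmp_int32
//
// may be lowered along the lines of
//
//   v = __kmpc_atomic_fixed4_add_cpt(&loc, gtid, &x, y, /*flag=*/0);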
// 1-byte capture
char __kmpc_atomic_fixed1_add_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
char __kmpc_atomic_fixed1_andb_cpt(ident_t *id_ref, int gtid, char *lhs,
                                   char rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt(ident_t *id_ref, int gtid,
                                            unsigned char *lhs,
                                            unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_mul_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
char __kmpc_atomic_fixed1_orb_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
char __kmpc_atomic_fixed1_shl_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
char __kmpc_atomic_fixed1_shr_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_shr_cpt(ident_t *id_ref, int gtid,
                                            unsigned char *lhs,
                                            unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
char __kmpc_atomic_fixed1_xor_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
// 2-byte capture
short __kmpc_atomic_fixed2_add_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
short __kmpc_atomic_fixed2_andb_cpt(ident_t *id_ref, int gtid, short *lhs,
                                    short rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt(ident_t *id_ref, int gtid,
                                             unsigned short *lhs,
                                             unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_mul_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
short __kmpc_atomic_fixed2_orb_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
short __kmpc_atomic_fixed2_shl_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
short __kmpc_atomic_fixed2_shr_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_shr_cpt(ident_t *id_ref, int gtid,
                                             unsigned short *lhs,
                                             unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
short __kmpc_atomic_fixed2_xor_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
// 4-byte add / sub capture
kmp_int32 __kmpc_atomic_fixed4_add_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_add_cpt(ident_t *id_ref, int gtid,
                                        kmp_real32 *lhs, kmp_real32 rhs,
                                        int flag);
kmp_real32 __kmpc_atomic_float4_sub_cpt(ident_t *id_ref, int gtid,
                                        kmp_real32 *lhs, kmp_real32 rhs,
                                        int flag);
// 8-byte add / sub capture
kmp_int64 __kmpc_atomic_fixed8_add_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_add_cpt(ident_t *id_ref, int gtid,
                                        kmp_real64 *lhs, kmp_real64 rhs,
                                        int flag);
kmp_real64 __kmpc_atomic_float8_sub_cpt(ident_t *id_ref, int gtid,
                                        kmp_real64 *lhs, kmp_real64 rhs,
                                        int flag);
// 4-byte fixed-point capture
kmp_int32 __kmpc_atomic_fixed4_andb_cpt(ident_t *id_ref, int gtid,
                                        kmp_int32 *lhs, kmp_int32 rhs,
                                        int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt(ident_t *id_ref, int gtid,
                                         kmp_uint32 *lhs, kmp_uint32 rhs,
                                         int flag);
kmp_int32 __kmpc_atomic_fixed4_mul_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_orb_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shl_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shr_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt(ident_t *id_ref, int gtid,
                                         kmp_uint32 *lhs, kmp_uint32 rhs,
                                         int flag);
kmp_int32 __kmpc_atomic_fixed4_xor_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
// 8-byte fixed-point capture
kmp_int64 __kmpc_atomic_fixed8_andb_cpt(ident_t *id_ref, int gtid,
                                        kmp_int64 *lhs, kmp_int64 rhs,
                                        int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt(ident_t *id_ref, int gtid,
                                         kmp_uint64 *lhs, kmp_uint64 rhs,
                                         int flag);
kmp_int64 __kmpc_atomic_fixed8_mul_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_orb_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shl_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shr_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt(ident_t *id_ref, int gtid,
                                         kmp_uint64 *lhs, kmp_uint64 rhs,
                                         int flag);
kmp_int64 __kmpc_atomic_fixed8_xor_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
// 4- and 8-byte floating-point div / mul capture
kmp_real32 __kmpc_atomic_float4_div_cpt(ident_t *id_ref, int gtid,
                                        kmp_real32 *lhs, kmp_real32 rhs,
                                        int flag);
kmp_real32 __kmpc_atomic_float4_mul_cpt(ident_t *id_ref, int gtid,
                                        kmp_real32 *lhs, kmp_real32 rhs,
                                        int flag);
kmp_real64 __kmpc_atomic_float8_div_cpt(ident_t *id_ref, int gtid,
                                        kmp_real64 *lhs, kmp_real64 rhs,
                                        int flag);
kmp_real64 __kmpc_atomic_float8_mul_cpt(ident_t *id_ref, int gtid,
                                        kmp_real64 *lhs, kmp_real64 rhs,
                                        int flag);
// logical .AND. / .OR. capture
char __kmpc_atomic_fixed1_andl_cpt(ident_t *id_ref, int gtid, char *lhs,
                                   char rhs, int flag);
char __kmpc_atomic_fixed1_orl_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
short __kmpc_atomic_fixed2_andl_cpt(ident_t *id_ref, int gtid, short *lhs,
                                    short rhs, int flag);
short __kmpc_atomic_fixed2_orl_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_andl_cpt(ident_t *id_ref, int gtid,
                                        kmp_int32 *lhs, kmp_int32 rhs,
                                        int flag);
kmp_int32 __kmpc_atomic_fixed4_orl_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_andl_cpt(ident_t *id_ref, int gtid,
                                        kmp_int64 *lhs, kmp_int64 rhs,
                                        int flag);
kmp_int64 __kmpc_atomic_fixed8_orl_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
// MIN / MAX capture
char __kmpc_atomic_fixed1_max_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
char __kmpc_atomic_fixed1_min_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
short __kmpc_atomic_fixed2_max_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
short __kmpc_atomic_fixed2_min_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_max_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_min_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_max_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_min_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_max_cpt(ident_t *id_ref, int gtid,
                                        kmp_real32 *lhs, kmp_real32 rhs,
                                        int flag);
kmp_real32 __kmpc_atomic_float4_min_cpt(ident_t *id_ref, int gtid,
                                        kmp_real32 *lhs, kmp_real32 rhs,
                                        int flag);
kmp_real64 __kmpc_atomic_float8_max_cpt(ident_t *id_ref, int gtid,
                                        kmp_real64 *lhs, kmp_real64 rhs,
                                        int flag);
kmp_real64 __kmpc_atomic_float8_min_cpt(ident_t *id_ref, int gtid,
                                        kmp_real64 *lhs, kmp_real64 rhs,
                                        int flag);
#if KMP_HAVE_QUAD
QUAD_LEGACY __kmpc_atomic_float16_max_cpt(ident_t *id_ref, int gtid,
                                          QUAD_LEGACY *lhs, QUAD_LEGACY rhs,
                                          int flag);
QUAD_LEGACY __kmpc_atomic_float16_min_cpt(ident_t *id_ref, int gtid,
                                          QUAD_LEGACY *lhs, QUAD_LEGACY rhs,
                                          int flag);
#endif // KMP_HAVE_QUAD
// .NEQV. / .EQV. capture
char __kmpc_atomic_fixed1_neqv_cpt(ident_t *id_ref, int gtid, char *lhs,
                                   char rhs, int flag);
short __kmpc_atomic_fixed2_neqv_cpt(ident_t *id_ref, int gtid, short *lhs,
                                    short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_neqv_cpt(ident_t *id_ref, int gtid,
                                        kmp_int32 *lhs, kmp_int32 rhs,
                                        int flag);
kmp_int64 __kmpc_atomic_fixed8_neqv_cpt(ident_t *id_ref, int gtid,
                                        kmp_int64 *lhs, kmp_int64 rhs,
                                        int flag);
char __kmpc_atomic_fixed1_eqv_cpt(ident_t *id_ref, int gtid, char *lhs,
                                  char rhs, int flag);
short __kmpc_atomic_fixed2_eqv_cpt(ident_t *id_ref, int gtid, short *lhs,
                                   short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_eqv_cpt(ident_t *id_ref, int gtid,
                                       kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_eqv_cpt(ident_t *id_ref, int gtid,
                                       kmp_int64 *lhs, kmp_int64 rhs, int flag);
// long double and _Quad capture
long double __kmpc_atomic_float10_add_cpt(ident_t *id_ref, int gtid,
                                          long double *lhs, long double rhs,
                                          int flag);
long double __kmpc_atomic_float10_sub_cpt(ident_t *id_ref, int gtid,
                                          long double *lhs, long double rhs,
                                          int flag);
long double __kmpc_atomic_float10_mul_cpt(ident_t *id_ref, int gtid,
                                          long double *lhs, long double rhs,
                                          int flag);
long double __kmpc_atomic_float10_div_cpt(ident_t *id_ref, int gtid,
                                          long double *lhs, long double rhs,
                                          int flag);
#if KMP_HAVE_QUAD
QUAD_LEGACY __kmpc_atomic_float16_add_cpt(ident_t *id_ref, int gtid,
                                          QUAD_LEGACY *lhs, QUAD_LEGACY rhs,
                                          int flag);
QUAD_LEGACY __kmpc_atomic_float16_sub_cpt(ident_t *id_ref, int gtid,
                                          QUAD_LEGACY *lhs, QUAD_LEGACY rhs,
                                          int flag);
QUAD_LEGACY __kmpc_atomic_float16_mul_cpt(ident_t *id_ref, int gtid,
                                          QUAD_LEGACY *lhs, QUAD_LEGACY rhs,
                                          int flag);
QUAD_LEGACY __kmpc_atomic_float16_div_cpt(ident_t *id_ref, int gtid,
                                          QUAD_LEGACY *lhs, QUAD_LEGACY rhs,
                                          int flag);
#endif // KMP_HAVE_QUAD
// complex capture; cmplx4 returns the captured value through *out
void __kmpc_atomic_cmplx4_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_add_cpt(ident_t *id_ref, int gtid,
                                         kmp_cmplx64 *lhs, kmp_cmplx64 rhs,
                                         int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt(ident_t *id_ref, int gtid,
                                         kmp_cmplx64 *lhs, kmp_cmplx64 rhs,
                                         int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_mul_cpt(ident_t *id_ref, int gtid,
                                         kmp_cmplx64 *lhs, kmp_cmplx64 rhs,
                                         int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt(ident_t *id_ref, int gtid,
                                         kmp_cmplx64 *lhs, kmp_cmplx64 rhs,
                                         int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_add_cpt(ident_t *id_ref, int gtid,
                                          kmp_cmplx80 *lhs, kmp_cmplx80 rhs,
                                          int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt(ident_t *id_ref, int gtid,
                                          kmp_cmplx80 *lhs, kmp_cmplx80 rhs,
                                          int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_mul_cpt(ident_t *id_ref, int gtid,
                                          kmp_cmplx80 *lhs, kmp_cmplx80 rhs,
                                          int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt(ident_t *id_ref, int gtid,
                                          kmp_cmplx80 *lhs, kmp_cmplx80 rhs,
                                          int flag);
#if KMP_HAVE_QUAD
CPLX128_LEG __kmpc_atomic_cmplx16_add_cpt(ident_t *id_ref, int gtid,
                                          CPLX128_LEG *lhs, CPLX128_LEG rhs,
                                          int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt(ident_t *id_ref, int gtid,
                                          CPLX128_LEG *lhs, CPLX128_LEG rhs,
                                          int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_mul_cpt(ident_t *id_ref, int gtid,
                                          CPLX128_LEG *lhs, CPLX128_LEG rhs,
                                          int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt(ident_t *id_ref, int gtid,
                                          CPLX128_LEG *lhs, CPLX128_LEG rhs,
                                          int flag);
#if (KMP_ARCH_X86)
// Routines with 16-byte arguments aligned to a 16-byte boundary.
Quad_a16_t __kmpc_atomic_float16_add_a16_cpt(ident_t *id_ref, int gtid,
                                             Quad_a16_t *lhs, Quad_a16_t rhs,
                                             int flag);
Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt(ident_t *id_ref, int gtid,
                                             Quad_a16_t *lhs, Quad_a16_t rhs,
                                             int flag);
Quad_a16_t __kmpc_atomic_float16_mul_a16_cpt(ident_t *id_ref, int gtid,
                                             Quad_a16_t *lhs, Quad_a16_t rhs,
                                             int flag);
Quad_a16_t __kmpc_atomic_float16_div_a16_cpt(ident_t *id_ref, int gtid,
                                             Quad_a16_t *lhs, Quad_a16_t rhs,
                                             int flag);
Quad_a16_t __kmpc_atomic_float16_max_a16_cpt(ident_t *id_ref, int gtid,
                                             Quad_a16_t *lhs, Quad_a16_t rhs,
                                             int flag);
Quad_a16_t __kmpc_atomic_float16_min_a16_cpt(ident_t *id_ref, int gtid,
                                             Quad_a16_t *lhs, Quad_a16_t rhs,
                                             int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_add_a16_cpt(ident_t *id_ref, int gtid,
                                                     kmp_cmplx128_a16_t *lhs,
                                                     kmp_cmplx128_a16_t rhs,
                                                     int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt(ident_t *id_ref, int gtid,
                                                     kmp_cmplx128_a16_t *lhs,
                                                     kmp_cmplx128_a16_t rhs,
                                                     int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_mul_a16_cpt(ident_t *id_ref, int gtid,
                                                     kmp_cmplx128_a16_t *lhs,
                                                     kmp_cmplx128_a16_t rhs,
                                                     int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt(ident_t *id_ref, int gtid,
                                                     kmp_cmplx128_a16_t *lhs,
                                                     kmp_cmplx128_a16_t rhs,
                                                     int flag);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
void __kmpc_atomic_start(void);
void __kmpc_atomic_end(void);
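// Usage sketch (hedged; inferred from the declarations alone): these two
// entry points appear to bracket a region that must be mutually exclusive
// with the other atomic operations, serving as a serializing fallback when
// an atomic construct cannot be mapped onto a single runtime call:
//
//   __kmpc_atomic_start();
//   // ... arbitrary read-modify-write on the shared location ...
//   __kmpc_atomic_end();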
#if OMP_40_ENABLED
// OpenMP 4.0 capture-reverse: { v = x; x = expr binop x; } and
// { x = expr binop x; v = x; } for non-commutative operations.
char __kmpc_atomic_fixed1_sub_cpt_rev(ident_t *id_ref, int gtid, char *lhs,
                                      char rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_rev(ident_t *id_ref, int gtid, char *lhs,
                                      char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_rev(ident_t *id_ref, int gtid,
                                                unsigned char *lhs,
                                                unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_shl_cpt_rev(ident_t *id_ref, int gtid, char *lhs,
                                      char rhs, int flag);
char __kmpc_atomic_fixed1_shr_cpt_rev(ident_t *id_ref, int gtid, char *lhs,
                                      char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_shr_cpt_rev(ident_t *id_ref, int gtid,
                                                unsigned char *lhs,
                                                unsigned char rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_rev(ident_t *id_ref, int gtid, short *lhs,
                                       short rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_rev(ident_t *id_ref, int gtid, short *lhs,
                                       short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_rev(ident_t *id_ref, int gtid,
                                                 unsigned short *lhs,
                                                 unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_shl_cpt_rev(ident_t *id_ref, int gtid, short *lhs,
                                       short rhs, int flag);
short __kmpc_atomic_fixed2_shr_cpt_rev(ident_t *id_ref, int gtid, short *lhs,
                                       short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_shr_cpt_rev(ident_t *id_ref, int gtid,
                                                 unsigned short *lhs,
                                                 unsigned short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev(ident_t *id_ref, int gtid,
                                           kmp_int32 *lhs, kmp_int32 rhs,
                                           int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev(ident_t *id_ref, int gtid,
                                           kmp_int32 *lhs, kmp_int32 rhs,
                                           int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev(ident_t *id_ref, int gtid,
                                             kmp_uint32 *lhs, kmp_uint32 rhs,
                                             int flag);
kmp_int32 __kmpc_atomic_fixed4_shl_cpt_rev(ident_t *id_ref, int gtid,
                                           kmp_int32 *lhs, kmp_int32 rhs,
                                           int flag);
kmp_int32 __kmpc_atomic_fixed4_shr_cpt_rev(ident_t *id_ref, int gtid,
                                           kmp_int32 *lhs, kmp_int32 rhs,
                                           int flag);
kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt_rev(ident_t *id_ref, int gtid,
                                             kmp_uint32 *lhs, kmp_uint32 rhs,
                                             int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev(ident_t *id_ref, int gtid,
                                           kmp_int64 *lhs, kmp_int64 rhs,
                                           int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev(ident_t *id_ref, int gtid,
                                           kmp_int64 *lhs, kmp_int64 rhs,
                                           int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev(ident_t *id_ref, int gtid,
                                             kmp_uint64 *lhs, kmp_uint64 rhs,
                                             int flag);
kmp_int64 __kmpc_atomic_fixed8_shl_cpt_rev(ident_t *id_ref, int gtid,
                                           kmp_int64 *lhs, kmp_int64 rhs,
                                           int flag);
kmp_int64 __kmpc_atomic_fixed8_shr_cpt_rev(ident_t *id_ref, int gtid,
                                           kmp_int64 *lhs, kmp_int64 rhs,
                                           int flag);
kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt_rev(ident_t *id_ref, int gtid,
                                             kmp_uint64 *lhs, kmp_uint64 rhs,
                                             int flag);
float __kmpc_atomic_float4_sub_cpt_rev(ident_t *id_ref, int gtid, float *lhs,
                                       float rhs, int flag);
float __kmpc_atomic_float4_div_cpt_rev(ident_t *id_ref, int gtid, float *lhs,
                                       float rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_rev(ident_t *id_ref, int gtid, double *lhs,
                                        double rhs, int flag);
double __kmpc_atomic_float8_div_cpt_rev(ident_t *id_ref, int gtid, double *lhs,
                                        double rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_rev(ident_t *id_ref, int gtid,
                                              long double *lhs,
                                              long double rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_rev(ident_t *id_ref, int gtid,
                                              long double *lhs,
                                              long double rhs, int flag);
#if KMP_HAVE_QUAD
QUAD_LEGACY __kmpc_atomic_float16_sub_cpt_rev(ident_t *id_ref, int gtid,
                                              QUAD_LEGACY *lhs,
                                              QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_div_cpt_rev(ident_t *id_ref, int gtid,
                                              QUAD_LEGACY *lhs,
                                              QUAD_LEGACY rhs, int flag);
#endif // KMP_HAVE_QUAD
void __kmpc_atomic_cmplx4_sub_cpt_rev(ident_t *id_ref, int gtid,
                                      kmp_cmplx32 *lhs, kmp_cmplx32 rhs,
                                      kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_div_cpt_rev(ident_t *id_ref, int gtid,
                                      kmp_cmplx32 *lhs, kmp_cmplx32 rhs,
                                      kmp_cmplx32 *out, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt_rev(ident_t *id_ref, int gtid,
                                             kmp_cmplx64 *lhs, kmp_cmplx64 rhs,
                                             int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt_rev(ident_t *id_ref, int gtid,
                                             kmp_cmplx64 *lhs, kmp_cmplx64 rhs,
                                             int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt_rev(ident_t *id_ref, int gtid,
                                              kmp_cmplx80 *lhs,
                                              kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt_rev(ident_t *id_ref, int gtid,
                                              kmp_cmplx80 *lhs,
                                              kmp_cmplx80 rhs, int flag);
#if KMP_HAVE_QUAD
CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt_rev(ident_t *id_ref, int gtid,
                                              CPLX128_LEG *lhs,
                                              CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt_rev(ident_t *id_ref, int gtid,
                                              CPLX128_LEG *lhs,
                                              CPLX128_LEG rhs, int flag);
#if (KMP_ARCH_X86)
Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt_rev(ident_t *id_ref, int gtid,
                                                 Quad_a16_t *lhs,
                                                 Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_div_a16_cpt_rev(ident_t *id_ref, int gtid,
                                                 Quad_a16_t *lhs,
                                                 Quad_a16_t rhs, int flag);
kmp_cmplx128_a16_t
__kmpc_atomic_cmplx16_sub_a16_cpt_rev(ident_t *id_ref, int gtid,
                                      kmp_cmplx128_a16_t *lhs,
                                      kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t
__kmpc_atomic_cmplx16_div_a16_cpt_rev(ident_t *id_ref, int gtid,
                                      kmp_cmplx128_a16_t *lhs,
                                      kmp_cmplx128_a16_t rhs, int flag);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
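// Orientation note (hedged): the "_swp" routines below implement the capture
// form with a plain write, { v = x; x = expr; }, returning the old value of x.
// For example,
//
//   #pragma omp atomic capture
//   { v = x; x = expr; }   // x, v: kmp_int32
//
// may be lowered to
//
//   v = __kmpc_atomic_fixed4_swp(&loc, gtid, &x, expr);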
// Swap routines.
char __kmpc_atomic_fixed1_swp(ident_t *id_ref, int gtid, char *lhs, char rhs);
short __kmpc_atomic_fixed2_swp(ident_t *id_ref, int gtid, short *lhs,
                               short rhs);
kmp_int32 __kmpc_atomic_fixed4_swp(ident_t *id_ref, int gtid, kmp_int32 *lhs,
                                   kmp_int32 rhs);
kmp_int64 __kmpc_atomic_fixed8_swp(ident_t *id_ref, int gtid, kmp_int64 *lhs,
                                   kmp_int64 rhs);
float __kmpc_atomic_float4_swp(ident_t *id_ref, int gtid, float *lhs,
                               float rhs);
double __kmpc_atomic_float8_swp(ident_t *id_ref, int gtid, double *lhs,
                                double rhs);
long double __kmpc_atomic_float10_swp(ident_t *id_ref, int gtid,
                                      long double *lhs, long double rhs);
#if KMP_HAVE_QUAD
QUAD_LEGACY __kmpc_atomic_float16_swp(ident_t *id_ref, int gtid,
                                      QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
#endif // KMP_HAVE_QUAD
// cmplx4 returns the old value through the *out parameter.
void __kmpc_atomic_cmplx4_swp(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                              kmp_cmplx32 rhs, kmp_cmplx32 *out);
kmp_cmplx64 __kmpc_atomic_cmplx8_swp(ident_t *id_ref, int gtid,
                                     kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
kmp_cmplx80 __kmpc_atomic_cmplx10_swp(ident_t *id_ref, int gtid,
                                      kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
#if KMP_HAVE_QUAD
CPLX128_LEG __kmpc_atomic_cmplx16_swp(ident_t *id_ref, int gtid,
                                      CPLX128_LEG *lhs, CPLX128_LEG rhs);
#if (KMP_ARCH_X86)
Quad_a16_t __kmpc_atomic_float16_a16_swp(ident_t *id_ref, int gtid,
                                         Quad_a16_t *lhs, Quad_a16_t rhs);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_swp(ident_t *id_ref, int gtid,
                                                 kmp_cmplx128_a16_t *lhs,
                                                 kmp_cmplx128_a16_t rhs);
#endif // KMP_ARCH_X86
#endif // KMP_HAVE_QUAD
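// Orientation note (hedged): the remaining "_cpt_fp" and "_cpt_rev_fp" entry
// points mirror the capture routines above but take a _Quad right-hand side
// regardless of the LHS type, with the usual arithmetic conversions applied.
//
//   kmp_int32 x, v; _Quad q;
//   // conceptually: { v = x; x += q; }
//   v = __kmpc_atomic_fixed4_add_cpt_fp(&loc, gtid, &x, q, /*flag=*/0);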
// Capture routines for mixed types (RHS=_Quad).
#if KMP_HAVE_QUAD
char __kmpc_atomic_fixed1_add_cpt_fp(ident_t *id_ref, int gtid, char *lhs,
                                     _Quad rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt_fp(ident_t *id_ref, int gtid, char *lhs,
                                     _Quad rhs, int flag);
char __kmpc_atomic_fixed1_mul_cpt_fp(ident_t *id_ref, int gtid, char *lhs,
                                     _Quad rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_fp(ident_t *id_ref, int gtid, char *lhs,
                                     _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_add_cpt_fp(ident_t *id_ref, int gtid,
                                               unsigned char *lhs, _Quad rhs,
                                               int flag);
unsigned char __kmpc_atomic_fixed1u_sub_cpt_fp(ident_t *id_ref, int gtid,
                                               unsigned char *lhs, _Quad rhs,
                                               int flag);
unsigned char __kmpc_atomic_fixed1u_mul_cpt_fp(ident_t *id_ref, int gtid,
                                               unsigned char *lhs, _Quad rhs,
                                               int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_fp(ident_t *id_ref, int gtid,
                                               unsigned char *lhs, _Quad rhs,
                                               int flag);

short __kmpc_atomic_fixed2_add_cpt_fp(ident_t *id_ref, int gtid, short *lhs,
                                      _Quad rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_fp(ident_t *id_ref, int gtid, short *lhs,
                                      _Quad rhs, int flag);
short __kmpc_atomic_fixed2_mul_cpt_fp(ident_t *id_ref, int gtid, short *lhs,
                                      _Quad rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_fp(ident_t *id_ref, int gtid, short *lhs,
                                      _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_add_cpt_fp(ident_t *id_ref, int gtid,
                                                unsigned short *lhs, _Quad rhs,
                                                int flag);
unsigned short __kmpc_atomic_fixed2u_sub_cpt_fp(ident_t *id_ref, int gtid,
                                                unsigned short *lhs, _Quad rhs,
                                                int flag);
unsigned short __kmpc_atomic_fixed2u_mul_cpt_fp(ident_t *id_ref, int gtid,
                                                unsigned short *lhs, _Quad rhs,
                                                int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_fp(ident_t *id_ref, int gtid,
                                                unsigned short *lhs, _Quad rhs,
                                                int flag);

kmp_int32 __kmpc_atomic_fixed4_add_cpt_fp(ident_t *id_ref, int gtid,
                                          kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_fp(ident_t *id_ref, int gtid,
                                          kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_mul_cpt_fp(ident_t *id_ref, int gtid,
                                          kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_fp(ident_t *id_ref, int gtid,
                                          kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_add_cpt_fp(ident_t *id_ref, int gtid,
                                            kmp_uint32 *lhs, _Quad rhs,
                                            int flag);
kmp_uint32 __kmpc_atomic_fixed4u_sub_cpt_fp(ident_t *id_ref, int gtid,
                                            kmp_uint32 *lhs, _Quad rhs,
                                            int flag);
kmp_uint32 __kmpc_atomic_fixed4u_mul_cpt_fp(ident_t *id_ref, int gtid,
                                            kmp_uint32 *lhs, _Quad rhs,
                                            int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_fp(ident_t *id_ref, int gtid,
                                            kmp_uint32 *lhs, _Quad rhs,
                                            int flag);

kmp_int64 __kmpc_atomic_fixed8_add_cpt_fp(ident_t *id_ref, int gtid,
                                          kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_fp(ident_t *id_ref, int gtid,
                                          kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_mul_cpt_fp(ident_t *id_ref, int gtid,
                                          kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_fp(ident_t *id_ref, int gtid,
                                          kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_add_cpt_fp(ident_t *id_ref, int gtid,
                                            kmp_uint64 *lhs, _Quad rhs,
                                            int flag);
kmp_uint64 __kmpc_atomic_fixed8u_sub_cpt_fp(ident_t *id_ref, int gtid,
                                            kmp_uint64 *lhs, _Quad rhs,
                                            int flag);
kmp_uint64 __kmpc_atomic_fixed8u_mul_cpt_fp(ident_t *id_ref, int gtid,
                                            kmp_uint64 *lhs, _Quad rhs,
                                            int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_fp(ident_t *id_ref, int gtid,
                                            kmp_uint64 *lhs, _Quad rhs,
                                            int flag);

float __kmpc_atomic_float4_add_cpt_fp(ident_t *id_ref, int gtid,
                                      kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_fp(ident_t *id_ref, int gtid,
                                      kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_mul_cpt_fp(ident_t *id_ref, int gtid,
                                      kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_div_cpt_fp(ident_t *id_ref, int gtid,
                                      kmp_real32 *lhs, _Quad rhs, int flag);

double __kmpc_atomic_float8_add_cpt_fp(ident_t *id_ref, int gtid,
                                       kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_fp(ident_t *id_ref, int gtid,
                                       kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_mul_cpt_fp(ident_t *id_ref, int gtid,
                                       kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_div_cpt_fp(ident_t *id_ref, int gtid,
                                       kmp_real64 *lhs, _Quad rhs, int flag);

long double __kmpc_atomic_float10_add_cpt_fp(ident_t *id_ref, int gtid,
                                             long double *lhs, _Quad rhs,
                                             int flag);
long double __kmpc_atomic_float10_sub_cpt_fp(ident_t *id_ref, int gtid,
                                             long double *lhs, _Quad rhs,
                                             int flag);
long double __kmpc_atomic_float10_mul_cpt_fp(ident_t *id_ref, int gtid,
                                             long double *lhs, _Quad rhs,
                                             int flag);
long double __kmpc_atomic_float10_div_cpt_fp(ident_t *id_ref, int gtid,
                                             long double *lhs, _Quad rhs,
                                             int flag);
// Capture-reverse routines for mixed types (RHS=_Quad).
char __kmpc_atomic_fixed1_sub_cpt_rev_fp(ident_t *id_ref, int gtid, char *lhs,
                                         _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                   unsigned char *lhs,
                                                   _Quad rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_rev_fp(ident_t *id_ref, int gtid, char *lhs,
                                         _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                   unsigned char *lhs,
                                                   _Quad rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                          short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                    unsigned short *lhs,
                                                    _Quad rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                          short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                    unsigned short *lhs,
                                                    _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                              kmp_int32 *lhs, _Quad rhs,
                                              int flag);
kmp_uint32 __kmpc_atomic_fixed4u_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                kmp_uint32 *lhs, _Quad rhs,
                                                int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                              kmp_int32 *lhs, _Quad rhs,
                                              int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                kmp_uint32 *lhs, _Quad rhs,
                                                int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                              kmp_int64 *lhs, _Quad rhs,
                                              int flag);
kmp_uint64 __kmpc_atomic_fixed8u_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                kmp_uint64 *lhs, _Quad rhs,
                                                int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                              kmp_int64 *lhs, _Quad rhs,
                                              int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                kmp_uint64 *lhs, _Quad rhs,
                                                int flag);
float __kmpc_atomic_float4_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                          float *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                          float *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                           double *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                           double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                 long double *lhs, _Quad rhs,
                                                 int flag);
long double __kmpc_atomic_float10_div_cpt_rev_fp(ident_t *id_ref, int gtid,
                                                 long double *lhs, _Quad rhs,
                                                 int flag);
#endif // KMP_HAVE_QUAD

#endif // OMP_40_ENABLED

#endif // KMP_ARCH_X86 || KMP_ARCH_X86_64