#include "ompt-specific.h"

#if defined(__cplusplus) && (KMP_OS_WINDOWS)

#define KMP_DO_ALIGN(alignment)

#if (_MSC_VER < 1600) && defined(_DEBUG)
// Workaround: temporarily unset _DEBUG while <complex> is included; it is
// restored below once the complex wrapper types have been defined.
#define _DEBUG_TEMPORARILY_UNSET_
#undef _DEBUG
#endif

#include <complex>
template <typename type_lhs, typename type_rhs>
std::complex<type_lhs> __kmp_lhs_div_rhs(const std::complex<type_lhs> &lhs,
                                         const std::complex<type_rhs> &rhs) {
  type_lhs a = lhs.real();
  type_lhs b = lhs.imag();
  type_rhs c = rhs.real();
  type_rhs d = rhs.imag();
  type_rhs den = c * c + d * d;
  type_rhs r = (a * c + b * d);
  type_rhs i = (b * c - a * d);
  std::complex<type_lhs> ret(r / den, i / den);
  return ret;
}
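// __kmp_lhs_div_rhs above implements complex division by the textbook formula
//   (a + bi) / (c + di) = ((a*c + b*d) + (b*c - a*d)i) / (c*c + d*d),
// keeping the result in the precision of the left-hand side. Illustrative use
// (not part of this header):
//   std::complex<double> q =
//       __kmp_lhs_div_rhs(std::complex<double>(1.0, 2.0),
//                         std::complex<float>(3.0f, -4.0f));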
struct __kmp_cmplx64_t : std::complex<double> {

  __kmp_cmplx64_t() : std::complex<double>() {}

  __kmp_cmplx64_t(const std::complex<double> &cd) : std::complex<double>(cd) {}

  void operator/=(const __kmp_cmplx64_t &rhs) {
    std::complex<double> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx64_t operator/(const __kmp_cmplx64_t &rhs) {
    std::complex<double> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx64_t kmp_cmplx64;
struct __kmp_cmplx32_t : std::complex<float> {

  __kmp_cmplx32_t() : std::complex<float>() {}

  __kmp_cmplx32_t(const std::complex<float> &cf) : std::complex<float>(cf) {}

  __kmp_cmplx32_t operator+(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs + rhs);
  }
  __kmp_cmplx32_t operator-(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs - rhs);
  }
  __kmp_cmplx32_t operator*(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs * rhs);
  }

  __kmp_cmplx32_t operator+(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) + b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }
  __kmp_cmplx32_t operator-(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) - b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }
  __kmp_cmplx32_t operator*(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) * b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }

  void operator/=(const __kmp_cmplx32_t &rhs) {
    std::complex<float> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx32_t operator/(const __kmp_cmplx32_t &rhs) {
    std::complex<float> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }

  void operator/=(const kmp_cmplx64 &rhs) {
    std::complex<float> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx32_t operator/(const kmp_cmplx64 &rhs) {
    std::complex<float> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx32_t kmp_cmplx32;
struct KMP_DO_ALIGN(16) __kmp_cmplx80_t : std::complex<long double> {

  __kmp_cmplx80_t() : std::complex<long double>() {}

  __kmp_cmplx80_t(const std::complex<long double> &cld)
      : std::complex<long double>(cld) {}

  void operator/=(const __kmp_cmplx80_t &rhs) {
    std::complex<long double> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx80_t operator/(const __kmp_cmplx80_t &rhs) {
    std::complex<long double> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef KMP_DO_ALIGN(16) struct __kmp_cmplx80_t kmp_cmplx80;
struct __kmp_cmplx128_t : std::complex<_Quad> {

  __kmp_cmplx128_t() : std::complex<_Quad>() {}

  __kmp_cmplx128_t(const std::complex<_Quad> &cq) : std::complex<_Quad>(cq) {}

  void operator/=(const __kmp_cmplx128_t &rhs) {
    std::complex<_Quad> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx128_t operator/(const __kmp_cmplx128_t &rhs) {
    std::complex<_Quad> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx128_t kmp_cmplx128;
#ifdef _DEBUG_TEMPORARILY_UNSET_
#undef _DEBUG_TEMPORARILY_UNSET_
// Restore _DEBUG, which was unset above while <complex> was included.
#define _DEBUG 1
#endif
#else
// Shortcuts for the C99 complex types used on non-Windows targets.
typedef float _Complex kmp_cmplx32;
typedef double _Complex kmp_cmplx64;
typedef long double _Complex kmp_cmplx80;
#if KMP_HAVE_QUAD
typedef _Quad _Complex kmp_cmplx128;
#endif
#endif // defined(__cplusplus) && (KMP_OS_WINDOWS)
#if KMP_ARCH_X86 && KMP_HAVE_QUAD

#pragma pack(push, 4)

struct KMP_DO_ALIGN(4) Quad_a4_t {
  _Quad q;

  Quad_a4_t() : q() {}
  Quad_a4_t(const _Quad &cq) : q(cq) {}

  Quad_a4_t operator+(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs + rhs);
  }

  Quad_a4_t operator-(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs - rhs);
  }

  Quad_a4_t operator*(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs * rhs);
  }

  Quad_a4_t operator/(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs / rhs);
  }
};
struct KMP_DO_ALIGN(4) kmp_cmplx128_a4_t {
  kmp_cmplx128 q;

  kmp_cmplx128_a4_t() : q() {}

  kmp_cmplx128_a4_t(const kmp_cmplx128 &c128) : q(c128) {}

  kmp_cmplx128_a4_t operator+(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs + rhs);
  }
  kmp_cmplx128_a4_t operator-(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs - rhs);
  }
  kmp_cmplx128_a4_t operator*(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs * rhs);
  }
  kmp_cmplx128_a4_t operator/(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs / rhs);
  }
};

#pragma pack(pop)
struct KMP_DO_ALIGN(16) Quad_a16_t {
  _Quad q;

  Quad_a16_t() : q() {}
  Quad_a16_t(const _Quad &cq) : q(cq) {}

  Quad_a16_t operator+(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs + rhs);
  }

  Quad_a16_t operator-(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs - rhs);
  }

  Quad_a16_t operator*(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs * rhs);
  }

  Quad_a16_t operator/(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs / rhs);
  }
};
struct KMP_DO_ALIGN(16) kmp_cmplx128_a16_t {
  kmp_cmplx128 q;

  kmp_cmplx128_a16_t() : q() {}

  kmp_cmplx128_a16_t(const kmp_cmplx128 &c128) : q(c128) {}

  kmp_cmplx128_a16_t operator+(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs + rhs);
  }
  kmp_cmplx128_a16_t operator-(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs - rhs);
  }
  kmp_cmplx128_a16_t operator*(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs * rhs);
  }
  kmp_cmplx128_a16_t operator/(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs / rhs);
  }
};
#endif // KMP_ARCH_X86 && KMP_HAVE_QUAD

#if KMP_ARCH_X86 && KMP_HAVE_QUAD
#define QUAD_LEGACY Quad_a4_t
#define CPLX128_LEG kmp_cmplx128_a4_t
#else
#define QUAD_LEGACY _Quad
#define CPLX128_LEG kmp_cmplx128
#endif
extern int __kmp_atomic_mode;

typedef kmp_queuing_lock_t kmp_atomic_lock_t;

static inline void __kmp_acquire_atomic_lock(kmp_atomic_lock_t *lck,
                                             kmp_int32 gtid) {
#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_acquire) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_acquire)(
        ompt_mutex_atomic, 0, kmp_mutex_impl_queuing, (ompt_wait_id_t)lck,
        OMPT_GET_RETURN_ADDRESS(0));
  }
#endif

  __kmp_acquire_queuing_lock(lck, gtid);

#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_acquired) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_acquired)(
        ompt_mutex_atomic, (ompt_wait_id_t)lck, OMPT_GET_RETURN_ADDRESS(0));
  }
#endif
}

static inline int __kmp_test_atomic_lock(kmp_atomic_lock_t *lck,
                                         kmp_int32 gtid) {
  return __kmp_test_queuing_lock(lck, gtid);
}

static inline void __kmp_release_atomic_lock(kmp_atomic_lock_t *lck,
                                             kmp_int32 gtid) {
  __kmp_release_queuing_lock(lck, gtid);
#if OMPT_SUPPORT && OMPT_OPTIONAL
  if (ompt_enabled.ompt_callback_mutex_released) {
    ompt_callbacks.ompt_callback(ompt_callback_mutex_released)(
        ompt_mutex_atomic, (ompt_wait_id_t)lck, OMPT_GET_RETURN_ADDRESS(0));
  }
#endif
}

static inline void __kmp_init_atomic_lock(kmp_atomic_lock_t *lck) {
  __kmp_init_queuing_lock(lck);
}

static inline void __kmp_destroy_atomic_lock(kmp_atomic_lock_t *lck) {
  __kmp_destroy_queuing_lock(lck);
}
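// Illustrative sketch (an assumption about the .cpp implementation, not part
// of this header): an update on a type with no native atomic support is
// expected to be bracketed by one of the global locks declared below, e.g.
//   __kmp_acquire_atomic_lock(&__kmp_atomic_lock_10r, gtid);
//   *lhs = *lhs + rhs; // critical-section update
//   __kmp_release_atomic_lock(&__kmp_atomic_lock_10r, gtid);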
extern kmp_atomic_lock_t __kmp_atomic_lock;
extern kmp_atomic_lock_t __kmp_atomic_lock_1i;
extern kmp_atomic_lock_t __kmp_atomic_lock_2i;
extern kmp_atomic_lock_t __kmp_atomic_lock_4i;
extern kmp_atomic_lock_t __kmp_atomic_lock_4r;
extern kmp_atomic_lock_t __kmp_atomic_lock_8i;
extern kmp_atomic_lock_t __kmp_atomic_lock_8r;
extern kmp_atomic_lock_t __kmp_atomic_lock_8c;
extern kmp_atomic_lock_t __kmp_atomic_lock_10r;
extern kmp_atomic_lock_t __kmp_atomic_lock_16r;
extern kmp_atomic_lock_t __kmp_atomic_lock_16c;
extern kmp_atomic_lock_t __kmp_atomic_lock_20c;
extern kmp_atomic_lock_t __kmp_atomic_lock_32c;
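// Naming convention for the entry points declared below:
//   __kmpc_atomic_<type>_<op>[_<suffix>]
// <type> encodes the operand: fixed1/2/4/8 for 1/2/4/8-byte integers (with a
// trailing "u" for unsigned), float4/8/10/16 for reals, cmplx4/8/10/16 for
// complex. <op> is the operation (add, sub, mul, div, andb, orb, xor, shl,
// shr, andl, orl, max, min, eqv, neqv, ...). Illustrative sketch (assumed,
// not normative) of the call a compiler might emit for
//   #pragma omp atomic
//   x += y;            // char x; char y;
// would be along the lines of
//   __kmpc_atomic_fixed1_add(&loc, gtid, &x, y);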
void __kmpc_atomic_fixed1_add(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_andb(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_div(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_div(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_mul(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_orb(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shr(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_shr(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_sub(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_xor(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_add(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_andb(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_div(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_div(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_mul(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_orb(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shr(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_shr(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_sub(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_xor(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_add(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_sub(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_float4_add(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_sub(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_fixed8_add(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_sub(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float8_add(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_sub(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed4_andb(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_div(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_div(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_mul(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_orb(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shr(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_shr(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_xor(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_andb(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_div(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_div(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_mul(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_orb(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shr(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_shr(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_xor(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float4_div(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_mul(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_div(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_mul(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed1_andl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_orl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_andl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_orl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_andl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_orl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_andl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_orl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed1_max(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_min(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_max(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_min(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_max(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_min(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_max(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_min(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float4_max(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_min(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_max(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_min(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float16_max(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_min(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_max_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_min_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_fixed1_neqv(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_neqv(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_neqv(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_neqv(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed1_eqv(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_eqv(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_eqv(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_eqv(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float10_add(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_sub(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_mul(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_div(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float16_add(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_sub(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_mul(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_div(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_add_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_sub_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_mul_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_div_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_cmplx4_add(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_sub(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_mul(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_div(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_add(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_sub(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_mul(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_div(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_add(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_sub(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_mul(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_div(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx16_add(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_sub(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_mul(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_div(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_add_a16(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_sub_a16(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_mul_a16(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_div_a16(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
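// The "_rev" entry points declared below handle non-commutative operations
// with the shared variable on the right-hand side of the operator, e.g.
//   #pragma omp atomic
//   x = expr - x;   // reversed operands; maps to a ..._sub_rev routine
// Only sub, div, shl and shr (and their unsigned forms) appear in reversed
// variants below.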
#if OMP_40_ENABLED
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
void __kmpc_atomic_fixed1_sub_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_div_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_div_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_shl_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shr_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_shr_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed2_sub_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_div_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_div_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_shl_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shr_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_shr_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed4_sub_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_div_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_div_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_shl_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shr_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_shr_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed8_sub_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_div_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_div_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_shl_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shr_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_shr_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_float4_sub_rev(ident_t *id_ref, int gtid, float *lhs, float rhs);
void __kmpc_atomic_float4_div_rev(ident_t *id_ref, int gtid, float *lhs, float rhs);
void __kmpc_atomic_float8_sub_rev(ident_t *id_ref, int gtid, double *lhs, double rhs);
void __kmpc_atomic_float8_div_rev(ident_t *id_ref, int gtid, double *lhs, double rhs);
void __kmpc_atomic_float10_sub_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_div_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_float16_sub_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_div_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
#endif // KMP_HAVE_QUAD
void __kmpc_atomic_cmplx4_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_div_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_div_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_div_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_cmplx16_sub_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_div_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_float16_sub_a16_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_div_a16_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_cmplx16_sub_a16_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_div_a16_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
#endif // KMP_HAVE_QUAD
#endif // KMP_ARCH_X86 || KMP_ARCH_X86_64
#endif // OMP_40_ENABLED
void __kmpc_atomic_fixed1_mul_float8(ident_t *id_ref, int gtid, char *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed1_div_float8(ident_t *id_ref, int gtid, char *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed2_mul_float8(ident_t *id_ref, int gtid, short *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed2_div_float8(ident_t *id_ref, int gtid, short *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed4_mul_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed4_div_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed8_mul_float8(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed8_div_float8(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_add_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_sub_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_mul_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_div_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_fixed1_add_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_add_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_sub_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_sub_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_mul_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_mul_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_div_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_div_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_add_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_add_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_sub_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_sub_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_mul_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_mul_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_div_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_div_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_add_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_add_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_sub_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_sub_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_mul_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_mul_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_div_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_div_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_add_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_add_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_sub_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_sub_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_mul_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_mul_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_div_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_div_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_float4_add_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_sub_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_mul_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_div_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float8_add_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_sub_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_mul_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_div_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float10_add_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_sub_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_mul_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_div_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_sub_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_sub_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_div_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_div_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_sub_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_sub_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_div_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_div_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_sub_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_sub_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_div_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_div_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_sub_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_sub_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_div_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_div_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_float4_sub_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs);
void __kmpc_atomic_float4_div_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs);
void __kmpc_atomic_float8_sub_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs);
void __kmpc_atomic_float8_div_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs);
void __kmpc_atomic_float10_sub_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_div_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
#endif // KMP_HAVE_QUAD
void __kmpc_atomic_cmplx4_add_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_sub_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_mul_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_div_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_1(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_2(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_4(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_8(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_10(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_16(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_20(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
void __kmpc_atomic_32(ident_t *id_ref, int gtid, void *lhs, void *rhs, void (*f)(void *, void *, void *));
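// The generic __kmpc_atomic_<N> routines above cover user-defined or
// otherwise unsupported operand types of N bytes: the runtime serializes the
// update and invokes the callback f to combine lhs and rhs. A minimal sketch
// of a callback for a 16-byte user type (hypothetical helper; the argument
// order -- destination first, then the two operands -- is an assumption):
//   typedef struct { double re, im; } my_pair_t;
//   static void my_pair_add(void *out, void *a, void *b) {
//     my_pair_t *o = (my_pair_t *)out;
//     const my_pair_t *x = (const my_pair_t *)a, *y = (const my_pair_t *)b;
//     o->re = x->re + y->re;
//     o->im = x->im + y->im;
//   }
//   // __kmpc_atomic_16(&loc, gtid, &shared, &incr, my_pair_add);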
#if KMP_ARCH_X86 || KMP_ARCH_X86_64

char __kmpc_atomic_fixed1_rd(ident_t *id_ref, int gtid, char *loc);
short __kmpc_atomic_fixed2_rd(ident_t *id_ref, int gtid, short *loc);
kmp_int32 __kmpc_atomic_fixed4_rd(ident_t *id_ref, int gtid, kmp_int32 *loc);
kmp_int64 __kmpc_atomic_fixed8_rd(ident_t *id_ref, int gtid, kmp_int64 *loc);
kmp_real32 __kmpc_atomic_float4_rd(ident_t *id_ref, int gtid, kmp_real32 *loc);
kmp_real64 __kmpc_atomic_float8_rd(ident_t *id_ref, int gtid, kmp_real64 *loc);
long double __kmpc_atomic_float10_rd(ident_t *id_ref, int gtid, long double *loc);
QUAD_LEGACY __kmpc_atomic_float16_rd(ident_t *id_ref, int gtid, QUAD_LEGACY *loc);
#if (KMP_OS_WINDOWS)
// On Windows the kmp_cmplx32 result is returned through a hidden out parameter.
void __kmpc_atomic_cmplx4_rd(kmp_cmplx32 *out, ident_t *id_ref, int gtid, kmp_cmplx32 *loc);
#else
kmp_cmplx32 __kmpc_atomic_cmplx4_rd(ident_t *id_ref, int gtid, kmp_cmplx32 *loc);
#endif
kmp_cmplx64 __kmpc_atomic_cmplx8_rd(ident_t *id_ref, int gtid, kmp_cmplx64 *loc);
kmp_cmplx80 __kmpc_atomic_cmplx10_rd(ident_t *id_ref, int gtid, kmp_cmplx80 *loc);
CPLX128_LEG __kmpc_atomic_cmplx16_rd(ident_t *id_ref, int gtid, CPLX128_LEG *loc);
Quad_a16_t __kmpc_atomic_float16_a16_rd(ident_t *id_ref, int gtid, Quad_a16_t *loc);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_rd(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *loc);
void __kmpc_atomic_fixed1_wr(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_wr(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_wr(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_wr(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float4_wr(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_wr(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float10_wr(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float16_wr(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_cmplx4_wr(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_wr(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_wr(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx16_wr(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_float16_a16_wr(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_cmplx16_a16_wr(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
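// The "_cpt" (capture) entry points declared below perform the atomic update
// and also hand back one of the two values involved, implementing the OpenMP
// atomic capture forms
//   { v = x; x = x op expr; }   and   { x = x op expr; v = x; }
// The trailing "int flag" selects which of the two values is returned; the
// exact encoding is defined by the runtime implementation. The kmp_cmplx32
// variants return the captured value through an extra kmp_cmplx32 *out
// parameter instead of a return value.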
char __kmpc_atomic_fixed1_add_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_andb_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_mul_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_orb_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shr_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_shr_cpt(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_xor_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_add_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_andb_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_mul_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_orb_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shr_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_shr_cpt(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_xor_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_add_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_add_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_sub_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_add_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_add_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_sub_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_andb_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_mul_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_orb_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shr_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_xor_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_andb_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_mul_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_orb_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shr_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_xor_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_div_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_mul_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_div_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_mul_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
char __kmpc_atomic_fixed1_andl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_orl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_andl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_orl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_andl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_orl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_andl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_orl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
char __kmpc_atomic_fixed1_max_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_min_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_max_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_min_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_max_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_min_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_max_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_min_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_max_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_min_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_max_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_min_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_max_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_min_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
char __kmpc_atomic_fixed1_neqv_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_neqv_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_neqv_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_neqv_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
char __kmpc_atomic_fixed1_eqv_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_eqv_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_eqv_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_eqv_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
long double __kmpc_atomic_float10_add_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_mul_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_div_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_add_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_sub_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_mul_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_div_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
void __kmpc_atomic_cmplx4_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_add_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_mul_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_add_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_mul_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_div_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_max_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_min_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_add_a16_cpt(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_mul_a16_cpt(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
void __kmpc_atomic_start(void);
void __kmpc_atomic_end(void);
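// __kmpc_atomic_start/__kmpc_atomic_end bracket an atomic update for which no
// specialized routine is used. A minimal sketch of the expected emitted
// sequence (assumed, not normative):
//   __kmpc_atomic_start();
//   x = combine(x, expr);   // arbitrary update, serialized by the runtime
//   __kmpc_atomic_end();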
#if OMP_40_ENABLED
char __kmpc_atomic_fixed1_sub_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_shl_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shr_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_shr_cpt_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_shl_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shr_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_shr_cpt_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shl_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shl_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_rev(ident_t *id_ref, int gtid, float *lhs, float rhs, int flag);
float __kmpc_atomic_float4_div_cpt_rev(ident_t *id_ref, int gtid, float *lhs, float rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_rev(ident_t *id_ref, int gtid, double *lhs, double rhs, int flag);
double __kmpc_atomic_float8_div_cpt_rev(ident_t *id_ref, int gtid, double *lhs, double rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_sub_cpt_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_div_cpt_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
void __kmpc_atomic_cmplx4_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_div_a16_cpt_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs, int flag);
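// The "_swp" (swap) entry points declared below implement capture-and-write,
//   { v = x; x = expr; }
// returning the previous value of the shared location (the kmp_cmplx32 form
// again uses an extra *out parameter).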
char __kmpc_atomic_fixed1_swp(ident_t *id_ref, int gtid, char *lhs, char rhs);
short __kmpc_atomic_fixed2_swp(ident_t *id_ref, int gtid, short *lhs, short rhs);
kmp_int32 __kmpc_atomic_fixed4_swp(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
kmp_int64 __kmpc_atomic_fixed8_swp(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
float __kmpc_atomic_float4_swp(ident_t *id_ref, int gtid, float *lhs, float rhs);
double __kmpc_atomic_float8_swp(ident_t *id_ref, int gtid, double *lhs, double rhs);
long double __kmpc_atomic_float10_swp(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
QUAD_LEGACY __kmpc_atomic_float16_swp(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_cmplx4_swp(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs, kmp_cmplx32 *out);
kmp_cmplx64 __kmpc_atomic_cmplx8_swp(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
kmp_cmplx80 __kmpc_atomic_cmplx10_swp(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
CPLX128_LEG __kmpc_atomic_cmplx16_swp(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
Quad_a16_t __kmpc_atomic_float16_a16_swp(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_swp(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
#if KMP_HAVE_QUAD
char __kmpc_atomic_fixed1_add_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_mul_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_add_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_sub_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_mul_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_add_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_mul_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_add_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_sub_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_mul_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_add_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_add_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_add_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_add_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_add_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_div_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_add_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_div_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_add_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_mul_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_div_cpt_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_div_cpt_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
#endif // KMP_HAVE_QUAD
#endif // OMP_40_ENABLED
#endif // KMP_ARCH_X86 || KMP_ARCH_X86_64