/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdlib.h>
#include <atomic>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif
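// A minimal illustration (not part of the original header) of how the *_SPEC
// macros are meant to be used: each expands to the printf length/format
// letters for its type, so one format string works across 32- and 64-bit
// targets via string-literal concatenation. Variable names are hypothetical.
//
//   kmp_size_t nbytes = 4096;
//   kmp_intptr_t diff = 0;
//   printf("allocated %" KMP_SIZE_T_SPEC " bytes at offset %" KMP_INTPTR_SPEC
//          "\n", nbytes, diff);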

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus
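// Sketch (illustrative, not in the original header) of how traits_t supports
// type-generic debug printing: the out-of-line "spec" string holds the printf
// letters for the type, so templated code can build a format string without
// knowing T. The helper name debug_dump is hypothetical.
//
//   template <typename T> void debug_dump(char const *name, T v) {
//     char fmt[64];
//     snprintf(fmt, sizeof(fmt), "%s = %%%s\n", name, traits_t<T>::spec);
//     printf(fmt, v);
//   }
//   // debug_dump("count", 42);      -> format "%d"
//   // debug_dump("mask", 0xffULL);  -> format "%llu"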

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
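// Quick illustration (not part of the original header); the buffer is
// hypothetical. ALIGN_TO_PAGE rounds an address down to its page base, and
// PAGE_ALIGNED tests whether an address sits exactly on a page boundary:
//
//   void *buf = malloc(2 * KMP_GET_PAGE_SIZE());
//   void *page = ALIGN_TO_PAGE(buf); // page base containing buf
//   if (PAGE_ALIGNED(buf)) {
//     /* buf happened to start exactly on a page boundary */
//   }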

/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
// Code from libcxx/include/__config
// Use a function like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
# define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
# define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
# define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
# define KMP_FALLTHROUGH() ((void)0)
#endif
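// Illustrative use (not part of the original header); the caller supplies
// the trailing semicolon, which is why KMP_FALLTHROUGH is function-like:
//
//   switch (kind) {
//   case 1:
//     do_first_step();
//     KMP_FALLTHROUGH();
//   case 2:
//     do_second_step();
//     break;
//   }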

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t"); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR( \
      api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
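// A worked expansion (illustrative only, with a hypothetical API name).
// With version symbols enabled,
//
//   int KMP_EXPAND_NAME(omp_get_thing)(void) { /* ... */ }
//   KMP_VERSION_SYMBOL(omp_get_thing, 10, "OMP_1.0");
//
// defines the function as __kmp_api_omp_get_thing, creates an alias
// __kmp_api_omp_get_thing_10_alias, and emits two .symver directives so the
// linker exports omp_get_thing@OMP_1.0 (the old version) and
// omp_get_thing@@VERSION (the default version).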

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
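// Typical use (a sketch, not from the original header): give a hot shared
// field its own cache line so concurrent writers do not false-share. The
// struct name is hypothetical.
//
//   typedef struct KMP_ALIGN_CACHE hypothetical_counter {
//     volatile kmp_int32 value; // written by one thread, read by others
//   } hypothetical_counter_t;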

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                              (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}
#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */
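// Usage sketch (illustrative, not from the original header): whichever branch
// above was selected, callers see the same portable fetch-and-op / CAS
// surface. The counter name below is hypothetical.
//
//   volatile kmp_int32 hypothetical_count = 0;
//   kmp_int32 old = KMP_TEST_THEN_INC32(&hypothetical_count); // fetch, then +1
//   if (KMP_COMPARE_AND_STORE_ACQ32(&hypothetical_count, 1, 0)) {
//     /* won the race: value was 1 and is now 0 */
//   }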

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
    KMP_ARCH_MIPS64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a) (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c) \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c) \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
                              (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */
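// Reader's note with a short sketch (not part of the original header):
// TCR_*/TCW_* are "thread-safe read/write" wrappers that currently compile to
// plain accesses; TCR_PTR/TCW_PTR just select the 4- or 8-byte flavor that
// matches the pointer width. A hypothetical use:
//
//   void *hypothetical_slot;
//   TCW_PTR(hypothetical_slot, NULL);        // publish a value
//   void *seen = TCR_PTR(hypothetical_slot); // observe it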

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT_YIELD __kmp_wait_yield_4
#define KMP_WAIT_YIELD_PTR __kmp_wait_yield_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#if OMP_45_ENABLED
#define KMP_USE_DYNAMIC_LOCK 1
#endif

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
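// Sketch of intended use (illustrative, not from the original header): these
// macros wrap std::atomic member calls so the memory order is spelled once,
// by suffix. The flag name below is hypothetical.
//
//   std::atomic<kmp_int32> hypothetical_flag{0};
//   KMP_ATOMIC_ST_REL(&hypothetical_flag, 1);               // store, release
//   kmp_int32 cur = KMP_ATOMIC_LD_ACQ(&hypothetical_flag);  // load, acquire
//   KMP_ATOMIC_INC(&hypothetical_flag);                     // fetch_add(1, acq_rel)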

// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
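// Note with example (illustrative, not from the original header): "expected"
// is taken by value, so unlike raw compare_exchange_strong the caller never
// observes the updated expected value on failure; these helpers return only
// success or failure. Hypothetical use:
//
//   std::atomic<kmp_int32> hypothetical_owner{-1};
//   if (__kmp_atomic_compare_store(&hypothetical_owner, -1, my_gtid)) {
//     /* acquired: owner changed from -1 to my_gtid */
//   }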

#endif /* KMP_OS_H */
// Safe C API
#include "kmp_safe_c_api.h"