LLVM OpenMP* Runtime Library
kmp_os.h
/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.txt for details.
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdlib.h>
#include <atomic>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif
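
// Illustrative usage (editor's sketch, not part of the original header): the
// *_SPEC macros carry only the conversion letters, so callers splice them
// into a format string themselves, e.g.
//   printf("allocated %" KMP_SIZE_T_SPEC " bytes\n", (size_t)1024);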

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
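// Illustrative usage (editor's sketch; the variables are hypothetical): one
// spelling for casts that degrades to a plain C cast outside C++:
//   const char *ro = "text";
//   char *rw = CCAST(char *, ro);              // const_cast<char *> in C++
//   kmp_intptr_t ip = RCAST(kmp_intptr_t, rw); // reinterpret_cast in C++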
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
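
// Illustrative sketch (editor's addition; __kmp_print_value is hypothetical):
// generic debug code builds a format string from traits_t<T>::spec instead of
// hard-coding "d"/"u"/"lld"/"llu" per type:
//   template <typename T> void __kmp_print_value(T v) {
//     char fmt[16];
//     snprintf(fmt, sizeof(fmt), "%%%s\n", traits_t<T>::spec);
//     printf(fmt, v);
//   }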
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
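
// Illustrative behavior (editor's sketch, assuming a 4096-byte page size):
//   PAGE_ALIGNED((void *)0x2000)  // -> true  (multiple of the page size)
//   PAGE_ALIGNED((void *)0x2010)  // -> false
//   ALIGN_TO_PAGE((void *)0x2010) // -> (void *)0x2000 (rounds down)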

/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif
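
// Illustrative usage (editor's sketch; __kmp_example_abort is hypothetical):
//   KMP_NORETURN void __kmp_example_abort(void) { abort(); }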

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif
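
// Illustrative usage (editor's sketch; both variables are hypothetical):
//   static KMP_ALIGN(64) kmp_int32 hot_counter; // 64-byte aligned storage
//   static KMP_THREAD_LOCAL int tls_gtid = -1;  // one instance per thread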

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," \
          KMP_STR(api_name) "@" ver_str "\n\t"); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," \
          KMP_STR(api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
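
// Illustrative usage (editor's sketch; kmpc_example and the version string
// are hypothetical): an exported entry point is defined under its expanded
// name, then bound to a versioned ELF symbol:
//   int KMP_EXPAND_NAME(kmpc_example)(void) { return 0; }
//   KMP_VERSION_SYMBOL(kmpc_example, 10, "OMP_1.0");
// With KMP_USE_VERSION_SYMBOLS unset, this collapses to a plain definition of
// kmpc_example, and the KMP_VERSION_SYMBOL line expands to nothing.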

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
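
// Illustrative semantics (editor's sketch; the counter is hypothetical): each
// KMP_TEST_THEN_* form returns the value the location held *before* the
// update:
//   volatile kmp_int32 c = 5;
//   kmp_int32 old = KMP_TEST_THEN_INC32(&c); // old == 5, c is now 6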

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                              (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}
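
// Illustrative usage (editor's sketch; lk is hypothetical): a minimal
// test-and-set style lock built on the boolean CAS wrappers above:
//   volatile kmp_int32 lk = 0;
//   while (!KMP_COMPARE_AND_STORE_ACQ32(&lk, 0, 1)) {
//     /* spin until the lock word changes from 0 to 1 under us */
//   }
//   /* ... critical section ... */
//   KMP_COMPARE_AND_STORE_REL32(&lk, 1, 0); // release by storing 0 back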

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
    KMP_ARCH_MIPS64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a) (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c) \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c) \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
                              (kmp_int64)(b), (kmp_int64)(c))
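
// Illustrative usage (editor's sketch; the flag is hypothetical): TCR_*/TCW_*
// mark reads and writes of shared state that are intentionally performed
// without heavier synchronization:
//   extern kmp_int32 shared_flag;
//   TCW_4(shared_flag, 1);          // publish
//   while (TCR_4(shared_flag) == 1) // poll
//     ;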

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);
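
// Illustrative shape (editor's sketch; the function name is hypothetical): an
// outlined parallel-region body must match microtask_t, taking two int
// pointers before any copied-in arguments:
//   void __kmp_example_microtask(int *gtid, int *npr, ...) { /* body */ }
//   microtask_t fn = __kmp_example_microtask;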

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT_YIELD __kmp_wait_yield_4
#define KMP_WAIT_YIELD_PTR __kmp_wait_yield_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#if OMP_45_ENABLED
#define KMP_USE_DYNAMIC_LOCK 1
#endif

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
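
// Illustrative usage (editor's sketch; the counter is hypothetical):
//   std::atomic<kmp_int32> nthreads{0};
//   KMP_ATOMIC_INC(&nthreads);                  // fetch_add(1, acq_rel)
//   kmp_int32 n = KMP_ATOMIC_LD_ACQ(&nthreads); // load(acquire)
//   KMP_ATOMIC_ST_REL(&nthreads, n);            // store(release)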

// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
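
// Illustrative usage (editor's sketch; the owner variable is hypothetical):
// unlike raw compare_exchange_strong, a failed call does not report the
// observed value back through "expected" (it is taken by value):
//   std::atomic<kmp_int32> owner{0};
//   if (__kmp_atomic_compare_store_acq(&owner, 0, 1)) {
//     /* this thread owns the resource */
//   }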

#endif /* KMP_OS_H */
// Safe C API
#include "kmp_safe_c_api.h"