LLVM OpenMP* Runtime Library
kmp_os.h
/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.txt for details.
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdlib.h>
#include <atomic>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/
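
// Illustrative sketch (not part of this header): these constants select the
// Fortran name-mangling scheme used by the entry-point headers (the actual
// expansions live elsewhere, e.g. kmp_ftn_os.h). Assuming the conventional
// schemes, a routine such as omp_get_num_threads would be exported as:
//
//   KMP_FTN_PLAIN  -> omp_get_num_threads   (lowercase, unchanged)
//   KMP_FTN_APPEND -> omp_get_num_threads_  (lowercase, trailing underscore)
//   KMP_FTN_UPPER  -> OMP_GET_NUM_THREADS   (uppercase)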

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented 128-bit extended precision
   type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
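
// Illustrative sketch (not part of this header): quad-precision code guards
// itself on KMP_HAVE_QUAD, since on the remaining configurations _Quad is
// either a long double stand-in or not defined at all. The helper name below
// is hypothetical.
#if 0
#if KMP_HAVE_QUAD
static _Quad __example_quad_add(_Quad a, _Quad b) { return a + b; }
#endif
#endif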

#if KMP_OS_WINDOWS
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif
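
// Illustrative sketch (not part of this header): the *_SPEC macros hold only
// the conversion letters, so call sites splice them into format strings via
// string-literal concatenation. The helper name below is hypothetical.
#if 0
#include <stdio.h>
static void __example_print(kmp_int64 n, size_t bytes) {
  printf("n = %" KMP_INT64_SPEC ", bytes = %" KMP_SIZE_T_SPEC "\n", n, bytes);
}
#endif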

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// macros to cast away qualifiers and to reinterpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// traits_t: supplies the printf format specifier (d, u, lld, llu) for debug
// prints, and the signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
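
// Illustrative sketch (not part of this header): generic code uses
// traits_t<T>::spec to build the right format string for a value of unknown
// width. The helper below is hypothetical; the runtime's debug traces follow
// the same pattern.
#if 0
#include <cstdio>
template <typename T> void __example_trace_value(T v) {
  char fmt[16];
  std::snprintf(fmt, sizeof(fmt), "%%%s\n", traits_t<T>::spec); // e.g. "%d\n"
  std::printf(fmt, v);
}
#endif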
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr) \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
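
// Illustrative sketch (not part of this header): both macros rely on the page
// size being a power of two, so "addr & (page_size - 1)" extracts the offset
// within the page. Assuming 4096-byte pages:
//
//   PAGE_ALIGNED((void *)0x2000)  -> true  (offset bits are all zero)
//   PAGE_ALIGNED((void *)0x2010)  -> false (offset is 0x10)
//   ALIGN_TO_PAGE((void *)0x2010) -> (void *)0x2000 (rounds down)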

/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
  __asm__( \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t"); \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR( \
      api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
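
// Illustrative sketch (not part of this header): for an entry point
// omp_get_wtime, KMP_EXPAND_NAME(omp_get_wtime) names the implementation
// __kmp_api_omp_get_wtime, and KMP_VERSION_SYMBOL(omp_get_wtime, 30,
// "OMP_3.0") would, assuming those version arguments, expand to roughly:
//
//   __typeof__(__kmp_api_omp_get_wtime) __kmp_api_omp_get_wtime_30_alias
//       __attribute__((alias("__kmp_api_omp_get_wtime")));
//   __asm__(".symver __kmp_api_omp_get_wtime_30_alias,omp_get_wtime@OMP_3.0");
//   __asm__(".symver __kmp_api_omp_get_wtime,omp_get_wtime@@VERSION");
//
// i.e. the older version binds to an alias of the implementation, while the
// default version ("@@VERSION") stays on the real symbol.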

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v) \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v) \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v) \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
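
// Note: KMP_XCHG_REAL32 above atomically exchanges the raw 32-bit pattern of
// the float and reinterprets the old bits as kmp_real32; this relies on long
// being 32 bits wide, which holds for Windows on both x86 and x64.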

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to the correct type so that the proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
                              (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))
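
// Illustrative sketch (not part of this header): the ACQ/REL variants return
// a boolean "did the store happen", while the RET variants return the value
// observed before the attempt. Two hypothetical helpers built on them:
#if 0
static void __example_spin_acquire(volatile kmp_int32 *lock) {
  // 0 = free, 1 = held; loop until we win the transition 0 -> 1.
  while (!KMP_COMPARE_AND_STORE_ACQ32(lock, 0, 1)) {
  }
}
static kmp_int32 __example_read_old(volatile kmp_int32 *flag) {
  // Returns the previous value whether or not the swap 0 -> 1 succeeded.
  return KMP_COMPARE_AND_STORE_RET32(flag, 0, 1);
}
#endif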

#define KMP_XCHG_FIXED8(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v) \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v) \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v) \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v) \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v) \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v) \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v) \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v) \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v) \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v) \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
    KMP_ARCH_MIPS64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a) (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c) \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c) \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
                              (kmp_int64)(b), (kmp_int64)(c))
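
// Illustrative sketch (not part of this header): TCR_*/TCW_* wrap reads and
// writes of shared runtime state (per the FIXME above, arguably they should
// be volatile accesses). Even though most expand to plain accesses today,
// call sites spell the intent out, e.g.:
#if 0
static void __example_wait_for_flag(kmp_int32 *flag) {
  while (TCR_4(*flag) == 0) { // shared read in the poll loop
  }
}
static void __example_set_flag(kmp_int32 *flag) {
  TCW_4(*flag, 1); // shared write paired with the read above
}
#endif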

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */

/* If these FTN_{TRUE,FALSE} values change, we may need to change several
   places where they are used to check that the language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT_YIELD __kmp_wait_yield_4
#define KMP_WAIT_YIELD_PTR __kmp_wait_yield_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#if OMP_45_ENABLED
#define KMP_USE_DYNAMIC_LOCK 1
#endif

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
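
// Illustrative sketch (not part of this header): the wrappers read as
// "operation + memory order". Applied to a hypothetical std::atomic counter:
#if 0
static std::atomic<kmp_int32> __example_counter(0);
static void __example_use_atomics() {
  KMP_ATOMIC_INC(&__example_counter); // fetch_add(1, memory_order_acq_rel)
  kmp_int32 seen = KMP_ATOMIC_LD_RLX(&__example_counter); // relaxed load
  KMP_ATOMIC_ST_REL(&__example_counter, seen); // release store
}
#endif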

// Callers of the following functions cannot see the side effect on "expected",
// because "expected" is passed by value.
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
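
// Illustrative sketch (not part of this header): unlike raw
// compare_exchange_strong, these helpers take "expected" by value, so a
// failed exchange does not report the value that was actually observed:
#if 0
static std::atomic<kmp_int32> __example_state(0);
static void __example_try_transition() {
  // Attempt the transition 0 -> 1; false means another thread won the race.
  if (__kmp_atomic_compare_store(&__example_state, 0, 1)) {
    // ... we own the transition ...
  }
}
#endif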

#endif /* KMP_OS_H */
// Safe C API
#include "kmp_safe_c_api.h"