LLVM OpenMP* Runtime Library
kmp_os.h
1 /*
2  * kmp_os.h -- KPTS runtime header file.
3  */
4 
5 //===----------------------------------------------------------------------===//
6 //
7 // The LLVM Compiler Infrastructure
8 //
9 // This file is dual licensed under the MIT and the University of Illinois Open
10 // Source Licenses. See LICENSE.txt for details.
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #ifndef KMP_OS_H
15 #define KMP_OS_H
16 
17 #include "kmp_config.h"
18 #include <stdlib.h>
19 
20 #define KMP_FTN_PLAIN 1
21 #define KMP_FTN_APPEND 2
22 #define KMP_FTN_UPPER 3
23 /*
24 #define KMP_FTN_PREPEND 4
25 #define KMP_FTN_UAPPEND 5
26 */
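// Illustrative sketch (not part of this header; the entry-point name below is
// hypothetical): the KMP_FTN_* constants select a Fortran name-mangling
// scheme, and consumers such as kmp_ftn_os.h compare the build-time setting
// KMP_FTN_ENTRIES against them roughly like this:
#if 0
#if KMP_FTN_ENTRIES == KMP_FTN_PLAIN
#define FTN_SET_NUM_THREADS omp_set_num_threads /* undecorated name */
#elif KMP_FTN_ENTRIES == KMP_FTN_APPEND
#define FTN_SET_NUM_THREADS omp_set_num_threads_ /* trailing underscore */
#elif KMP_FTN_ENTRIES == KMP_FTN_UPPER
#define FTN_SET_NUM_THREADS OMP_SET_NUM_THREADS /* upper-case name */
#endif
#endif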
27 
28 #define KMP_PTR_SKIP (sizeof(void *))
29 
30 /* -------------------------- Compiler variations ------------------------ */
31 
32 #define KMP_OFF 0
33 #define KMP_ON 1
34 
35 #define KMP_MEM_CONS_VOLATILE 0
36 #define KMP_MEM_CONS_FENCE 1
37 
38 #ifndef KMP_MEM_CONS_MODEL
39 #define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
40 #endif
41 
42 /* ------------------------- Compiler recognition ---------------------- */
43 #define KMP_COMPILER_ICC 0
44 #define KMP_COMPILER_GCC 0
45 #define KMP_COMPILER_CLANG 0
46 #define KMP_COMPILER_MSVC 0
47 
48 #if defined(__INTEL_COMPILER)
49 #undef KMP_COMPILER_ICC
50 #define KMP_COMPILER_ICC 1
51 #elif defined(__clang__)
52 #undef KMP_COMPILER_CLANG
53 #define KMP_COMPILER_CLANG 1
54 #elif defined(__GNUC__)
55 #undef KMP_COMPILER_GCC
56 #define KMP_COMPILER_GCC 1
57 #elif defined(_MSC_VER)
58 #undef KMP_COMPILER_MSVC
59 #define KMP_COMPILER_MSVC 1
60 #else
61 #error Unknown compiler
62 #endif
63 
64 #if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK
65 #define KMP_AFFINITY_SUPPORTED 1
66 #if KMP_OS_WINDOWS && KMP_ARCH_X86_64
67 #define KMP_GROUP_AFFINITY 1
68 #else
69 #define KMP_GROUP_AFFINITY 0
70 #endif
71 #else
72 #define KMP_AFFINITY_SUPPORTED 0
73 #define KMP_GROUP_AFFINITY 0
74 #endif
75 
76 /* Check for quad-precision extension. */
77 #define KMP_HAVE_QUAD 0
78 #if KMP_ARCH_X86 || KMP_ARCH_X86_64
79 #if KMP_COMPILER_ICC
80 /* _Quad is already defined for icc */
81 #undef KMP_HAVE_QUAD
82 #define KMP_HAVE_QUAD 1
83 #elif KMP_COMPILER_CLANG
84 /* Clang doesn't support a software-implemented
85  128-bit extended precision type yet */
86 typedef long double _Quad;
87 #elif KMP_COMPILER_GCC
88 typedef __float128 _Quad;
89 #undef KMP_HAVE_QUAD
90 #define KMP_HAVE_QUAD 1
91 #elif KMP_COMPILER_MSVC
92 typedef long double _Quad;
93 #endif
94 #else
95 #if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
96 typedef long double _Quad;
97 #undef KMP_HAVE_QUAD
98 #define KMP_HAVE_QUAD 1
99 #endif
100 #endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
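// Illustrative example (not from this header): code that needs the 128-bit
// type is expected to guard its use with KMP_HAVE_QUAD, since _Quad may be
// missing or may only be an alias for long double on some toolchains.
#if 0
#if KMP_HAVE_QUAD
static _Quad example_quad_add(_Quad a, _Quad b) { return a + b; }
#endif
#endif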
101 
102 #if KMP_OS_WINDOWS
103 typedef char kmp_int8;
104 typedef unsigned char kmp_uint8;
105 typedef short kmp_int16;
106 typedef unsigned short kmp_uint16;
107 typedef int kmp_int32;
108 typedef unsigned int kmp_uint32;
109 #define KMP_INT32_SPEC "d"
110 #define KMP_UINT32_SPEC "u"
111 #ifndef KMP_STRUCT64
112 typedef __int64 kmp_int64;
113 typedef unsigned __int64 kmp_uint64;
114 #define KMP_INT64_SPEC "I64d"
115 #define KMP_UINT64_SPEC "I64u"
116 #else
117 struct kmp_struct64 {
118  kmp_int32 a, b;
119 };
120 typedef struct kmp_struct64 kmp_int64;
121 typedef struct kmp_struct64 kmp_uint64;
122 /* Not sure what to use for KMP_[U]INT64_SPEC here */
123 #endif
124 #if KMP_ARCH_X86_64
125 #define KMP_INTPTR 1
126 typedef __int64 kmp_intptr_t;
127 typedef unsigned __int64 kmp_uintptr_t;
128 #define KMP_INTPTR_SPEC "I64d"
129 #define KMP_UINTPTR_SPEC "I64u"
130 #endif
131 #endif /* KMP_OS_WINDOWS */
132 
133 #if KMP_OS_UNIX
134 typedef char kmp_int8;
135 typedef unsigned char kmp_uint8;
136 typedef short kmp_int16;
137 typedef unsigned short kmp_uint16;
138 typedef int kmp_int32;
139 typedef unsigned int kmp_uint32;
140 typedef long long kmp_int64;
141 typedef unsigned long long kmp_uint64;
142 #define KMP_INT32_SPEC "d"
143 #define KMP_UINT32_SPEC "u"
144 #define KMP_INT64_SPEC "lld"
145 #define KMP_UINT64_SPEC "llu"
146 #endif /* KMP_OS_UNIX */
147 
148 #if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
149 #define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
150 #elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
151 #define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
152 #else
153 #error "Can't determine size_t printf format specifier."
154 #endif
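// Illustrative usage of the format-specifier macros above (not part of the
// original header; requires <stdio.h>): the macros supply only the conversion
// letters, so callers concatenate them after a literal "%".
#if 0
static void example_print_sizes(kmp_int64 n, size_t bytes) {
  printf("n=%" KMP_INT64_SPEC " bytes=%" KMP_SIZE_T_SPEC "\n", n, bytes);
}
#endif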
155 
156 #if KMP_ARCH_X86
157 #define KMP_SIZE_T_MAX (0xFFFFFFFF)
158 #else
159 #define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
160 #endif
161 
162 typedef size_t kmp_size_t;
163 typedef float kmp_real32;
164 typedef double kmp_real64;
165 
166 #ifndef KMP_INTPTR
167 #define KMP_INTPTR 1
168 typedef long kmp_intptr_t;
169 typedef unsigned long kmp_uintptr_t;
170 #define KMP_INTPTR_SPEC "ld"
171 #define KMP_UINTPTR_SPEC "lu"
172 #endif
173 
174 #ifdef BUILD_I8
175 typedef kmp_int64 kmp_int;
176 typedef kmp_uint64 kmp_uint;
177 #else
178 typedef kmp_int32 kmp_int;
179 typedef kmp_uint32 kmp_uint;
180 #endif /* BUILD_I8 */
181 #define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
182 #define KMP_INT_MIN ((kmp_int32)0x80000000)
183 
184 #ifdef __cplusplus
185 // Macros to cast away qualifiers and to reinterpret types
186 #define CCAST(type, var) const_cast<type>(var)
187 #define RCAST(type, var) reinterpret_cast<type>(var)
188 //-------------------------------------------------------------------------
189 // Template for debug print format specifiers (d, u, lld, llu) and for
190 // obtaining the signed/unsigned flavors of a type
191 template <typename T> struct traits_t {};
192 // int
193 template <> struct traits_t<signed int> {
194  typedef signed int signed_t;
195  typedef unsigned int unsigned_t;
196  typedef double floating_t;
197  static char const *spec;
198  static const signed_t max_value = 0x7fffffff;
199  static const signed_t min_value = 0x80000000;
200  static const int type_size = sizeof(signed_t);
201 };
202 // unsigned int
203 template <> struct traits_t<unsigned int> {
204  typedef signed int signed_t;
205  typedef unsigned int unsigned_t;
206  typedef double floating_t;
207  static char const *spec;
208  static const unsigned_t max_value = 0xffffffff;
209  static const unsigned_t min_value = 0x00000000;
210  static const int type_size = sizeof(unsigned_t);
211 };
212 // long
213 template <> struct traits_t<signed long> {
214  typedef signed long signed_t;
215  typedef unsigned long unsigned_t;
216  typedef long double floating_t;
217  static char const *spec;
218  static const int type_size = sizeof(signed_t);
219 };
220 // long long
221 template <> struct traits_t<signed long long> {
222  typedef signed long long signed_t;
223  typedef unsigned long long unsigned_t;
224  typedef long double floating_t;
225  static char const *spec;
226  static const signed_t max_value = 0x7fffffffffffffffLL;
227  static const signed_t min_value = 0x8000000000000000LL;
228  static const int type_size = sizeof(signed_t);
229 };
230 // unsigned long long
231 template <> struct traits_t<unsigned long long> {
232  typedef signed long long signed_t;
233  typedef unsigned long long unsigned_t;
234  typedef long double floating_t;
235  static char const *spec;
236  static const unsigned_t max_value = 0xffffffffffffffffLL;
237  static const unsigned_t min_value = 0x0000000000000000LL;
238  static const int type_size = sizeof(unsigned_t);
239 };
240 //-------------------------------------------------------------------------
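// Illustrative use of traits_t<> (C++ only, not part of this header; the
// function name is hypothetical and <cstdio> would be needed): generic code
// builds printf formats from traits_t<T>::spec and derives the unsigned
// flavor of T from traits_t<T>::unsigned_t.
#if 0
template <typename T> void example_print_bounds(T lb, T ub) {
  typedef typename traits_t<T>::unsigned_t UT; // unsigned flavor of T
  UT span = (UT)(ub - lb); // wrap-free distance between the bounds
  char format[64];
  // Expands to e.g. "lb=%d ub=%d\n" when T is signed int.
  snprintf(format, sizeof(format), "lb=%%%s ub=%%%s\n", traits_t<T>::spec,
           traits_t<T>::spec);
  printf(format, lb, ub);
  (void)span;
}
#endif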
241 #else
242 #define CCAST(type, var) (type)(var)
243 #define RCAST(type, var) (type)(var)
244 #endif // __cplusplus
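// Illustrative example (not from this header; the function name is
// hypothetical): CCAST drops const/volatile qualifiers and RCAST reinterprets
// a pointer, mapping to C++ casts or to plain C casts depending on language.
#if 0
static void example_casts(const volatile kmp_int32 *shared_counter) {
  kmp_int32 *writable = CCAST(kmp_int32 *, shared_counter); // drop qualifiers
  void *raw = RCAST(void *, writable); // view the same pointer as untyped
  (void)raw;
}
#endif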
245 
246 #define KMP_EXPORT extern /* export declaration in guide libraries */
247 
248 #if __GNUC__ >= 4
249 #define __forceinline __inline
250 #endif
251 
252 #if KMP_OS_WINDOWS
253 #include <windows.h>
254 
255 static inline int KMP_GET_PAGE_SIZE(void) {
256  SYSTEM_INFO si;
257  GetSystemInfo(&si);
258  return si.dwPageSize;
259 }
260 #else
261 #define KMP_GET_PAGE_SIZE() getpagesize()
262 #endif
263 
264 #define PAGE_ALIGNED(_addr) \
265  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
266 #define ALIGN_TO_PAGE(x) \
267  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
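// Illustrative example (not part of the original header; the function name is
// hypothetical): the helpers above round an address down to its page base and
// test whether it is already page aligned.
#if 0
static void example_page_math(void *addr) {
  void *page_base = ALIGN_TO_PAGE(addr); // round down to the page boundary
  int on_boundary = PAGE_ALIGNED(addr);  // nonzero iff already page aligned
  (void)page_base;
  (void)on_boundary;
}
#endif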
268 
269 /* ---------- Support for cache alignment, padding, etc. ----------------*/
270 
271 #ifdef __cplusplus
272 extern "C" {
273 #endif // __cplusplus
274 
275 #define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */
276 
277 /* Define the default size of the cache line */
278 #ifndef CACHE_LINE
279 #define CACHE_LINE 128 /* cache line size in bytes */
280 #else
281 #if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
282 // 2006-02-13: This produced too many warnings on OS X*, so the check is
283 // skipped there for now.
283 #warning CACHE_LINE is too small.
284 #endif
285 #endif /* CACHE_LINE */
286 
287 #define KMP_CACHE_PREFETCH(ADDR) /* nothing */
288 
289 // Define attribute that indicates a function does not return
290 #if __cplusplus >= 201103L
291 #define KMP_NORETURN [[noreturn]]
292 #elif KMP_OS_WINDOWS
293 #define KMP_NORETURN __declspec(noreturn)
294 #else
295 #define KMP_NORETURN __attribute__((noreturn))
296 #endif
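// Illustrative declaration (the function name is hypothetical, not part of
// the runtime): KMP_NORETURN marks functions that never return, letting the
// compiler suppress "missing return" diagnostics and optimize callers.
#if 0
KMP_NORETURN static void example_fatal_exit(int code) {
  exit(code); // never returns; <stdlib.h> is already included above
}
#endif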
297 
298 #if KMP_OS_WINDOWS
299 #define KMP_ALIGN(bytes) __declspec(align(bytes))
300 #define KMP_THREAD_LOCAL __declspec(thread)
301 #define KMP_ALIAS /* Nothing */
302 #else
303 #define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
304 #define KMP_THREAD_LOCAL __thread
305 #define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
306 #endif
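// Illustrative example (hypothetical names, not from this header): KMP_ALIGN
// fixes the alignment of a type or object, and KMP_THREAD_LOCAL gives each
// thread its own copy of a variable.
#if 0
typedef struct KMP_ALIGN(64) example_padded_counter {
  volatile kmp_int32 value; // each counter occupies its own 64-byte block
} example_padded_counter_t;

static KMP_THREAD_LOCAL int example_tls_gtid = -1; // per-thread scratch value
#endif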
307 
308 #if KMP_HAVE_WEAK_ATTRIBUTE
309 #define KMP_WEAK_ATTRIBUTE __attribute__((weak))
310 #else
311 #define KMP_WEAK_ATTRIBUTE /* Nothing */
312 #endif
313 
314 // Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
315 #ifdef KMP_USE_VERSION_SYMBOLS
316 #define KMP_STR(x) _KMP_STR(x)
317 #define _KMP_STR(x) #x
318 // If using versioned symbols, KMP_EXPAND_NAME prepends
319 // __kmp_api_ to the real API name
320 #define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
321 #define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
322 #define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) \
323  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
324 #define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver) \
325  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias \
326  __attribute__((alias(KMP_STR(__kmp_api_##api_name)))); \
327  __asm__( \
328  ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
329  api_name) "@" ver_str "\n\t"); \
330  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR( \
331  api_name) "@@" default_ver "\n\t")
332 #else // KMP_USE_VERSION_SYMBOLS
333 #define KMP_EXPAND_NAME(api_name) api_name
334 #define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
335 #endif // KMP_USE_VERSION_SYMBOLS
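// Illustrative sketch of how these macros are used to build versioned entry
// points (the API name is hypothetical; the .symver directives are ELF/Linux
// specific): the implementation is defined under the __kmp_api_ prefix via
// KMP_EXPAND_NAME, then KMP_VERSION_SYMBOL emits the version aliases.
#if 0
int KMP_EXPAND_NAME(example_api_call)(void) { return 0; }
// Binds the _10_alias to version node OMP_1.0 and the real symbol to the
// default "VERSION" node.
KMP_VERSION_SYMBOL(example_api_call, 10, "OMP_1.0");
#endif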
336 
337 /* Temporary note: if performance testing of this passes, we can remove
338  all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
339 #define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
340 #define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
341 #define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
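// Illustrative example (hypothetical struct, not from this header):
// KMP_ALIGN_CACHE is typically placed on hot fields written by different
// threads so that they never share a cache line (avoiding false sharing).
#if 0
typedef struct example_team_counters {
  KMP_ALIGN_CACHE volatile kmp_int32 arrived;  // starts its own cache line
  KMP_ALIGN_CACHE volatile kmp_int32 released; // starts another cache line
} example_team_counters_t;
#endif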
342 
343 /* General purpose fence types for memory operations */
344 enum kmp_mem_fence_type {
345  kmp_no_fence, /* No memory fence */
346  kmp_acquire_fence, /* Acquire (read) memory fence */
347  kmp_release_fence, /* Release (write) memory fence */
348  kmp_full_fence /* Full (read+write) memory fence */
349 };
350 
351 // Synchronization primitives
352 
353 #if KMP_ASM_INTRINS && KMP_OS_WINDOWS
354 
355 #pragma intrinsic(InterlockedExchangeAdd)
356 #pragma intrinsic(InterlockedCompareExchange)
357 #pragma intrinsic(InterlockedExchange)
358 #pragma intrinsic(InterlockedExchange64)
359 
360 // Using InterlockedIncrement / InterlockedDecrement causes a library loading
361 // ordering problem, so we use InterlockedExchangeAdd instead.
362 #define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
363 #define KMP_TEST_THEN_INC_ACQ32(p) \
364  InterlockedExchangeAdd((volatile long *)(p), 1)
365 #define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
366 #define KMP_TEST_THEN_ADD4_ACQ32(p) \
367  InterlockedExchangeAdd((volatile long *)(p), 4)
368 #define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
369 #define KMP_TEST_THEN_DEC_ACQ32(p) \
370  InterlockedExchangeAdd((volatile long *)(p), -1)
371 #define KMP_TEST_THEN_ADD32(p, v) \
372  InterlockedExchangeAdd((volatile long *)(p), (v))
373 
374 #define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
375  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))
376 
377 #define KMP_XCHG_FIXED32(p, v) \
378  InterlockedExchange((volatile long *)(p), (long)(v))
379 #define KMP_XCHG_FIXED64(p, v) \
380  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))
381 
382 inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
383  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
384  return *(kmp_real32 *)&tmp;
385 }
386 
387 // Routines that we still need to implement in assembly.
388 extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
389 extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
390 extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
391 extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
392 extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
393 extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
394 extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
395 extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
396 extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
397 
398 extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
399  kmp_int8 sv);
400 extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
401  kmp_int16 sv);
402 extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
403  kmp_int32 sv);
404 extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
405  kmp_int64 sv);
406 extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
407  kmp_int8 sv);
408 extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
409  kmp_int16 cv, kmp_int16 sv);
410 extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
411  kmp_int32 cv, kmp_int32 sv);
412 extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
413  kmp_int64 cv, kmp_int64 sv);
414 
415 extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
416 extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
417 extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
418 extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
419 extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
420 extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
421 
422 //#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
423 //#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
424 #define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
425 #define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
426 //#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
427 //#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
428 #define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
429 #define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
430 //#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
431 //#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
432 #define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
433 #define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
434 //#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
435 #define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
436 #define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))
437 
438 #define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
439 #define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
440 #define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
441 #define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
442 #define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
443 #define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))
444 
445 #define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
446  __kmp_compare_and_store8((p), (cv), (sv))
447 #define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
448  __kmp_compare_and_store8((p), (cv), (sv))
449 #define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
450  __kmp_compare_and_store16((p), (cv), (sv))
451 #define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
452  __kmp_compare_and_store16((p), (cv), (sv))
453 #define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
454  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
455  (kmp_int32)(sv))
456 #define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
457  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
458  (kmp_int32)(sv))
459 #define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
460  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
461  (kmp_int64)(sv))
462 #define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
463  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
464  (kmp_int64)(sv))
465 
466 #if KMP_ARCH_X86
467 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
468  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
469  (kmp_int32)(sv))
470 #else /* 64 bit pointers */
471 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
472  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
473  (kmp_int64)(sv))
474 #endif /* KMP_ARCH_X86 */
475 
476 #define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
477  __kmp_compare_and_store_ret8((p), (cv), (sv))
478 #define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
479  __kmp_compare_and_store_ret16((p), (cv), (sv))
480 #define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
481  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
482  (kmp_int64)(sv))
483 
484 #define KMP_XCHG_FIXED8(p, v) \
485  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
486 #define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
487 //#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
488 //#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
489 //#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
490 #define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
491 
492 #elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
493 
494 /* Cast p to the correct type so that the proper intrinsic is used. */
495 #define KMP_TEST_THEN_INC32(p) \
496  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
497 #define KMP_TEST_THEN_INC_ACQ32(p) \
498  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
499 #define KMP_TEST_THEN_INC64(p) \
500  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
501 #define KMP_TEST_THEN_INC_ACQ64(p) \
502  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
503 #define KMP_TEST_THEN_ADD4_32(p) \
504  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
505 #define KMP_TEST_THEN_ADD4_ACQ32(p) \
506  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
507 #define KMP_TEST_THEN_ADD4_64(p) \
508  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
509 #define KMP_TEST_THEN_ADD4_ACQ64(p) \
510  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
511 #define KMP_TEST_THEN_DEC32(p) \
512  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
513 #define KMP_TEST_THEN_DEC_ACQ32(p) \
514  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
515 #define KMP_TEST_THEN_DEC64(p) \
516  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
517 #define KMP_TEST_THEN_DEC_ACQ64(p) \
518  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
519 #define KMP_TEST_THEN_ADD8(p, v) \
520  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
521 #define KMP_TEST_THEN_ADD32(p, v) \
522  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
523 #define KMP_TEST_THEN_ADD64(p, v) \
524  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
525 
526 #define KMP_TEST_THEN_OR8(p, v) \
527  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
528 #define KMP_TEST_THEN_AND8(p, v) \
529  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
530 #define KMP_TEST_THEN_OR32(p, v) \
531  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
532 #define KMP_TEST_THEN_AND32(p, v) \
533  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
534 #define KMP_TEST_THEN_OR64(p, v) \
535  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
536 #define KMP_TEST_THEN_AND64(p, v) \
537  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
538 
539 #define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
540  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
541  (kmp_uint8)(sv))
542 #define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
543  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
544  (kmp_uint8)(sv))
545 #define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
546  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
547  (kmp_uint16)(sv))
548 #define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
549  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
550  (kmp_uint16)(sv))
551 #define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
552  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
553  (kmp_uint32)(sv))
554 #define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
555  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
556  (kmp_uint32)(sv))
557 #define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
558  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
559  (kmp_uint64)(sv))
560 #define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
561  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
562  (kmp_uint64)(sv))
563 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
564  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv), \
565  (void *)(sv))
566 
567 #define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
568  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv), \
569  (kmp_uint8)(sv))
570 #define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
571  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv), \
572  (kmp_uint16)(sv))
573 #define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
574  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv), \
575  (kmp_uint32)(sv))
576 #define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
577  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
578  (kmp_uint64)(sv))
579 
580 #define KMP_XCHG_FIXED8(p, v) \
581  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
582 #define KMP_XCHG_FIXED16(p, v) \
583  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
584 #define KMP_XCHG_FIXED32(p, v) \
585  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
586 #define KMP_XCHG_FIXED64(p, v) \
587  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
588 
589 inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
590  kmp_int32 tmp =
591  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
592  return *(kmp_real32 *)&tmp;
593 }
594 
595 inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
596  kmp_int64 tmp =
597  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
598  return *(kmp_real64 *)&tmp;
599 }
600 
601 #else
602 
603 extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
604 extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
605 extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
606 extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
607 extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
608 extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
609 extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
610 extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
611 extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
612 
613 extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
614  kmp_int8 sv);
615 extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
616  kmp_int16 sv);
617 extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
618  kmp_int32 sv);
619 extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
620  kmp_int64 sv);
621 extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
622  kmp_int8 sv);
623 extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
624  kmp_int16 cv, kmp_int16 sv);
625 extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
626  kmp_int32 cv, kmp_int32 sv);
627 extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
628  kmp_int64 cv, kmp_int64 sv);
629 
630 extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
631 extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
632 extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
633 extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
634 extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
635 extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
636 
637 #define KMP_TEST_THEN_INC32(p) \
638  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
639 #define KMP_TEST_THEN_INC_ACQ32(p) \
640  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
641 #define KMP_TEST_THEN_INC64(p) \
642  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
643 #define KMP_TEST_THEN_INC_ACQ64(p) \
644  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
645 #define KMP_TEST_THEN_ADD4_32(p) \
646  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
647 #define KMP_TEST_THEN_ADD4_ACQ32(p) \
648  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
649 #define KMP_TEST_THEN_ADD4_64(p) \
650  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
651 #define KMP_TEST_THEN_ADD4_ACQ64(p) \
652  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
653 #define KMP_TEST_THEN_DEC32(p) \
654  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
655 #define KMP_TEST_THEN_DEC_ACQ32(p) \
656  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
657 #define KMP_TEST_THEN_DEC64(p) \
658  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
659 #define KMP_TEST_THEN_DEC_ACQ64(p) \
660  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
661 #define KMP_TEST_THEN_ADD8(p, v) \
662  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
663 #define KMP_TEST_THEN_ADD32(p, v) \
664  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
665 #define KMP_TEST_THEN_ADD64(p, v) \
666  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))
667 
668 #define KMP_TEST_THEN_OR8(p, v) \
669  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
670 #define KMP_TEST_THEN_AND8(p, v) \
671  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
672 #define KMP_TEST_THEN_OR32(p, v) \
673  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
674 #define KMP_TEST_THEN_AND32(p, v) \
675  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
676 #define KMP_TEST_THEN_OR64(p, v) \
677  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
678 #define KMP_TEST_THEN_AND64(p, v) \
679  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
680 
681 #define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv) \
682  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
683  (kmp_int8)(sv))
684 #define KMP_COMPARE_AND_STORE_REL8(p, cv, sv) \
685  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv), \
686  (kmp_int8)(sv))
687 #define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) \
688  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
689  (kmp_int16)(sv))
690 #define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) \
691  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv), \
692  (kmp_int16)(sv))
693 #define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) \
694  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
695  (kmp_int32)(sv))
696 #define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) \
697  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
698  (kmp_int32)(sv))
699 #define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) \
700  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
701  (kmp_int64)(sv))
702 #define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) \
703  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
704  (kmp_int64)(sv))
705 
706 #if KMP_ARCH_X86
707 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
708  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
709  (kmp_int32)(sv))
710 #else /* 64 bit pointers */
711 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) \
712  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
713  (kmp_int64)(sv))
714 #endif /* KMP_ARCH_X86 */
715 
716 #define KMP_COMPARE_AND_STORE_RET8(p, cv, sv) \
717  __kmp_compare_and_store_ret8((p), (cv), (sv))
718 #define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) \
719  __kmp_compare_and_store_ret16((p), (cv), (sv))
720 #define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) \
721  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv), \
722  (kmp_int32)(sv))
723 #define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) \
724  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv), \
725  (kmp_int64)(sv))
726 
727 #define KMP_XCHG_FIXED8(p, v) \
728  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
729 #define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
730 #define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
731 #define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
732 #define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
733 #define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
734 
735 #endif /* KMP_ASM_INTRINS */
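// Illustrative example (hypothetical names, not part of the original header):
// whichever branch above supplied the definitions, callers use the portable
// KMP_* macros rather than the platform primitives, e.g. for a shared counter
// and a once-only flag.
#if 0
static volatile kmp_int32 example_counter = 0;
static volatile kmp_int32 example_flag = 0;

static void example_atomics(void) {
  kmp_int32 old = KMP_TEST_THEN_INC32(&example_counter); // fetch-and-add 1
  if (KMP_COMPARE_AND_STORE_ACQ32(&example_flag, 0, 1)) {
    // this thread transitioned the flag from 0 to 1
  }
  (void)old;
}
#endif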
736 
737 /* ------------- relaxed consistency memory model stuff ------------------ */
738 
739 #if KMP_OS_WINDOWS
740 #ifdef __ABSOFT_WIN
741 #define KMP_MB() asm("nop")
742 #define KMP_IMB() asm("nop")
743 #else
744 #define KMP_MB() /* _asm{ nop } */
745 #define KMP_IMB() /* _asm{ nop } */
746 #endif
747 #endif /* KMP_OS_WINDOWS */
748 
749 #if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || \
750  KMP_ARCH_MIPS64
751 #define KMP_MB() __sync_synchronize()
752 #endif
753 
754 #ifndef KMP_MB
755 #define KMP_MB() /* nothing to do */
756 #endif
757 
758 #ifndef KMP_IMB
759 #define KMP_IMB() /* nothing to do */
760 #endif
761 
762 #ifndef KMP_ST_REL32
763 #define KMP_ST_REL32(A, D) (*(A) = (D))
764 #endif
765 
766 #ifndef KMP_ST_REL64
767 #define KMP_ST_REL64(A, D) (*(A) = (D))
768 #endif
769 
770 #ifndef KMP_LD_ACQ32
771 #define KMP_LD_ACQ32(A) (*(A))
772 #endif
773 
774 #ifndef KMP_LD_ACQ64
775 #define KMP_LD_ACQ64(A) (*(A))
776 #endif
777 
778 /* ------------------------------------------------------------------------ */
779 // FIXME - maybe this should be
780 //
781 // #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
782 // #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
783 //
784 // #define TCR_8(a) (*(volatile kmp_int64 *)(a))
785 // #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
786 //
787 // I'm fairly certain this is the correct thing to do, but I'm afraid
788 // of performance regressions.
789 
790 #define TCR_1(a) (a)
791 #define TCW_1(a, b) (a) = (b)
792 #define TCR_4(a) (a)
793 #define TCW_4(a, b) (a) = (b)
794 #define TCI_4(a) (++(a))
795 #define TCD_4(a) (--(a))
796 #define TCR_8(a) (a)
797 #define TCW_8(a, b) (a) = (b)
798 #define TCI_8(a) (++(a))
799 #define TCD_8(a) (--(a))
800 #define TCR_SYNC_4(a) (a)
801 #define TCW_SYNC_4(a, b) (a) = (b)
802 #define TCX_SYNC_4(a, b, c) \
803  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), \
804  (kmp_int32)(b), (kmp_int32)(c))
805 #define TCR_SYNC_8(a) (a)
806 #define TCW_SYNC_8(a, b) (a) = (b)
807 #define TCX_SYNC_8(a, b, c) \
808  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), \
809  (kmp_int64)(b), (kmp_int64)(c))
810 
811 #if KMP_ARCH_X86 || KMP_ARCH_MIPS
812 // What about ARM?
813 #define TCR_PTR(a) ((void *)TCR_4(a))
814 #define TCW_PTR(a, b) TCW_4((a), (b))
815 #define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
816 #define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
817 #define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))
818 
819 #else /* 64 bit pointers */
820 
821 #define TCR_PTR(a) ((void *)TCR_8(a))
822 #define TCW_PTR(a, b) TCW_8((a), (b))
823 #define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
824 #define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
825 #define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))
826 
827 #endif /* KMP_ARCH_X86 */
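// Illustrative example (hypothetical names, not from this header): TCR_*/TCW_*
// are "thread-consistent" read/write wrappers for shared variables; with the
// current definitions they compile to plain accesses (see the FIXME above).
#if 0
static volatile kmp_int32 example_done = 0;
static void *volatile example_task = NULL;

static void example_publish(void *t) {
  TCW_PTR(example_task, t); // publish the pointer
  TCW_4(example_done, 1);   // then raise the flag
}
static int example_poll(void) { return TCR_4(example_done); }
#endif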
828 
829 /* If these FTN_{TRUE,FALSE} values change, several places that use them to
830  check that the language is Fortran, not C, may also need to change. */
831 
832 #ifndef FTN_TRUE
833 #define FTN_TRUE TRUE
834 #endif
835 
836 #ifndef FTN_FALSE
837 #define FTN_FALSE FALSE
838 #endif
839 
840 typedef void (*microtask_t)(int *gtid, int *npr, ...);
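// Illustrative sketch (hypothetical names, not part of this header), assuming
// the usual invocation where the runtime passes pointers to the global thread
// id and the team-local thread id, followed by the outlined arguments:
#if 0
static void example_microtask(int *gtid, int *npr, ...) {
  (void)gtid; // global thread id of the executing thread
  (void)npr;  // second thread-id argument supplied by the runtime
}
static microtask_t example_fn = example_microtask;
#endif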
841 
842 #ifdef USE_VOLATILE_CAST
843 #define VOLATILE_CAST(x) (volatile x)
844 #else
845 #define VOLATILE_CAST(x) (x)
846 #endif
847 
848 #define KMP_WAIT_YIELD __kmp_wait_yield_4
849 #define KMP_WAIT_YIELD_PTR __kmp_wait_yield_4_ptr
850 #define KMP_EQ __kmp_eq_4
851 #define KMP_NEQ __kmp_neq_4
852 #define KMP_LT __kmp_lt_4
853 #define KMP_GE __kmp_ge_4
854 #define KMP_LE __kmp_le_4
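// Illustrative sketch (hypothetical names, not from this header) of how the
// spin-wait helpers above are used: spin, with yielding, until a 32-bit
// location satisfies the predicate; the underlying __kmp_wait_yield_4 routine
// is declared elsewhere in the runtime, and the last argument is an optional
// object pointer used only for instrumentation.
#if 0
static volatile kmp_uint32 example_gate = 0;

static void example_wait_for_gate(void) {
  KMP_WAIT_YIELD(&example_gate, 1U, KMP_EQ, NULL); // wait until gate == 1
}
#endif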
855 
856 /* Workaround for Intel(R) 64 code gen bug when taking address of static array
857  * (Intel(R) 64 Tracker #138) */
858 #if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
859 #define STATIC_EFI2_WORKAROUND
860 #else
861 #define STATIC_EFI2_WORKAROUND static
862 #endif
863 
864 // Support for BGET usage
865 #ifndef KMP_USE_BGET
866 #define KMP_USE_BGET 1
867 #endif
868 
869 // Switches for OSS builds
870 #ifndef USE_CMPXCHG_FIX
871 #define USE_CMPXCHG_FIX 1
872 #endif
873 
874 // Enable dynamic user lock
875 #if OMP_45_ENABLED
876 #define KMP_USE_DYNAMIC_LOCK 1
877 #endif
878 
879 // Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
880 // dynamic user lock is turned on
881 #if KMP_USE_DYNAMIC_LOCK
882 // Visual Studio can't handle the asm sections in this code
883 #define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
884 #ifdef KMP_USE_ADAPTIVE_LOCKS
885 #undef KMP_USE_ADAPTIVE_LOCKS
886 #endif
887 #define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
888 #endif
889 
890 // Enable conversion of tick counts to seconds
891 #if KMP_STATS_ENABLED
892 #define KMP_HAVE_TICK_TIME \
893  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
894 #endif
895 
896 // Warning levels
897 enum kmp_warnings_level {
898  kmp_warnings_off = 0, /* No warnings */
899  kmp_warnings_low, /* Minimal warnings (default) */
900  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
901  kmp_warnings_verbose /* reserved */
902 };
903 
904 #ifdef __cplusplus
905 } // extern "C"
906 #endif // __cplusplus
907 
908 #endif /* KMP_OS_H */
909 // Safe C API
910 #include "kmp_safe_c_api.h"