32 #ifndef _GLIBCXX_PARALLEL_COMPATIBILITY_H
33 #define _GLIBCXX_PARALLEL_COMPATIBILITY_H 1
38 #if defined(__SUNPRO_CC) && defined(__sparc)
39 #include <sys/atomic.h>
42 #if !defined(_WIN32) || defined (__CYGWIN__)
58 __attribute((dllimport)) void __attribute__((stdcall)) Sleep (
unsigned long);
64 template<
typename must_be_
int =
int>
67 asm volatile(
"lock xadd %0,%1"
68 :
"=r" (inc),
"=m" (*x)
74 template<
typename must_be_
int =
int>
77 asm volatile(
"lock xadd %0,%1"
78 :
"=r" (inc),
"=m" (*x)
97 #if defined(__ICC) //x86 version
98 return _InterlockedExchangeAdd((
void*)ptr, addend);
99 #elif defined(__ECC) //IA-64 version
100 return _InterlockedExchangeAdd((
void*)ptr, addend);
101 #elif defined(__ICL) || defined(_MSC_VER)
102 return _InterlockedExchangeAdd(reinterpret_cast<volatile long*>(ptr),
104 #elif defined(__GNUC__)
105 return __sync_fetch_and_add(ptr, addend);
106 #elif defined(__SUNPRO_CC) && defined(__sparc)
107 volatile int32 before, after;
111 after = before + addend;
112 }
while (atomic_cas_32((
volatile unsigned int*)ptr, before,
115 #else //fallback, slow
116 #pragma message("slow fetch_and_add_32")
136 #if defined(__ICC) && defined(__x86_64) //x86 version
137 return faa64<int>((
int64*)ptr, addend);
138 #elif defined(__ECC) //IA-64 version
139 return _InterlockedExchangeAdd64((
void*)ptr, addend);
140 #elif defined(__ICL) || defined(_MSC_VER)
142 _GLIBCXX_PARALLEL_ASSERT(
false);
145 return _InterlockedExchangeAdd64(ptr, addend);
147 #elif defined(__GNUC__) && defined(__x86_64)
148 return __sync_fetch_and_add(ptr, addend);
149 #elif defined(__GNUC__) && defined(__i386) && \
150 (defined(__i686) || defined(__pentium4) || defined(__athlon))
151 return __sync_fetch_and_add(ptr, addend);
152 #elif defined(__SUNPRO_CC) && defined(__sparc)
153 volatile int64 before, after;
157 after = before + addend;
158 }
while (atomic_cas_64((
volatile unsigned long long*)ptr, before,
161 #else //fallback, slow
162 #if defined(__GNUC__) && defined(__i386)
166 #pragma message("slow fetch_and_add_64")
187 if (
sizeof(T) ==
sizeof(
int32))
189 else if (
sizeof(T) ==
sizeof(
int64))
192 _GLIBCXX_PARALLEL_ASSERT(
false);
198 template<
typename must_be_
int =
int>
203 __asm__ __volatile__(
"lock; cmpxchgl %1,%2"
205 :
"q"(nw),
"m"(*(
volatile long long*)(ptr)),
"0"(old)
210 #if defined(__x86_64)
211 template<
typename must_be_
int =
int>
216 __asm__ __volatile__(
"lock; cmpxchgq %1,%2"
218 :
"q"(nw),
"m"(*(
volatile long long*)(ptr)),
"0"(old)
237 #if defined(__ICC) //x86 version
238 return _InterlockedCompareExchange((
void*)ptr, replacement,
239 comparand) == comparand;
240 #elif defined(__ECC) //IA-64 version
241 return _InterlockedCompareExchange((
void*)ptr, replacement,
242 comparand) == comparand;
243 #elif defined(__ICL) || defined(_MSC_VER)
244 return _InterlockedCompareExchange(reinterpret_cast<volatile long*>(ptr),
245 replacement, comparand) == comparand;
246 #elif defined(__GNUC__)
247 return __sync_bool_compare_and_swap(ptr, comparand, replacement);
248 #elif defined(__SUNPRO_CC) && defined(__sparc)
249 return atomic_cas_32((
volatile unsigned int*)ptr, comparand,
250 replacement) == comparand;
252 #pragma message("slow compare_and_swap_32")
256 if (*ptr == comparand)
277 #if defined(__ICC) && defined(__x86_64) //x86 version
278 return cas64<int>(ptr, comparand, replacement) == comparand;
279 #elif defined(__ECC) //IA-64 version
280 return _InterlockedCompareExchange64((
void*)ptr, replacement,
281 comparand) == comparand;
282 #elif defined(__ICL) || defined(_MSC_VER)
284 _GLIBCXX_PARALLEL_ASSERT(
false);
287 return _InterlockedCompareExchange64(ptr, replacement,
288 comparand) == comparand;
291 #elif defined(__GNUC__) && defined(__x86_64)
292 return __sync_bool_compare_and_swap(ptr, comparand, replacement);
293 #elif defined(__GNUC__) && defined(__i386) && \
294 (defined(__i686) || defined(__pentium4) || defined(__athlon))
295 return __sync_bool_compare_and_swap(ptr, comparand, replacement);
296 #elif defined(__SUNPRO_CC) && defined(__sparc)
297 return atomic_cas_64((
volatile unsigned long long*)ptr,
298 comparand, replacement) == comparand;
300 #if defined(__GNUC__) && defined(__i386)
304 #pragma message("slow compare_and_swap_64")
308 if (*ptr == comparand)
329 if (
sizeof(T) ==
sizeof(
int32))
331 else if (
sizeof(T) ==
sizeof(
int64))
334 _GLIBCXX_PARALLEL_ASSERT(
false);
342 #if defined (_WIN32) && !defined (__CYGWIN__)
bool compare_and_swap(volatile T *ptr, T comparand, T replacement)
Compare *ptr and comparand. If they are equal, set *ptr to replacement and return true; otherwise return false.
GNU parallel code for public use.
Sequential helper functions. This file is a GNU parallel extension to the Standard C++ Library...
int64 fetch_and_add_64(volatile int64 *ptr, int64 addend)
Add a value to a variable, atomically.
long long int64
64-bit signed integer.
bool compare_and_swap_32(volatile int32 *ptr, int32 comparand, int32 replacement)
Compare *ptr and comparand. If they are equal, set *ptr to replacement and return true; otherwise return false.
int int32
32-bit signed integer.
Basic types and typedefs. This file is a GNU parallel extension to the Standard C++ Library...
void yield()
Yield control to another thread, without waiting for the end of the time slice.
T fetch_and_add(volatile T *ptr, T addend)
Add a value to a variable, atomically.
bool compare_and_swap_64(volatile int64 *ptr, int64 comparand, int64 replacement)
Compare *ptr and comparand. If they are equal, set *ptr to replacement and return true; otherwise return false.
int32 fetch_and_add_32(volatile int32 *ptr, int32 addend)
Add a value to a variable, atomically.