author     Jonathan Wakely <jwakely.gcc@gmail.com>   2012-10-09 08:16:13 +0000
committer  Jonathan Wakely <jwakely.gcc@gmail.com>   2012-10-09 08:16:13 +0000
commit     8e8ae86110b878049f9dba07866b72f170cb21cb (patch)
tree       87cbb09e7fd23d73d39e84854a1fad151118a777 /libstdc++-v3
parent     8ba21f8ea572bbca5e26f40326a4f610796c2683 (diff)
PR libstdc++/54754
* include/parallel/compatibility.h: Use atomic built-ins when they are lock-free.

git-svn-id: https://gcc.gnu.org/svn/gcc/trunk@192240 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'libstdc++-v3')
-rw-r--r--  libstdc++-v3/ChangeLog                          |   6
-rw-r--r--  libstdc++-v3/include/parallel/compatibility.h   | 170
2 files changed, 49 insertions, 127 deletions
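
The change collapses the width-specific helpers (__fetch_and_add_32/64 and __compare_and_swap_32/64) into single templates that call the __atomic built-ins whenever __atomic_always_lock_free reports the operand type as lock-free, and otherwise serialise through an OpenMP critical section. The following is a minimal standalone sketch of that dispatch pattern, not the library code itself; the names add_omp_fallback and fetch_and_add_sketch are illustrative only.

// Sketch of the lock-free-or-OpenMP dispatch introduced by this patch.
// Compile with g++ -fopenmp so the fallback path actually serialises.
#include <cstdint>

template<typename T>
  T
  add_omp_fallback(volatile T* ptr, T addend)
  {
    T res;
#pragma omp critical
    {
      res = *ptr;            // remember the old value
      *ptr = *ptr + addend;  // write back the incremented value
    }
    return res;              // fetch-and-add returns the pre-update value
  }

template<typename T>
  T
  fetch_and_add_sketch(volatile T* ptr, T addend)
  {
    // __atomic_always_lock_free(size, ptr) is a GCC built-in that folds to a
    // compile-time constant: true when atomic operations on an object of the
    // given size (and the alignment implied by ptr) never need a lock.
    if (__atomic_always_lock_free(sizeof(T), ptr))
      return __atomic_fetch_add(ptr, addend, __ATOMIC_ACQ_REL);
    return add_omp_fallback(ptr, addend);
  }

int main()
{
  volatile std::int64_t counter = 0;
  std::int64_t old = fetch_and_add_sketch(&counter, std::int64_t(7));
  return (old == 0 && counter == 7) ? 0 : 1;  // exits 0 on success
}

On mainstream 64-bit targets the lock-free branch is taken for 32- and 64-bit integers and compiles down to a single atomic instruction, so the critical-section fallback only matters where the target lacks lock-free atomics of the requested width.
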
diff --git a/libstdc++-v3/ChangeLog b/libstdc++-v3/ChangeLog
index 0b03bacd894..b9f3533798c 100644
--- a/libstdc++-v3/ChangeLog
+++ b/libstdc++-v3/ChangeLog
@@ -1,3 +1,9 @@
+2012-10-09 Jonathan Wakely <jwakely.gcc@gmail.com>
+
+ PR libstdc++/54754
+ * include/parallel/compatibility.h: Use atomic built-ins when they are
+ lock-free.
+
2012-10-09 Uros Bizjak <ubizjak@gmail.com>
* testsuite/util/testsuite_abi.cc (check_version): Add CXXABI_1.3.7.
diff --git a/libstdc++-v3/include/parallel/compatibility.h b/libstdc++-v3/include/parallel/compatibility.h
index 03506d84de3..a58e65fe60c 100644
--- a/libstdc++-v3/include/parallel/compatibility.h
+++ b/libstdc++-v3/include/parallel/compatibility.h
@@ -51,154 +51,70 @@ __attribute((dllimport)) void __attribute__((stdcall)) Sleep (unsigned long);
namespace __gnu_parallel
{
- // These atomic functions only work on integers
-
- /** @brief Add a value to a variable, atomically.
- *
- * Implementation is heavily platform-dependent.
- * @param __ptr Pointer to a 32-bit signed integer.
- * @param __addend Value to add.
- */
- inline int32_t
- __fetch_and_add_32(volatile int32_t* __ptr, int32_t __addend)
- {
- return __atomic_fetch_add(__ptr, __addend, __ATOMIC_ACQ_REL);
- }
-
- /** @brief Add a value to a variable, atomically.
- *
- * Implementation is heavily platform-dependent.
- * @param __ptr Pointer to a 64-bit signed integer.
- * @param __addend Value to add.
- */
- inline int64_t
- __fetch_and_add_64(volatile int64_t* __ptr, int64_t __addend)
- {
-#if defined(__x86_64)
- return __atomic_fetch_add(__ptr, __addend, __ATOMIC_ACQ_REL);
-#elif defined(__i386) && \
- (defined(__i686) || defined(__pentium4) || defined(__athlon) \
- || defined(__k8) || defined(__core2))
- return __atomic_fetch_add(__ptr, __addend, __ATOMIC_ACQ_REL);
-#else //fallback, slow
-#if defined(__i386)
- // XXX doesn'__t work with -march=native
- //#warning "please compile with -march=i686 or better"
-#endif
-#pragma message("slow __fetch_and_add_64")
- int64_t __res;
-#pragma omp critical
+ template<typename _Tp>
+ inline _Tp
+ __add_omp(volatile _Tp* __ptr, _Tp __addend)
{
- __res = *__ptr;
- *(__ptr) += __addend;
+ int64_t __res;
+#pragma omp critical
+ {
+ __res = *__ptr;
+ *(__ptr) += __addend;
+ }
+ return __res;
}
- return __res;
-#endif
- }
/** @brief Add a value to a variable, atomically.
*
- * Implementation is heavily platform-dependent.
* @param __ptr Pointer to a signed integer.
* @param __addend Value to add.
*/
template<typename _Tp>
- inline _Tp
- __fetch_and_add(volatile _Tp* __ptr, _Tp __addend)
- {
- if (sizeof(_Tp) == sizeof(int32_t))
- return
- (_Tp)__fetch_and_add_32((volatile int32_t*) __ptr, (int32_t)__addend);
- else if (sizeof(_Tp) == sizeof(int64_t))
- return
- (_Tp)__fetch_and_add_64((volatile int64_t*) __ptr, (int64_t)__addend);
- else
- _GLIBCXX_PARALLEL_ASSERT(false);
- }
-
- /** @brief Compare @c *__ptr and @c __comparand. If equal, let @c
- * *__ptr=__replacement and return @c true, return @c false otherwise.
- *
- * Implementation is heavily platform-dependent.
- * @param __ptr Pointer to 32-bit signed integer.
- * @param __comparand Compare value.
- * @param __replacement Replacement value.
- */
- inline bool
- __compare_and_swap_32(volatile int32_t* __ptr, int32_t __comparand,
- int32_t __replacement)
- {
- return __atomic_compare_exchange_n(__ptr, &__comparand, __replacement,
- false, __ATOMIC_ACQ_REL,
- __ATOMIC_RELAXED);
- }
+ inline _Tp
+ __fetch_and_add(volatile _Tp* __ptr, _Tp __addend)
+ {
+ if (__atomic_always_lock_free(sizeof(_Tp), __ptr))
+ return __atomic_fetch_add(__ptr, __addend, __ATOMIC_ACQ_REL);
+ return __add_omp(__ptr, __addend);
+ }
- /** @brief Compare @c *__ptr and @c __comparand. If equal, let @c
- * *__ptr=__replacement and return @c true, return @c false otherwise.
- *
- * Implementation is heavily platform-dependent.
- * @param __ptr Pointer to 64-bit signed integer.
- * @param __comparand Compare value.
- * @param __replacement Replacement value.
- */
- inline bool
- __compare_and_swap_64(volatile int64_t* __ptr, int64_t __comparand,
- int64_t __replacement)
- {
-#if defined(__x86_64)
- return __atomic_compare_exchange_n(__ptr, &__comparand, __replacement,
- false, __ATOMIC_ACQ_REL,
- __ATOMIC_RELAXED);
-#elif defined(__i386) && \
- (defined(__i686) || defined(__pentium4) || defined(__athlon) \
- || defined(__k8) || defined(__core2))
- return __atomic_compare_exchange_n(__ptr, &__comparand, __replacement,
- false, __ATOMIC_ACQ_REL,
- __ATOMIC_RELAXED);
-#else
-#if defined(__i386)
- // XXX -march=native
- //#warning "please compile with -march=i686 or better"
-#endif
-#pragma message("slow __compare_and_swap_64")
- bool __res = false;
-#pragma omp critical
+ template<typename _Tp>
+ inline bool
+ __cas_omp(volatile _Tp* __ptr, _Tp __comparand, _Tp __replacement)
{
- if (*__ptr == __comparand)
- {
- *__ptr = __replacement;
- __res = true;
- }
+ bool __res = false;
+#pragma omp critical
+ {
+ if (*__ptr == __comparand)
+ {
+ *__ptr = __replacement;
+ __res = true;
+ }
+ }
+ return __res;
}
- return __res;
-#endif
- }
- /** @brief Compare @c *__ptr and @c __comparand. If equal, let @c
+ /** @brief Compare-and-swap
+ *
+ * Compare @c *__ptr and @c __comparand. If equal, let @c
* *__ptr=__replacement and return @c true, return @c false otherwise.
*
- * Implementation is heavily platform-dependent.
* @param __ptr Pointer to signed integer.
* @param __comparand Compare value.
* @param __replacement Replacement value.
*/
template<typename _Tp>
- inline bool
- __compare_and_swap(volatile _Tp* __ptr, _Tp __comparand, _Tp __replacement)
- {
- if (sizeof(_Tp) == sizeof(int32_t))
- return __compare_and_swap_32((volatile int32_t*) __ptr,
- (int32_t)__comparand,
- (int32_t)__replacement);
- else if (sizeof(_Tp) == sizeof(int64_t))
- return __compare_and_swap_64((volatile int64_t*) __ptr,
- (int64_t)__comparand,
- (int64_t)__replacement);
- else
- _GLIBCXX_PARALLEL_ASSERT(false);
- }
+ inline bool
+ __compare_and_swap(volatile _Tp* __ptr, _Tp __comparand, _Tp __replacement)
+ {
+ if (__atomic_always_lock_free(sizeof(_Tp), __ptr))
+ return __atomic_compare_exchange_n(__ptr, &__comparand, __replacement,
+ false, __ATOMIC_ACQ_REL,
+ __ATOMIC_RELAXED);
+ return __cas_omp(__ptr, __comparand, __replacement);
+ }
- /** @brief Yield the control to another thread, without waiting for
+ /** @brief Yield control to another thread, without waiting for
* the end of the time slice.
*/
inline void