forked from OSchip/llvm-project
[libc++] Change memory_order to an enum class
This implements P0439R0. Thanks to Zoe Carver for the patch. Differential Revision: https://reviews.llvm.org/D58201 llvm-svn: 355403
This commit is contained in:
parent
bc6b225d42
commit
b55803283b
|
@@ -20,17 +20,24 @@ namespace std
|
||||||
|
|
||||||
#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
|
#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
|
||||||
|
|
||||||
// order and consistency
|
// order and consistency
|
||||||
|
|
||||||
typedef enum memory_order
|
enum memory_order: unspecified // enum class in C++20
|
||||||
{
|
{
|
||||||
memory_order_relaxed,
|
relaxed,
|
||||||
memory_order_consume, // load-consume
|
consume, // load-consume
|
||||||
memory_order_acquire, // load-acquire
|
acquire, // load-acquire
|
||||||
memory_order_release, // store-release
|
release, // store-release
|
||||||
memory_order_acq_rel, // store-release load-acquire
|
acq_rel, // store-release load-acquire
|
||||||
memory_order_seq_cst // store-release load-acquire
|
seq_cst // store-release load-acquire
|
||||||
} memory_order;
|
};
|
||||||
|
|
||||||
|
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
|
||||||
|
inline constexpr auto memory_order_consume = memory_order::consume;
|
||||||
|
inline constexpr auto memory_order_acquire = memory_order::acquire;
|
||||||
|
inline constexpr auto memory_order_release = memory_order::release;
|
||||||
|
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
|
||||||
|
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
|
||||||
|
|
||||||
template <class T> T kill_dependency(T y) noexcept;
|
template <class T> T kill_dependency(T y) noexcept;
|
||||||
|
|
||||||
|
@@ -577,14 +584,39 @@ void atomic_signal_fence(memory_order m) noexcept;
|
||||||
|
|
||||||
_LIBCPP_BEGIN_NAMESPACE_STD
|
_LIBCPP_BEGIN_NAMESPACE_STD
|
||||||
|
|
||||||
typedef enum memory_order
|
#if _LIBCPP_STD_VER > 17
|
||||||
{
|
|
||||||
memory_order_relaxed, memory_order_consume, memory_order_acquire,
|
enum class memory_order: unsigned {
|
||||||
memory_order_release, memory_order_acq_rel, memory_order_seq_cst
|
relaxed, consume, acquire, release, acq_rel, seq_cst
|
||||||
|
};
|
||||||
|
|
||||||
|
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
|
||||||
|
inline constexpr auto memory_order_consume = memory_order::consume;
|
||||||
|
inline constexpr auto memory_order_acquire = memory_order::acquire;
|
||||||
|
inline constexpr auto memory_order_release = memory_order::release;
|
||||||
|
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
|
||||||
|
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
|
||||||
|
|
||||||
|
static_assert((is_same<underlying_type<memory_order>::type,
|
||||||
|
unsigned>::value), "Underlying type differs from unsigned unexpectedly");
|
||||||
|
|
||||||
|
#else
|
||||||
|
|
||||||
|
typedef enum memory_order {
|
||||||
|
memory_order_relaxed, memory_order_consume, memory_order_acquire,
|
||||||
|
memory_order_release, memory_order_acq_rel, memory_order_seq_cst
|
||||||
} memory_order;
|
} memory_order;
|
||||||
|
|
||||||
|
static_assert((is_same<underlying_type<memory_order>::type,
|
||||||
|
unsigned>::value), "Underlying type differs from unsigned unexpectedly");
|
||||||
|
|
||||||
|
#endif // _LIBCPP_STD_VER > 17
|
||||||
|
|
||||||
|
typedef underlying_type<memory_order>::type __memory_order_underlying_t; // unsigned
|
||||||
|
|
||||||
|
|
||||||
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
|
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
|
||||||
defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
|
defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
|
||||||
|
|
||||||
// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
|
// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
|
||||||
// the default operator= in an object is not volatile, a byte-by-byte copy
|
// the default operator= in an object is not volatile, a byte-by-byte copy
|
||||||
|
@@ -1423,65 +1455,65 @@ struct __atomic_base // false
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
_LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
|
_LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
|
||||||
{__cxx_atomic_store(&__a_, __d, __m);}
|
{__cxx_atomic_store(&__a_, __d, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
_LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
|
_LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
|
||||||
{__cxx_atomic_store(&__a_, __d, __m);}
|
{__cxx_atomic_store(&__a_, __d, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
|
_Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
|
||||||
_LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
|
_LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
|
||||||
{return __cxx_atomic_load(&__a_, __m);}
|
{return __cxx_atomic_load(&__a_, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
|
_Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
|
||||||
_LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
|
_LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
|
||||||
{return __cxx_atomic_load(&__a_, __m);}
|
{return __cxx_atomic_load(&__a_, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
operator _Tp() const volatile _NOEXCEPT {return load();}
|
operator _Tp() const volatile _NOEXCEPT {return load();}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
operator _Tp() const _NOEXCEPT {return load();}
|
operator _Tp() const _NOEXCEPT {return load();}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
_Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_exchange(&__a_, __d, __m);}
|
{return __cxx_atomic_exchange(&__a_, __d, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
_Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_exchange(&__a_, __d, __m);}
|
{return __cxx_atomic_exchange(&__a_, __d, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool compare_exchange_weak(_Tp& __e, _Tp __d,
|
bool compare_exchange_weak(_Tp& __e, _Tp __d,
|
||||||
memory_order __s, memory_order __f) volatile _NOEXCEPT
|
memory_order __s, memory_order __f) volatile _NOEXCEPT
|
||||||
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
|
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
|
||||||
{return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
|
{return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, static_cast<__memory_order_underlying_t>(__s), static_cast<__memory_order_underlying_t>(__f));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool compare_exchange_weak(_Tp& __e, _Tp __d,
|
bool compare_exchange_weak(_Tp& __e, _Tp __d,
|
||||||
memory_order __s, memory_order __f) _NOEXCEPT
|
memory_order __s, memory_order __f) _NOEXCEPT
|
||||||
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
|
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
|
||||||
{return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
|
{return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, static_cast<__memory_order_underlying_t>(__s), static_cast<__memory_order_underlying_t>(__f));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool compare_exchange_strong(_Tp& __e, _Tp __d,
|
bool compare_exchange_strong(_Tp& __e, _Tp __d,
|
||||||
memory_order __s, memory_order __f) volatile _NOEXCEPT
|
memory_order __s, memory_order __f) volatile _NOEXCEPT
|
||||||
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
|
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
|
||||||
{return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
|
{return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, static_cast<__memory_order_underlying_t>(__s), static_cast<__memory_order_underlying_t>(__f));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool compare_exchange_strong(_Tp& __e, _Tp __d,
|
bool compare_exchange_strong(_Tp& __e, _Tp __d,
|
||||||
memory_order __s, memory_order __f) _NOEXCEPT
|
memory_order __s, memory_order __f) _NOEXCEPT
|
||||||
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
|
_LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
|
||||||
{return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
|
{return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, static_cast<__memory_order_underlying_t>(__s), static_cast<__memory_order_underlying_t>(__f));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool compare_exchange_weak(_Tp& __e, _Tp __d,
|
bool compare_exchange_weak(_Tp& __e, _Tp __d,
|
||||||
memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
|
{return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, static_cast<__memory_order_underlying_t>(__m), static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool compare_exchange_weak(_Tp& __e, _Tp __d,
|
bool compare_exchange_weak(_Tp& __e, _Tp __d,
|
||||||
memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
|
{return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, static_cast<__memory_order_underlying_t>(__m), static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool compare_exchange_strong(_Tp& __e, _Tp __d,
|
bool compare_exchange_strong(_Tp& __e, _Tp __d,
|
||||||
memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
|
{return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, static_cast<__memory_order_underlying_t>(__m), static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool compare_exchange_strong(_Tp& __e, _Tp __d,
|
bool compare_exchange_strong(_Tp& __e, _Tp __d,
|
||||||
memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
|
{return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, static_cast<__memory_order_underlying_t>(__m), static_cast<__memory_order_underlying_t>(__m));}
|
||||||
|
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
__atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
|
__atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
|
||||||
|
@@ -1520,34 +1552,34 @@ struct __atomic_base<_Tp, true>
|
||||||
|
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
_Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_add(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
_Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_add(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
_Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_sub(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
_Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_sub(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
_Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_and(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
_Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_and(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
_Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_or(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
_Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_or(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
_Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_xor(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
_Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_xor(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
|
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
|
_Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
|
||||||
|
@@ -1629,17 +1661,17 @@ struct atomic<_Tp*>
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
|
_Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
|
||||||
volatile _NOEXCEPT
|
volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_add(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
_Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_add(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
|
_Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
|
||||||
volatile _NOEXCEPT
|
volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_sub(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
_Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
|
{return __cxx_atomic_fetch_sub(&this->__a_, __op, static_cast<__memory_order_underlying_t>(__m));}
|
||||||
|
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
_Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
|
_Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
|
||||||
|
@@ -2232,16 +2264,16 @@ typedef struct atomic_flag
|
||||||
|
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
|
{return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
|
{return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
|
||||||
{__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
|
{__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), static_cast<__memory_order_underlying_t>(__m));}
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
|
||||||
{__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
|
{__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), static_cast<__memory_order_underlying_t>(__m));}
|
||||||
|
|
||||||
_LIBCPP_INLINE_VISIBILITY
|
_LIBCPP_INLINE_VISIBILITY
|
||||||
atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
|
atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
|
||||||
|
@@ -2323,14 +2355,14 @@ inline _LIBCPP_INLINE_VISIBILITY
|
||||||
void
|
void
|
||||||
atomic_thread_fence(memory_order __m) _NOEXCEPT
|
atomic_thread_fence(memory_order __m) _NOEXCEPT
|
||||||
{
|
{
|
||||||
__cxx_atomic_thread_fence(__m);
|
__cxx_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__m));
|
||||||
}
|
}
|
||||||
|
|
||||||
inline _LIBCPP_INLINE_VISIBILITY
|
inline _LIBCPP_INLINE_VISIBILITY
|
||||||
void
|
void
|
||||||
atomic_signal_fence(memory_order __m) _NOEXCEPT
|
atomic_signal_fence(memory_order __m) _NOEXCEPT
|
||||||
{
|
{
|
||||||
__cxx_atomic_signal_fence(__m);
|
__cxx_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__m));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Atomics for standard typedef types
|
// Atomics for standard typedef types
|
||||||
|
|
|
@@ -21,14 +21,15 @@
|
||||||
|
|
||||||
int main(int, char**)
|
int main(int, char**)
|
||||||
{
|
{
|
||||||
assert(std::memory_order_relaxed == 0);
|
assert(static_cast<int>(std::memory_order_relaxed) == 0);
|
||||||
assert(std::memory_order_consume == 1);
|
assert(static_cast<int>(std::memory_order_consume) == 1);
|
||||||
assert(std::memory_order_acquire == 2);
|
assert(static_cast<int>(std::memory_order_acquire) == 2);
|
||||||
assert(std::memory_order_release == 3);
|
assert(static_cast<int>(std::memory_order_release) == 3);
|
||||||
assert(std::memory_order_acq_rel == 4);
|
assert(static_cast<int>(std::memory_order_acq_rel) == 4);
|
||||||
assert(std::memory_order_seq_cst == 5);
|
assert(static_cast<int>(std::memory_order_seq_cst) == 5);
|
||||||
std::memory_order o = std::memory_order_seq_cst;
|
|
||||||
assert(o == 5);
|
|
||||||
|
|
||||||
return 0;
|
std::memory_order o = std::memory_order_seq_cst;
|
||||||
|
assert(static_cast<int>(o) == 5);
|
||||||
|
|
||||||
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
|
@@ -0,0 +1,23 @@
|
||||||
|
//===----------------------------------------------------------------------===//
|
||||||
|
//
|
||||||
|
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
||||||
|
// See https://llvm.org/LICENSE.txt for license information.
|
||||||
|
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
||||||
|
//
|
||||||
|
//===----------------------------------------------------------------------===//
|
||||||
|
//
|
||||||
|
// UNSUPPORTED: libcpp-has-no-threads, c++98, c++03, c++11, c++14, c++17
|
||||||
|
|
||||||
|
#include <atomic>
|
||||||
|
|
||||||
|
int main(int, char**)
|
||||||
|
{
|
||||||
|
static_assert(std::memory_order_relaxed == std::memory_order::relaxed);
|
||||||
|
static_assert(std::memory_order_consume == std::memory_order::consume);
|
||||||
|
static_assert(std::memory_order_acquire == std::memory_order::acquire);
|
||||||
|
static_assert(std::memory_order_release == std::memory_order::release);
|
||||||
|
static_assert(std::memory_order_acq_rel == std::memory_order::acq_rel);
|
||||||
|
static_assert(std::memory_order_seq_cst == std::memory_order::seq_cst);
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
|
@@ -61,7 +61,7 @@
|
||||||
<tr><td><a href="https://wg21.link/P0053R7">P0053R7</a></td><td>LWG</td><td>C++ Synchronized Buffered Ostream</td><td>Albuquerque</td><td></td><td></td></tr>
|
<tr><td><a href="https://wg21.link/P0053R7">P0053R7</a></td><td>LWG</td><td>C++ Synchronized Buffered Ostream</td><td>Albuquerque</td><td></td><td></td></tr>
|
||||||
<tr><td><a href="https://wg21.link/P0202R3">P0202R3</a></td><td>LWG</td><td>Add constexpr modifiers to functions in <algorithm> and <utility> Headers</td><td>Albuquerque</td><td><I>In Progress</I></td><td>7.0</td></tr>
|
<tr><td><a href="https://wg21.link/P0202R3">P0202R3</a></td><td>LWG</td><td>Add constexpr modifiers to functions in <algorithm> and <utility> Headers</td><td>Albuquerque</td><td><I>In Progress</I></td><td>7.0</td></tr>
|
||||||
<tr><td><a href="https://wg21.link/P0415R1">P0415R1</a></td><td>LWG</td><td>Constexpr for <tt>std::complex</tt></td><td>Albuquerque</td><td><I>In Progress</I></td><td>7.0</td></tr>
|
<tr><td><a href="https://wg21.link/P0415R1">P0415R1</a></td><td>LWG</td><td>Constexpr for <tt>std::complex</tt></td><td>Albuquerque</td><td><I>In Progress</I></td><td>7.0</td></tr>
|
||||||
<tr><td><a href="https://wg21.link/P0439R0">P0439R0</a></td><td>LWG</td><td>Make <tt>std::memory_order</tt> a scoped enumeration</td><td>Albuquerque</td><td></td><td></td></tr>
|
<tr><td><a href="https://wg21.link/P0439R0">P0439R0</a></td><td>LWG</td><td>Make <tt>std::memory_order</tt> a scoped enumeration</td><td>Albuquerque</td><td>Complete</td><td></td></tr>
|
||||||
<tr><td><a href="https://wg21.link/P0457R2">P0457R2</a></td><td>LWG</td><td>String Prefix and Suffix Checking</td><td>Albuquerque</td><td>Complete</td><td>6.0</td></tr>
|
<tr><td><a href="https://wg21.link/P0457R2">P0457R2</a></td><td>LWG</td><td>String Prefix and Suffix Checking</td><td>Albuquerque</td><td>Complete</td><td>6.0</td></tr>
|
||||||
<tr><td><a href="https://wg21.link/P0550R2">P0550R2</a></td><td>LWG</td><td>Transformation Trait <tt>remove_cvref</tt></td><td>Albuquerque</td><td>Complete</td><td>6.0</td></tr>
|
<tr><td><a href="https://wg21.link/P0550R2">P0550R2</a></td><td>LWG</td><td>Transformation Trait <tt>remove_cvref</tt></td><td>Albuquerque</td><td>Complete</td><td>6.0</td></tr>
|
||||||
<tr><td><a href="https://wg21.link/P0600R1">P0600R1</a></td><td>LWG</td><td>nodiscard in the Library</td><td>Albuquerque</td><td><I>In Progress</I></td><td>7.0</td></tr>
|
<tr><td><a href="https://wg21.link/P0600R1">P0600R1</a></td><td>LWG</td><td>nodiscard in the Library</td><td>Albuquerque</td><td><I>In Progress</I></td><td>7.0</td></tr>
|
||||||
|
|
Loading…
Reference in New Issue