/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include <AK/Platform.h>
#include <AK/Types.h>

namespace AK {
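
// Wrappers around the GCC/Clang __atomic builtins, giving AK a std::atomic-style
// API built directly on the compiler intrinsics.
//
// atomic_signal_fence() is a compiler-only barrier: it prevents the compiler
// from reordering memory accesses across it, but emits no CPU fence instruction.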
static inline void atomic_signal_fence(MemoryOrder order) noexcept
{
    return __atomic_signal_fence(order);
}
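
// atomic_thread_fence() orders memory accesses against other threads; unlike
// the signal fence it may emit an actual fence instruction.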
static inline void atomic_thread_fence(MemoryOrder order) noexcept
{
    return __atomic_thread_fence(order);
}
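
// Convenience helper: a full acquire-release barrier against both the compiler
// and the CPU.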
static inline void full_memory_barrier() noexcept
{
    atomic_signal_fence(AK::MemoryOrder::memory_order_acq_rel);
    atomic_thread_fence(AK::MemoryOrder::memory_order_acq_rel);
}
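
// Free functions that apply atomic operations to raw (volatile) storage,
// mirroring the __atomic builtins; exchange() returns the previous value.
// The pointer overloads strip `volatile` from the pointee type via RemoveVolatile.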
template<typename T>
static inline T atomic_exchange(volatile T* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(var, desired, order);
}

template<typename T, typename V = typename RemoveVolatile<T>::Type>
static inline V* atomic_exchange(volatile T** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(var, desired, order);
}
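
// std::nullptr_t overloads: template argument deduction cannot infer V from a
// bare `nullptr` argument, so these spell it out; the const_cast gives the
// builtin a concrete V** operand.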
template<typename T, typename V = typename RemoveVolatile<T>::Type>
static inline V* atomic_exchange(volatile T** var, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(const_cast<V**>(var), nullptr, order);
}
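
// Strong compare-and-swap. On failure, `expected` is updated to the value that
// was actually observed. The __atomic builtin forbids a failure ordering with
// release semantics, so a release/acq_rel request is split into a release
// success ordering paired with an acquire failure ordering.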
template<typename T>
[[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T* var, T& expected, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    if (order == memory_order_acq_rel || order == memory_order_release)
        return __atomic_compare_exchange_n(var, &expected, desired, false, memory_order_release, memory_order_acquire);
    else
        return __atomic_compare_exchange_n(var, &expected, desired, false, order, order);
}

template<typename T, typename V = typename RemoveVolatile<T>::Type>
[[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T** var, V*& expected, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    if (order == memory_order_acq_rel || order == memory_order_release)
        return __atomic_compare_exchange_n(var, &expected, desired, false, memory_order_release, memory_order_acquire);
    else
        return __atomic_compare_exchange_n(var, &expected, desired, false, order, order);
}

template<typename T, typename V = typename RemoveVolatile<T>::Type>
[[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T** var, V*& expected, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
    if (order == memory_order_acq_rel || order == memory_order_release)
        return __atomic_compare_exchange_n(const_cast<V**>(var), &expected, nullptr, false, memory_order_release, memory_order_acquire);
    else
        return __atomic_compare_exchange_n(const_cast<V**>(var), &expected, nullptr, false, order, order);
}
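
// The atomic_fetch_* helpers apply the operation atomically and return the
// value the object held immediately before it.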
template<typename T>
static inline T atomic_fetch_add(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_add(var, val, order);
}

template<typename T>
static inline T atomic_fetch_sub(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_sub(var, val, order);
}

template<typename T>
static inline T atomic_fetch_and(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_and(var, val, order);
}

template<typename T>
static inline T atomic_fetch_or(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_or(var, val, order);
}

template<typename T>
static inline T atomic_fetch_xor(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_xor(var, val, order);
}
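
// Plain atomic loads and stores, with the same pointer and nullptr_t overloads
// as above.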
template<typename T>
static inline T atomic_load(volatile T* var, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_load_n(var, order);
}

template<typename T, typename V = typename RemoveVolatile<T>::Type>
static inline V* atomic_load(volatile T** var, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_load_n(const_cast<V**>(var), order);
}

template<typename T>
static inline void atomic_store(volatile T* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(var, desired, order);
}

template<typename T, typename V = typename RemoveVolatile<T>::Type>
static inline void atomic_store(volatile T** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(var, desired, order);
}

template<typename T, typename V = typename RemoveVolatile<T>::Type>
static inline void atomic_store(volatile T** var, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(const_cast<V**>(var), nullptr, order);
}
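
// Atomic<T>: a value wrapper in the spirit of std::atomic<T>. Copy and move are
// deleted, and every operation is volatile-qualified so it also works on
// volatile Atomic instances. DefaultMemoryOrder is used whenever a call site
// does not pass an explicit MemoryOrder.
//
// Illustrative usage (the names below are just an example):
//
//     Atomic<u32> counter;
//     counter.fetch_add(1, AK::MemoryOrder::memory_order_relaxed);
//     u32 seen = counter.load(); // seq_cst by default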
template<typename T, MemoryOrder DefaultMemoryOrder = AK::MemoryOrder::memory_order_seq_cst>
class Atomic {
    T m_value { 0 };

public:
    Atomic() noexcept = default;
    Atomic& operator=(const Atomic&) volatile = delete;
    Atomic& operator=(Atomic&&) volatile = delete;
    Atomic(const Atomic&) = delete;
    Atomic(Atomic&&) = delete;

    Atomic(T val) noexcept
        : m_value(val)
    {
    }
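
    // Expose the underlying storage so the raw atomic_*() free functions above
    // can be applied to it.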
    volatile T* ptr() noexcept
    {
        return &m_value;
    }

    T exchange(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_exchange_n(&m_value, desired, order);
    }

    [[nodiscard]] bool compare_exchange_strong(T& expected, T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange_n(&m_value, &expected, desired, false, memory_order_release, memory_order_acquire);
        else
            return __atomic_compare_exchange_n(&m_value, &expected, desired, false, order, order);
    }

    ALWAYS_INLINE T operator++() volatile noexcept
    {
        return fetch_add(1) + 1;
    }

    ALWAYS_INLINE T operator++(int) volatile noexcept
    {
        return fetch_add(1);
    }

    ALWAYS_INLINE T operator+=(T val) volatile noexcept
    {
        return fetch_add(val) + val;
    }

    ALWAYS_INLINE T fetch_add(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_add(&m_value, val, order);
    }

    ALWAYS_INLINE T operator--() volatile noexcept
    {
        return fetch_sub(1) - 1;
    }

    ALWAYS_INLINE T operator--(int) volatile noexcept
    {
        return fetch_sub(1);
    }

    ALWAYS_INLINE T operator-=(T val) volatile noexcept
    {
        return fetch_sub(val) - val;
    }

    ALWAYS_INLINE T fetch_sub(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_sub(&m_value, val, order);
    }

    ALWAYS_INLINE T operator&=(T val) volatile noexcept
    {
        return fetch_and(val) & val;
    }

    ALWAYS_INLINE T fetch_and(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_and(&m_value, val, order);
    }

    ALWAYS_INLINE T operator|=(T val) volatile noexcept
    {
        return fetch_or(val) | val;
    }

    ALWAYS_INLINE T fetch_or(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_or(&m_value, val, order);
    }

    ALWAYS_INLINE T operator^=(T val) volatile noexcept
    {
        return fetch_xor(val) ^ val;
    }

    ALWAYS_INLINE T fetch_xor(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_xor(&m_value, val, order);
    }

    ALWAYS_INLINE operator T() const volatile noexcept
    {
        return load();
    }

    ALWAYS_INLINE T load(MemoryOrder order = DefaultMemoryOrder) const volatile noexcept
    {
        return __atomic_load_n(&m_value, order);
    }

    ALWAYS_INLINE T operator=(T desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    ALWAYS_INLINE void store(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        __atomic_store_n(&m_value, desired, order);
    }

    ALWAYS_INLINE bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(m_value), &m_value);
    }
};
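
// Partial specialization for pointer types: arithmetic steps in whole elements
// (scaled by sizeof(T)), matching std::atomic<T*> semantics.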
template<typename T, MemoryOrder DefaultMemoryOrder>
class Atomic<T*, DefaultMemoryOrder> {
    T* m_value { nullptr };

public:
    Atomic() noexcept = default;
    Atomic& operator=(const Atomic&) volatile = delete;
    Atomic& operator=(Atomic&&) volatile = delete;
    Atomic(const Atomic&) = delete;
    Atomic(Atomic&&) = delete;

    Atomic(T* val) noexcept
        : m_value(val)
    {
    }

    volatile T** ptr() noexcept
    {
        // &m_value has type T**; implicitly adding volatile at the inner level
        // is not a legal qualification conversion, so add it explicitly.
        return const_cast<volatile T**>(&m_value);
    }

    T* exchange(T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_exchange_n(&m_value, desired, order);
    }

    [[nodiscard]] bool compare_exchange_strong(T*& expected, T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange_n(&m_value, &expected, desired, false, memory_order_release, memory_order_acquire);
        else
            return __atomic_compare_exchange_n(&m_value, &expected, desired, false, order, order);
    }

    T* operator++() volatile noexcept
    {
        return fetch_add(1) + 1;
    }

    T* operator++(int) volatile noexcept
    {
        return fetch_add(1);
    }

    T* operator+=(ptrdiff_t val) volatile noexcept
    {
        return fetch_add(val) + val;
    }
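
    // The __atomic_fetch_add/sub builtins do not perform C-style pointer
    // scaling, so fetch_add()/fetch_sub() multiply by the element size to
    // advance in whole elements.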
    T* fetch_add(ptrdiff_t val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_add(&m_value, val * sizeof(*m_value), order);
    }

    T* operator--() volatile noexcept
    {
        return fetch_sub(1) - 1;
    }

    T* operator--(int) volatile noexcept
    {
        return fetch_sub(1);
    }

    T* operator-=(ptrdiff_t val) volatile noexcept
    {
        return fetch_sub(val) - val;
    }

    T* fetch_sub(ptrdiff_t val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_sub(&m_value, val * sizeof(*m_value), order);
    }

    operator T*() const volatile noexcept
    {
        return load();
    }

    T* load(MemoryOrder order = DefaultMemoryOrder) const volatile noexcept
    {
        return __atomic_load_n(&m_value, order);
    }

    T* operator=(T* desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    void store(T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        __atomic_store_n(&m_value, desired, order);
    }

    bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(m_value), &m_value);
    }
};

}

using AK::Atomic;
using AK::full_memory_barrier;