/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Concepts.h>
#include <AK/Platform.h>
#include <AK/Types.h>

namespace AK {

static inline void atomic_signal_fence(MemoryOrder order) noexcept
{
    return __atomic_signal_fence(order);
}

static inline void atomic_thread_fence(MemoryOrder order) noexcept
{
    return __atomic_thread_fence(order);
}

static inline void full_memory_barrier() noexcept
{
    atomic_signal_fence(AK::MemoryOrder::memory_order_acq_rel);
    atomic_thread_fence(AK::MemoryOrder::memory_order_acq_rel);
}
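
// Note: atomic_signal_fence() only constrains reordering by the compiler, while
// atomic_thread_fence() also orders the CPU; full_memory_barrier() issues both with
// acquire-release semantics to restrict reordering of memory accesses across it in
// both directions.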

template<typename T>
static inline T atomic_exchange(volatile T* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(var, desired, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline V* atomic_exchange(volatile T** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(var, desired, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline V* atomic_exchange(volatile T** var, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(const_cast<V**>(var), nullptr, order);
}
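
// Note: A bare nullptr argument carries no pointer type for V to be deduced from, hence the
// dedicated std::nullptr_t overloads; the const_cast strips volatile from the pointee type so
// the builtin's deduced result type matches V*.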

template<typename T>
[[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T* var, T& expected, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    if (order == memory_order_acq_rel || order == memory_order_release)
        return __atomic_compare_exchange_n(var, &expected, desired, false, memory_order_release, memory_order_acquire);
    return __atomic_compare_exchange_n(var, &expected, desired, false, order, order);
}
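
// Note: A compare-exchange must not use memory_order_release or memory_order_acq_rel as its
// failure order, which is why these overloads remap such orders to an explicit
// release/acquire pair before calling the builtin.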

template<typename T, typename V = RemoveVolatile<T>>
[[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T** var, V*& expected, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    if (order == memory_order_acq_rel || order == memory_order_release)
        return __atomic_compare_exchange_n(var, &expected, desired, false, memory_order_release, memory_order_acquire);
    return __atomic_compare_exchange_n(var, &expected, desired, false, order, order);
}

template<typename T, typename V = RemoveVolatile<T>>
[[nodiscard]] static inline bool atomic_compare_exchange_strong(volatile T** var, V*& expected, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
    if (order == memory_order_acq_rel || order == memory_order_release)
        return __atomic_compare_exchange_n(const_cast<V**>(var), &expected, nullptr, false, memory_order_release, memory_order_acquire);
    return __atomic_compare_exchange_n(const_cast<V**>(var), &expected, nullptr, false, order, order);
}

template<typename T>
static inline T atomic_fetch_add(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_add(var, val, order);
}

template<typename T>
static inline T atomic_fetch_sub(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_sub(var, val, order);
}

template<typename T>
static inline T atomic_fetch_and(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_and(var, val, order);
}

template<typename T>
static inline T atomic_fetch_or(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_or(var, val, order);
}

template<typename T>
static inline T atomic_fetch_xor(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_xor(var, val, order);
}

template<typename T>
static inline T atomic_load(volatile T* var, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_load_n(var, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline V* atomic_load(volatile T** var, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_load_n(const_cast<V**>(var), order);
}

template<typename T>
static inline void atomic_store(volatile T* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(var, desired, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline void atomic_store(volatile T** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(var, desired, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline void atomic_store(volatile T** var, std::nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(const_cast<V**>(var), nullptr, order);
}

template<typename T>
static inline bool atomic_is_lock_free(volatile T* ptr = nullptr) noexcept
{
    return __atomic_is_lock_free(sizeof(T), ptr);
}
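
// Usage sketch for the free functions above (illustrative; s_counter is hypothetical):
//
//     static u32 s_counter;
//     atomic_store(&s_counter, 0u);
//     atomic_fetch_add(&s_counter, 1u);
//     u32 seen = atomic_load(&s_counter); // 1, or more if other threads also incremented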

template<typename T, MemoryOrder DefaultMemoryOrder = AK::MemoryOrder::memory_order_seq_cst>
class Atomic {
    // FIXME: This should work through concepts/requires clauses, but according to the compiler,
    //        "IsIntegral is not more specialized than IsFundamental".
    // Additionally, enums are not fundamental types, even though they behave like them in every observable way.
    static_assert(IsFundamental<T> | IsEnum<T>, "Atomic doesn't support non-primitive types, because it relies on compiler intrinsics. If you put non-primitives into it, you'll get linker errors like \"undefined reference to __atomic_store\".");
    T m_value { 0 };

public:
    Atomic() noexcept = default;

    Atomic& operator=(Atomic const&) volatile = delete;
    Atomic& operator=(Atomic&&) volatile = delete;
    Atomic(Atomic const&) = delete;
    Atomic(Atomic&&) = delete;

    constexpr Atomic(T val) noexcept
        : m_value(val)
    {
    }

    volatile T* ptr() noexcept
    {
        return &m_value;
    }

    T exchange(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        // We use this hack to prevent unnecessary initialization, even if T has a default constructor.
        // NOTE: Will need to investigate if it pessimizes the generated assembly.
        alignas(T) u8 buffer[sizeof(T)];
        T* ret = reinterpret_cast<T*>(buffer);
        __atomic_exchange(&m_value, &desired, ret, order);
        return *ret;
    }

    [[nodiscard]] bool compare_exchange_strong(T& expected, T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange(&m_value, &expected, &desired, false, memory_order_release, memory_order_acquire);
        return __atomic_compare_exchange(&m_value, &expected, &desired, false, order, order);
    }

    ALWAYS_INLINE operator T() const volatile noexcept
    {
        return load();
    }

    ALWAYS_INLINE T load(MemoryOrder order = DefaultMemoryOrder) const volatile noexcept
    {
        alignas(T) u8 buffer[sizeof(T)];
        T* ret = reinterpret_cast<T*>(buffer);
        __atomic_load(&m_value, ret, order);
        return *ret;
    }

    // NOLINTNEXTLINE(misc-unconventional-assign-operator) We want operator= to exchange the value, so returning an object of type Atomic& here does not make sense
    ALWAYS_INLINE T operator=(T desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    ALWAYS_INLINE void store(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        __atomic_store(&m_value, &desired, order);
    }

    ALWAYS_INLINE bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(m_value), &m_value);
    }
};
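
// Usage sketch (illustrative): the primary template above covers the fundamental and enum types
// that the specializations below don't, e.g. scoped enums and floating-point types:
//
//     enum class State { Idle, Running };
//     Atomic<State> state { State::Idle };
//     State expected = State::Idle;
//     if (state.compare_exchange_strong(expected, State::Running)) {
//         // We won the transition; on failure, 'expected' holds the value actually observed.
//     }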

template<Integral T, MemoryOrder DefaultMemoryOrder>
class Atomic<T, DefaultMemoryOrder> {
    T m_value { 0 };

public:
    Atomic() noexcept = default;

    Atomic& operator=(Atomic const&) volatile = delete;
    Atomic& operator=(Atomic&&) volatile = delete;
    Atomic(Atomic const&) = delete;
    Atomic(Atomic&&) = delete;

    constexpr Atomic(T val) noexcept
        : m_value(val)
    {
    }

    volatile T* ptr() noexcept
    {
        return &m_value;
    }

    T exchange(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_exchange_n(&m_value, desired, order);
    }

    [[nodiscard]] bool compare_exchange_strong(T& expected, T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange_n(&m_value, &expected, desired, false, memory_order_release, memory_order_acquire);
        return __atomic_compare_exchange_n(&m_value, &expected, desired, false, order, order);
    }

    ALWAYS_INLINE T operator++() volatile noexcept
    {
        return fetch_add(1) + 1;
    }

    ALWAYS_INLINE T operator++(int) volatile noexcept
    {
        return fetch_add(1);
    }

    ALWAYS_INLINE T operator+=(T val) volatile noexcept
    {
        return fetch_add(val) + val;
    }

    ALWAYS_INLINE T fetch_add(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_add(&m_value, val, order);
    }

    ALWAYS_INLINE T operator--() volatile noexcept
    {
        return fetch_sub(1) - 1;
    }

    ALWAYS_INLINE T operator--(int) volatile noexcept
    {
        return fetch_sub(1);
    }

    ALWAYS_INLINE T operator-=(T val) volatile noexcept
    {
        return fetch_sub(val) - val;
    }

    ALWAYS_INLINE T fetch_sub(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_sub(&m_value, val, order);
    }

    ALWAYS_INLINE T operator&=(T val) volatile noexcept
    {
        return fetch_and(val) & val;
    }

    ALWAYS_INLINE T fetch_and(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_and(&m_value, val, order);
    }

    ALWAYS_INLINE T operator|=(T val) volatile noexcept
    {
        return fetch_or(val) | val;
    }

    ALWAYS_INLINE T fetch_or(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_or(&m_value, val, order);
    }

    ALWAYS_INLINE T operator^=(T val) volatile noexcept
    {
        return fetch_xor(val) ^ val;
    }

    ALWAYS_INLINE T fetch_xor(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_xor(&m_value, val, order);
    }

    ALWAYS_INLINE operator T() const volatile noexcept
    {
        return load();
    }

    ALWAYS_INLINE T load(MemoryOrder order = DefaultMemoryOrder) const volatile noexcept
    {
        return __atomic_load_n(&m_value, order);
    }

    // NOLINTNEXTLINE(misc-unconventional-assign-operator) We want operator= to exchange the value, so returning an object of type Atomic& here does not make sense
    ALWAYS_INLINE T operator=(T desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    ALWAYS_INLINE void store(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        __atomic_store_n(&m_value, desired, order);
    }

    ALWAYS_INLINE bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(m_value), &m_value);
    }
};
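
// Usage sketch (illustrative; delete_this() is hypothetical): the integral specialization above
// adds arithmetic and bitwise read-modify-write operations:
//
//     Atomic<u32> ref_count { 1 };
//     ++ref_count;
//     if (ref_count.fetch_sub(1, memory_order_acq_rel) == 1)
//         delete_this();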

template<typename T, MemoryOrder DefaultMemoryOrder>
class Atomic<T*, DefaultMemoryOrder> {
    T* m_value { nullptr };

public:
    Atomic() noexcept = default;

    Atomic& operator=(Atomic const&) volatile = delete;
    Atomic& operator=(Atomic&&) volatile = delete;
    Atomic(Atomic const&) = delete;
    Atomic(Atomic&&) = delete;

    constexpr Atomic(T* val) noexcept
        : m_value(val)
    {
    }

    volatile T** ptr() noexcept
    {
        return &m_value;
    }

    T* exchange(T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_exchange_n(&m_value, desired, order);
    }

    [[nodiscard]] bool compare_exchange_strong(T*& expected, T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange_n(&m_value, &expected, desired, false, memory_order_release, memory_order_acquire);
        return __atomic_compare_exchange_n(&m_value, &expected, desired, false, order, order);
    }

    T* operator++() volatile noexcept
    {
        return fetch_add(1) + 1;
    }

    T* operator++(int) volatile noexcept
    {
        return fetch_add(1);
    }

    T* operator+=(ptrdiff_t val) volatile noexcept
    {
        return fetch_add(val) + val;
    }
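
    // Note: The __atomic builtins do not scale pointer operands the way T* arithmetic does, so
    // fetch_add() and fetch_sub() multiply by sizeof(*m_value) to step in whole elements,
    // matching std::atomic<T*>.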
    T* fetch_add(ptrdiff_t val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_add(&m_value, val * sizeof(*m_value), order);
    }

    T* operator--() volatile noexcept
    {
        return fetch_sub(1) - 1;
    }

    T* operator--(int) volatile noexcept
    {
        return fetch_sub(1);
    }

    T* operator-=(ptrdiff_t val) volatile noexcept
    {
        return fetch_sub(val) - val;
    }

    T* fetch_sub(ptrdiff_t val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_sub(&m_value, val * sizeof(*m_value), order);
    }

    operator T*() const volatile noexcept
    {
        return load();
    }

    T* load(MemoryOrder order = DefaultMemoryOrder) const volatile noexcept
    {
        return __atomic_load_n(&m_value, order);
    }

    // NOLINTNEXTLINE(misc-unconventional-assign-operator) We want operator= to exchange the value, so returning an object of type Atomic& here does not make sense
    T* operator=(T* desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    void store(T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        __atomic_store_n(&m_value, desired, order);
    }

    bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(m_value), &m_value);
    }
};
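
// Usage sketch (illustrative): Atomic<T*> steps in whole elements, like ordinary pointer
// arithmetic:
//
//     static int s_values[4] {};
//     Atomic<int*> cursor { &s_values[0] };
//     int* previous = cursor.fetch_add(1); // returns &s_values[0]; cursor now points at s_values[1]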
}

using AK::Atomic;
using AK::full_memory_barrier;