/*
 * Copyright (c) 2018-2020, Andreas Kling <andreas@ladybird.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Concepts.h>
#include <AK/Platform.h>
#include <AK/Types.h>

namespace AK {
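
// The free functions below are thin wrappers around the GCC/Clang __atomic
// builtins. They let AK code perform atomic operations on raw (volatile)
// objects without pulling in the standard <atomic> header.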

static inline void atomic_signal_fence(MemoryOrder order) noexcept
{
    return __atomic_signal_fence(order);
}

static inline void atomic_thread_fence(MemoryOrder order) noexcept
{
    return __atomic_thread_fence(order);
}
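
// A signal fence only constrains the compiler (ordering with respect to signal
// handlers on the same thread); a thread fence additionally emits whatever CPU
// barrier is needed for inter-thread ordering. full_memory_barrier() issues
// both with acquire-release semantics.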
static inline void full_memory_barrier() noexcept
{
    atomic_signal_fence(AK::MemoryOrder::memory_order_acq_rel);
    atomic_thread_fence(AK::MemoryOrder::memory_order_acq_rel);
}

template<typename T>
static inline T atomic_exchange(T volatile* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(var, desired, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline V* atomic_exchange(T volatile** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(var, desired, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline V* atomic_exchange(T volatile** var, nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_exchange_n(const_cast<V**>(var), nullptr, order);
}
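
// A minimal usage sketch (`s_flags` is a hypothetical variable):
//
//     static u32 volatile s_flags { 0 };
//     u32 previous = atomic_exchange(&s_flags, 0u); // atomically take and clear
//
// Note for the compare-exchange overloads below: the __atomic_compare_exchange
// builtins reject release orderings on the failure path, so an acq_rel/release
// request is mapped to release on success and acquire on failure.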
2020-06-12 13:20:35 +00:00
template < typename T >
2022-10-16 22:06:11 +00:00
[ [ nodiscard ] ] static inline bool atomic_compare_exchange_strong ( T volatile * var , T & expected , T desired , MemoryOrder order = memory_order_seq_cst ) noexcept
2020-06-02 04:55:30 +00:00
{
if ( order = = memory_order_acq_rel | | order = = memory_order_release )
return __atomic_compare_exchange_n ( var , & expected , desired , false , memory_order_release , memory_order_acquire ) ;
2021-11-06 17:24:53 +00:00
return __atomic_compare_exchange_n ( var , & expected , desired , false , order , order ) ;
2020-06-02 04:55:30 +00:00
}
2021-04-10 13:59:06 +00:00
template < typename T , typename V = RemoveVolatile < T > >
2022-10-16 22:06:11 +00:00
[ [ nodiscard ] ] static inline bool atomic_compare_exchange_strong ( T volatile * * var , V * & expected , V * desired , MemoryOrder order = memory_order_seq_cst ) noexcept
2020-07-02 14:11:06 +00:00
{
if ( order = = memory_order_acq_rel | | order = = memory_order_release )
return __atomic_compare_exchange_n ( var , & expected , desired , false , memory_order_release , memory_order_acquire ) ;
2021-11-06 17:24:53 +00:00
return __atomic_compare_exchange_n ( var , & expected , desired , false , order , order ) ;
2020-07-02 14:11:06 +00:00
}
2021-04-10 13:59:06 +00:00
template < typename T , typename V = RemoveVolatile < T > >
2022-12-13 06:59:30 +00:00
[ [ nodiscard ] ] static inline bool atomic_compare_exchange_strong ( T volatile * * var , V * & expected , nullptr_t , MemoryOrder order = memory_order_seq_cst ) noexcept
2020-07-02 14:11:06 +00:00
{
if ( order = = memory_order_acq_rel | | order = = memory_order_release )
return __atomic_compare_exchange_n ( const_cast < V * * > ( var ) , & expected , nullptr , false , memory_order_release , memory_order_acquire ) ;
2021-11-06 17:24:53 +00:00
return __atomic_compare_exchange_n ( const_cast < V * * > ( var ) , & expected , nullptr , false , order , order ) ;
2020-07-02 14:11:06 +00:00
}
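
// Each atomic_fetch_* helper applies the operation and returns the value the
// object held immediately before it.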

template<typename T>
static inline T atomic_fetch_add(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_add(var, val, order);
}

template<typename T>
static inline T atomic_fetch_sub(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_sub(var, val, order);
}

template<typename T>
static inline T atomic_fetch_and(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_and(var, val, order);
}

template<typename T>
static inline T atomic_fetch_or(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_or(var, val, order);
}

template<typename T>
static inline T atomic_fetch_xor(T volatile* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_fetch_xor(var, val, order);
}

template<typename T>
static inline T atomic_load(T volatile* var, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_load_n(var, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline V* atomic_load(T volatile** var, MemoryOrder order = memory_order_seq_cst) noexcept
{
    return __atomic_load_n(const_cast<V**>(var), order);
}
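
// Spin-loop hint: tells the CPU that we are busy-waiting so it can relax
// (x86 PAUSE, ARM YIELD); compiles to nothing on architectures without such
// a hint instruction.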
static inline void atomic_pause()
{
#if __has_builtin(__builtin_ia32_pause)
    __builtin_ia32_pause();
#elif __has_builtin(__builtin_arm_yield)
    __builtin_arm_yield();
#endif
}
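
// A minimal spin-wait sketch (`s_ready` is a hypothetical flag):
//
//     static bool volatile s_ready { false };
//     while (!atomic_load(&s_ready, memory_order_acquire))
//         atomic_pause();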

template<typename T>
static inline void atomic_store(T volatile* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(var, desired, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline void atomic_store(T volatile** var, V* desired, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(var, desired, order);
}

template<typename T, typename V = RemoveVolatile<T>>
static inline void atomic_store(T volatile** var, nullptr_t, MemoryOrder order = memory_order_seq_cst) noexcept
{
    __atomic_store_n(const_cast<V**>(var), nullptr, order);
}

template<typename T>
static inline bool atomic_is_lock_free(T volatile* ptr = nullptr) noexcept
{
    return __atomic_is_lock_free(sizeof(T), ptr);
}
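
// Atomic<T>: a value wrapper in the spirit of std::atomic, with a configurable
// default memory order. The primary template covers fundamental and enum types;
// the partial specializations below add integer and pointer arithmetic.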

template<typename T, MemoryOrder DefaultMemoryOrder = AK::MemoryOrder::memory_order_seq_cst>
class Atomic {
    // FIXME: This should work through concepts/requires clauses, but according to the compiler,
    //        "IsIntegral is not more specialized than IsFundamental".
    //        Additionally, enums are not fundamental types, except that they behave like them in every observable way.
    static_assert(IsFundamental<T> | IsEnum<T>, "Atomic doesn't support non-primitive types, because it relies on compiler intrinsics. If you put non-primitives into it, you'll get linker errors like \"undefined reference to __atomic_store\".");

    T m_value { 0 };

public:
    Atomic() noexcept = default;
    Atomic& operator=(Atomic const&) volatile = delete;
    Atomic& operator=(Atomic&&) volatile = delete;
    Atomic(Atomic const&) = delete;
    Atomic(Atomic&&) = delete;

    constexpr Atomic(T val) noexcept
        : m_value(val)
    {
    }

    T volatile* ptr() noexcept
    {
        return &m_value;
    }

    T exchange(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        // We use this hack to prevent unnecessary initialization, even if T has a default constructor.
        // NOTE: Will need to investigate if it pessimizes the generated assembly.
        alignas(T) u8 buffer[sizeof(T)];
        T* ret = reinterpret_cast<T*>(buffer);
        __atomic_exchange(&m_value, &desired, ret, order);
        return *ret;
    }

    [[nodiscard]] bool compare_exchange_strong(T& expected, T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange(&m_value, &expected, &desired, false, memory_order_release, memory_order_acquire);
        return __atomic_compare_exchange(&m_value, &expected, &desired, false, order, order);
    }

    ALWAYS_INLINE operator T() const volatile noexcept
    {
        return load();
    }

    ALWAYS_INLINE T load(MemoryOrder order = DefaultMemoryOrder) const volatile noexcept
    {
        alignas(T) u8 buffer[sizeof(T)];
        T* ret = reinterpret_cast<T*>(buffer);
        __atomic_load(&m_value, ret, order);
        return *ret;
    }

    // NOLINTNEXTLINE(misc-unconventional-assign-operator) We want operator= to exchange the value, so returning an object of type Atomic& here does not make sense
    ALWAYS_INLINE T operator=(T desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    ALWAYS_INLINE void store(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        __atomic_store(&m_value, &desired, order);
    }

    ALWAYS_INLINE bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(m_value), &m_value);
    }
};
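
// Partial specialization for integral types: adds fetch_add/sub/and/or/xor and
// the corresponding increment, decrement and compound-assignment operators.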
template<Integral T, MemoryOrder DefaultMemoryOrder>
class Atomic<T, DefaultMemoryOrder> {
    T m_value { 0 };

public:
    Atomic() noexcept = default;
    Atomic& operator=(Atomic const&) volatile = delete;
    Atomic& operator=(Atomic&&) volatile = delete;
    Atomic(Atomic const&) = delete;
    Atomic(Atomic&&) = delete;

    constexpr Atomic(T val) noexcept
        : m_value(val)
    {
    }

    T volatile* ptr() noexcept
    {
        return &m_value;
    }

    T exchange(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_exchange_n(&m_value, desired, order);
    }

    [[nodiscard]] bool compare_exchange_strong(T& expected, T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange_n(&m_value, &expected, desired, false, memory_order_release, memory_order_acquire);
        return __atomic_compare_exchange_n(&m_value, &expected, desired, false, order, order);
    }

    ALWAYS_INLINE T operator++() volatile noexcept
    {
        return fetch_add(1) + 1;
    }

    ALWAYS_INLINE T operator++(int) volatile noexcept
    {
        return fetch_add(1);
    }

    ALWAYS_INLINE T operator+=(T val) volatile noexcept
    {
        return fetch_add(val) + val;
    }

    ALWAYS_INLINE T fetch_add(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_add(&m_value, val, order);
    }

    ALWAYS_INLINE T operator--() volatile noexcept
    {
        return fetch_sub(1) - 1;
    }

    ALWAYS_INLINE T operator--(int) volatile noexcept
    {
        return fetch_sub(1);
    }

    ALWAYS_INLINE T operator-=(T val) volatile noexcept
    {
        return fetch_sub(val) - val;
    }

    ALWAYS_INLINE T fetch_sub(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        T volatile* ptr = &m_value;
        // FIXME: GCC > 12 will wrongly warn on -Wstringop-overflow here with ASAN+UBSAN
#if defined(AK_COMPILER_GCC) && defined(HAS_ADDRESS_SANITIZER)
        if (!ptr)
            __builtin_unreachable();
#endif
        return __atomic_fetch_sub(ptr, val, order);
    }

    ALWAYS_INLINE T operator&=(T val) volatile noexcept
    {
        return fetch_and(val) & val;
    }

    ALWAYS_INLINE T fetch_and(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_and(&m_value, val, order);
    }

    ALWAYS_INLINE T operator|=(T val) volatile noexcept
    {
        return fetch_or(val) | val;
    }

    ALWAYS_INLINE T fetch_or(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_or(&m_value, val, order);
    }

    ALWAYS_INLINE T operator^=(T val) volatile noexcept
    {
        return fetch_xor(val) ^ val;
    }

    ALWAYS_INLINE T fetch_xor(T val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_xor(&m_value, val, order);
    }

    ALWAYS_INLINE operator T() const volatile noexcept
    {
        return load();
    }

    ALWAYS_INLINE T load(MemoryOrder order = DefaultMemoryOrder) const volatile noexcept
    {
        return __atomic_load_n(&m_value, order);
    }

    // NOLINTNEXTLINE(misc-unconventional-assign-operator) We want operator= to exchange the value, so returning an object of type Atomic& here does not make sense
    ALWAYS_INLINE T operator=(T desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    ALWAYS_INLINE void store(T desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        __atomic_store_n(&m_value, desired, order);
    }

    ALWAYS_INLINE bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(m_value), &m_value);
    }
};
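
// Partial specialization for pointers: fetch_add()/fetch_sub() scale the
// operand by sizeof(T), so arithmetic advances by whole elements, exactly
// like ordinary pointer arithmetic.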
template<typename T, MemoryOrder DefaultMemoryOrder>
class Atomic<T*, DefaultMemoryOrder> {
    T* m_value { nullptr };

public:
    Atomic() noexcept = default;
    Atomic& operator=(Atomic const&) volatile = delete;
    Atomic& operator=(Atomic&&) volatile = delete;
    Atomic(Atomic const&) = delete;
    Atomic(Atomic&&) = delete;

    constexpr Atomic(T* val) noexcept
        : m_value(val)
    {
    }

    T* volatile* ptr() noexcept
    {
        return &m_value;
    }

    T* exchange(T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_exchange_n(&m_value, desired, order);
    }

    [[nodiscard]] bool compare_exchange_strong(T*& expected, T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange_n(&m_value, &expected, desired, false, memory_order_release, memory_order_acquire);
        return __atomic_compare_exchange_n(&m_value, &expected, desired, false, order, order);
    }

    T* operator++() volatile noexcept
    {
        return fetch_add(1) + 1;
    }

    T* operator++(int) volatile noexcept
    {
        return fetch_add(1);
    }

    T* operator+=(ptrdiff_t val) volatile noexcept
    {
        return fetch_add(val) + val;
    }

    T* fetch_add(ptrdiff_t val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_add(&m_value, val * sizeof(*m_value), order);
    }

    T* operator--() volatile noexcept
    {
        return fetch_sub(1) - 1;
    }

    T* operator--(int) volatile noexcept
    {
        return fetch_sub(1);
    }

    T* operator-=(ptrdiff_t val) volatile noexcept
    {
        return fetch_sub(val) - val;
    }

    T* fetch_sub(ptrdiff_t val, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        return __atomic_fetch_sub(&m_value, val * sizeof(*m_value), order);
    }

    operator T*() const volatile noexcept
    {
        return load();
    }

    T* load(MemoryOrder order = DefaultMemoryOrder) const volatile noexcept
    {
        return __atomic_load_n(&m_value, order);
    }

    // NOLINTNEXTLINE(misc-unconventional-assign-operator) We want operator= to exchange the value, so returning an object of type Atomic& here does not make sense
    T* operator=(T* desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    void store(T* desired, MemoryOrder order = DefaultMemoryOrder) volatile noexcept
    {
        __atomic_store_n(&m_value, desired, order);
    }

    bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(m_value), &m_value);
    }
};
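
// A minimal usage sketch (`s_counter` is a hypothetical counter):
//
//     Atomic<u32> s_counter { 0 };
//     ++s_counter;                                     // seq_cst fetch_add(1)
//     u32 seen = s_counter.load(memory_order_relaxed);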
}

#if USING_AK_GLOBALLY
using AK::Atomic;
using AK::full_memory_barrier;
#endif