ThreadSafeNonnullRefPtr.h

/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Assertions.h>
#include <AK/Atomic.h>
#include <AK/Format.h>
#include <AK/Traits.h>
#include <AK/Types.h>
#ifdef KERNEL
#    include <Kernel/Arch/Processor.h>
#    include <Kernel/Arch/ScopedCritical.h>
#endif
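
// Byte value used to scrub the stored pointer bits in the destructor when
// SANITIZE_PTRS is defined, so use-after-destruction bugs are easier to spot.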
#define THREADSAFENONNULLREFPTR_SCRUB_BYTE 0xa1

namespace AK {

template<typename T>
class OwnPtr;
template<typename T, typename PtrTraits>
class RefPtr;

template<typename T>
ALWAYS_INLINE void ref_if_not_null(T* ptr)
{
    if (ptr)
        ptr->ref();
}

template<typename T>
ALWAYS_INLINE void unref_if_not_null(T* ptr)
{
    if (ptr)
        ptr->unref();
}
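
// NonnullRefPtr is a reference-counting pointer that can never be null. This
// thread-safe variant stores the T* in an Atomic<FlatPtr> and repurposes bit 0 of
// the (at least 2-byte aligned) pointer as a spin-lock bit, so that concurrent
// copies and reassignments of the same NonnullRefPtr don't lose or leak references.
// Note that not every operation is atomic as a whole (see swap()).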
template<typename T>
class [[nodiscard]] NonnullRefPtr {
    template<typename U, typename P>
    friend class RefPtr;
    template<typename U>
    friend class NonnullRefPtr;
    template<typename U>
    friend class WeakPtr;

public:
    using ElementType = T;

    enum AdoptTag { Adopt };

    ALWAYS_INLINE NonnullRefPtr(const T& object)
        : m_bits((FlatPtr)&object)
    {
        VERIFY(!(m_bits & 1));
        const_cast<T&>(object).ref();
    }
    template<typename U>
    ALWAYS_INLINE NonnullRefPtr(const U& object) requires(IsConvertible<U*, T*>)
        : m_bits((FlatPtr) static_cast<const T*>(&object))
    {
        VERIFY(!(m_bits & 1));
        const_cast<T&>(static_cast<const T&>(object)).ref();
    }
    ALWAYS_INLINE NonnullRefPtr(AdoptTag, T& object)
        : m_bits((FlatPtr)&object)
    {
        VERIFY(!(m_bits & 1));
    }
    ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr&& other)
        : m_bits((FlatPtr)&other.leak_ref())
    {
        VERIFY(!(m_bits & 1));
    }
    template<typename U>
    ALWAYS_INLINE NonnullRefPtr(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
        : m_bits((FlatPtr)&other.leak_ref())
    {
        VERIFY(!(m_bits & 1));
    }
    ALWAYS_INLINE NonnullRefPtr(const NonnullRefPtr& other)
        : m_bits((FlatPtr)other.add_ref())
    {
        VERIFY(!(m_bits & 1));
    }
    template<typename U>
    ALWAYS_INLINE NonnullRefPtr(const NonnullRefPtr<U>& other) requires(IsConvertible<U*, T*>)
        : m_bits((FlatPtr)other.add_ref())
    {
        VERIFY(!(m_bits & 1));
    }
    ALWAYS_INLINE ~NonnullRefPtr()
    {
        assign(nullptr);
#ifdef SANITIZE_PTRS
        m_bits.store(explode_byte(THREADSAFENONNULLREFPTR_SCRUB_BYTE), AK::MemoryOrder::memory_order_relaxed);
#endif
    }

    template<typename U>
    NonnullRefPtr(const OwnPtr<U>&) = delete;
    template<typename U>
    NonnullRefPtr& operator=(const OwnPtr<U>&) = delete;
    template<typename U>
    NonnullRefPtr(const RefPtr<U>&) = delete;
    template<typename U>
    NonnullRefPtr& operator=(const RefPtr<U>&) = delete;
    NonnullRefPtr(const RefPtr<T>&) = delete;
    NonnullRefPtr& operator=(const RefPtr<T>&) = delete;

    NonnullRefPtr& operator=(const NonnullRefPtr& other)
    {
        if (this != &other)
            assign(other.add_ref());
        return *this;
    }
    template<typename U>
    NonnullRefPtr& operator=(const NonnullRefPtr<U>& other) requires(IsConvertible<U*, T*>)
    {
        assign(other.add_ref());
        return *this;
    }
    ALWAYS_INLINE NonnullRefPtr& operator=(NonnullRefPtr&& other)
    {
        if (this != &other)
            assign(&other.leak_ref());
        return *this;
    }
    template<typename U>
    NonnullRefPtr& operator=(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
    {
        assign(&other.leak_ref());
        return *this;
    }
    NonnullRefPtr& operator=(const T& object)
    {
        const_cast<T&>(object).ref();
        assign(const_cast<T*>(&object));
        return *this;
    }
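
    // Relinquishes the reference: returns the pointee without unref()ing it.
    // The caller becomes responsible for that reference.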
    [[nodiscard]] ALWAYS_INLINE T& leak_ref()
    {
        T* ptr = exchange(nullptr);
        VERIFY(ptr);
        return *ptr;
    }

    ALWAYS_INLINE RETURNS_NONNULL T* ptr()
    {
        return as_nonnull_ptr();
    }
    ALWAYS_INLINE RETURNS_NONNULL const T* ptr() const
    {
        return as_nonnull_ptr();
    }

    ALWAYS_INLINE RETURNS_NONNULL T* operator->()
    {
        return as_nonnull_ptr();
    }
    ALWAYS_INLINE RETURNS_NONNULL const T* operator->() const
    {
        return as_nonnull_ptr();
    }

    ALWAYS_INLINE T& operator*()
    {
        return *as_nonnull_ptr();
    }
    ALWAYS_INLINE const T& operator*() const
    {
        return *as_nonnull_ptr();
    }

    ALWAYS_INLINE RETURNS_NONNULL operator T*()
    {
        return as_nonnull_ptr();
    }
    ALWAYS_INLINE RETURNS_NONNULL operator const T*() const
    {
        return as_nonnull_ptr();
    }

    ALWAYS_INLINE operator T&()
    {
        return *as_nonnull_ptr();
    }
    ALWAYS_INLINE operator const T&() const
    {
        return *as_nonnull_ptr();
    }

    operator bool() const = delete;
    bool operator!() const = delete;

    void swap(NonnullRefPtr& other)
    {
        if (this == &other)
            return;

        // NOTE: swap is not atomic!
        T* other_ptr = other.exchange(nullptr);
        T* ptr = exchange(other_ptr);
        other.exchange(ptr);
    }

    template<typename U>
    void swap(NonnullRefPtr<U>& other) requires(IsConvertible<U*, T*>)
    {
        // NOTE: swap is not atomic!
        U* other_ptr = other.exchange(nullptr);
        T* ptr = exchange(other_ptr);
        other.exchange(ptr);
    }

    // clang-format off
private:
    NonnullRefPtr() = delete;
    // clang-format on

    ALWAYS_INLINE T* as_ptr() const
    {
        return (T*)(m_bits.load(AK::MemoryOrder::memory_order_relaxed) & ~(FlatPtr)1);
    }

    ALWAYS_INLINE RETURNS_NONNULL T* as_nonnull_ptr() const
    {
        T* ptr = (T*)(m_bits.load(AK::MemoryOrder::memory_order_relaxed) & ~(FlatPtr)1);
        VERIFY(ptr);
        return ptr;
    }
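
    // Runs `f` with the raw pointer while the lock bit is held, so exchange() and
    // add_ref() on other threads spin until `f` returns.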
    template<typename F>
    void do_while_locked(F f) const
    {
#ifdef KERNEL
        // We don't want to be pre-empted while we have the lock bit set
        Kernel::ScopedCritical critical;
#endif
        FlatPtr bits;
        for (;;) {
            bits = m_bits.fetch_or(1, AK::MemoryOrder::memory_order_acq_rel);
            if (!(bits & 1))
                break;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }
        VERIFY(!(bits & 1));
        f((T*)bits);
        m_bits.store(bits, AK::MemoryOrder::memory_order_release);
    }

    ALWAYS_INLINE void assign(T* new_ptr)
    {
        T* prev_ptr = exchange(new_ptr);
        unref_if_not_null(prev_ptr);
    }
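
    // Atomically swaps in `new_ptr` and returns the previous pointer, spinning
    // while another thread holds the lock bit.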
    ALWAYS_INLINE T* exchange(T* new_ptr)
    {
        VERIFY(!((FlatPtr)new_ptr & 1));
#ifdef KERNEL
        // We don't want to be pre-empted while we have the lock bit set
        Kernel::ScopedCritical critical;
#endif
        // Only exchange while not locked
        FlatPtr expected = m_bits.load(AK::MemoryOrder::memory_order_relaxed);
        for (;;) {
            expected &= ~(FlatPtr)1; // only if lock bit is not set
            if (m_bits.compare_exchange_strong(expected, (FlatPtr)new_ptr, AK::MemoryOrder::memory_order_acq_rel))
                break;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }
        VERIFY(!(expected & 1));
        return (T*)expected;
    }
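
    // Briefly takes the lock bit, ref()s the pointee (if any), releases the lock,
    // and returns the raw pointer so the caller owns the new reference.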
    T* add_ref() const
    {
#ifdef KERNEL
        // We don't want to be pre-empted while we have the lock bit set
        Kernel::ScopedCritical critical;
#endif
        // Lock the pointer
        FlatPtr expected = m_bits.load(AK::MemoryOrder::memory_order_relaxed);
        for (;;) {
            expected &= ~(FlatPtr)1; // only if lock bit is not set
            if (m_bits.compare_exchange_strong(expected, expected | 1, AK::MemoryOrder::memory_order_acq_rel))
                break;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }
        // Add a reference now that we locked the pointer
        ref_if_not_null((T*)expected);
        // Unlock the pointer again
        m_bits.store(expected, AK::MemoryOrder::memory_order_release);
        return (T*)expected;
    }
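
    // The T* lives in the upper bits; bit 0 is the lock bit taken by add_ref(),
    // exchange() and do_while_locked().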
    mutable Atomic<FlatPtr> m_bits { 0 };
};
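
// adopt_ref() takes over an object whose initial reference the caller already owns,
// wrapping it without ref()ing it again.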
template<typename T>
inline NonnullRefPtr<T> adopt_ref(T& object)
{
    return NonnullRefPtr<T>(NonnullRefPtr<T>::Adopt, object);
}

template<typename T>
struct Formatter<NonnullRefPtr<T>> : Formatter<const T*> {
    ErrorOr<void> format(FormatBuilder& builder, const NonnullRefPtr<T>& value)
    {
        return Formatter<const T*>::format(builder, value.ptr());
    }
};

template<typename T, typename U>
inline void swap(NonnullRefPtr<T>& a, NonnullRefPtr<U>& b) requires(IsConvertible<U*, T*>)
{
    a.swap(b);
}

template<typename T, class... Args>
requires(IsConstructible<T, Args...>) inline NonnullRefPtr<T> make_ref_counted(Args&&... args)
{
    return NonnullRefPtr<T>(NonnullRefPtr<T>::Adopt, *new T(forward<Args>(args)...));
}

// FIXME: Remove once P0960R3 is available in Clang.
template<typename T, class... Args>
inline NonnullRefPtr<T> make_ref_counted(Args&&... args)
{
    return NonnullRefPtr<T>(NonnullRefPtr<T>::Adopt, *new T { forward<Args>(args)... });
}
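
// Illustrative usage (not part of this header; `Thing` is a hypothetical class
// deriving from RefCounted<Thing>, and do_something() is a hypothetical method):
//
//     auto a = make_ref_counted<Thing>(); // freshly constructed object is adopted, refcount == 1
//     NonnullRefPtr<Thing> b = a;         // copy goes through add_ref(), refcount == 2
//     b->do_something();                  // no null check is ever needed
//     // a and b unref() in their destructors; Thing is deleted with the last reference.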

template<typename T>
struct Traits<NonnullRefPtr<T>> : public GenericTraits<NonnullRefPtr<T>> {
    using PeekType = T*;
    using ConstPeekType = const T*;
    static unsigned hash(const NonnullRefPtr<T>& p) { return ptr_hash(p.ptr()); }
    static bool equals(const NonnullRefPtr<T>& a, const NonnullRefPtr<T>& b) { return a.ptr() == b.ptr(); }
};

}

using AK::adopt_ref;
using AK::make_ref_counted;
using AK::NonnullRefPtr;