// ThreadSafeRefPtr.h (SerenityOS AK)
  1. /*
  2. * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #pragma once
  7. #include <AK/Assertions.h>
  8. #include <AK/Atomic.h>
  9. #include <AK/Error.h>
  10. #include <AK/Format.h>
  11. #include <AK/NonnullRefPtr.h>
  12. #include <AK/StdLibExtras.h>
  13. #include <AK/Traits.h>
  14. #include <AK/Types.h>
  15. #ifdef KERNEL
  16. # include <Kernel/Arch/Processor.h>
  17. # include <Kernel/Arch/ScopedCritical.h>
  18. #endif
  19. #define THREADSAFEREFPTR_SCRUB_BYTE 0xa0
  20. namespace AK {
  21. template<typename T>
  22. class OwnPtr;
// RefPtrTraits packs a T* and a one-bit spinlock into a single FlatPtr:
// bit 0 is the "lock bit", all remaining bits are the pointer value.
// This requires T objects to be at least 2-byte aligned (verified below).
// The lock bit lets RefPtr briefly pin the stored pointer while adding a
// reference, so a concurrent clear()/leak_ref() cannot free the object
// out from under a copy in progress.
template<typename T>
struct RefPtrTraits {
    // Strip the lock bit and return the raw pointer (may be null).
    ALWAYS_INLINE static T* as_ptr(FlatPtr bits)
    {
        return (T*)(bits & ~(FlatPtr)1);
    }
    // Encode a raw pointer as bits. The pointer must not collide with the
    // lock bit, i.e. it must be 2-byte aligned.
    ALWAYS_INLINE static FlatPtr as_bits(T* ptr)
    {
        VERIFY(((FlatPtr)ptr & 1) == 0);
        return (FlatPtr)ptr;
    }
    // Re-encode bits produced by another traits type (PtrTraits) into this
    // traits' representation; null maps to this traits' null value.
    template<typename U, typename PtrTraits>
    ALWAYS_INLINE static FlatPtr convert_from(FlatPtr bits)
    {
        if (PtrTraits::is_null(bits))
            return default_null_value;
        return as_bits(PtrTraits::as_ptr(bits));
    }
    // True if the stored pointer (ignoring the lock bit) is null.
    ALWAYS_INLINE static bool is_null(FlatPtr bits)
    {
        return (bits & ~(FlatPtr)1) == 0;
    }
    // Atomically replace the stored bits with new_value and return the old
    // pointer bits. Spins until the lock bit is clear, so an exchange never
    // races with a holder of the lock.
    ALWAYS_INLINE static FlatPtr exchange(Atomic<FlatPtr>& atomic_var, FlatPtr new_value)
    {
        // Only exchange when lock is not held
        VERIFY((new_value & 1) == 0);
        FlatPtr expected = atomic_var.load(AK::MemoryOrder::memory_order_relaxed);
        for (;;) {
            expected &= ~(FlatPtr)1; // only if lock bit is not set
            if (atomic_var.compare_exchange_strong(expected, new_value, AK::MemoryOrder::memory_order_acq_rel))
                break;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }
        return expected;
    }
    // Atomically store new_value, but only if the current value is null
    // (and unlocked). Returns false if a non-null pointer was present.
    ALWAYS_INLINE static bool exchange_if_null(Atomic<FlatPtr>& atomic_var, FlatPtr new_value)
    {
        // Only exchange when lock is not held
        VERIFY((new_value & 1) == 0);
        for (;;) {
            FlatPtr expected = default_null_value; // only if lock bit is not set
            if (atomic_var.compare_exchange_strong(expected, new_value, AK::MemoryOrder::memory_order_acq_rel))
                break;
            if (!is_null(expected))
                return false;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }
        return true;
    }
    // Acquire the lock bit, spinning until it was previously clear, and
    // return the (unlocked) pointer bits. Must be paired with unlock().
    ALWAYS_INLINE static FlatPtr lock(Atomic<FlatPtr>& atomic_var)
    {
        // This sets the lock bit atomically, preventing further modifications.
        // This is important when e.g. copying a RefPtr where the source
        // might be released and freed too quickly. This allows us
        // to temporarily lock the pointer so we can add a reference, then
        // unlock it
        FlatPtr bits;
        for (;;) {
            bits = atomic_var.fetch_or(1, AK::MemoryOrder::memory_order_acq_rel);
            if ((bits & 1) == 0)
                break;
#ifdef KERNEL
            Kernel::Processor::wait_check();
#endif
        }
        VERIFY((bits & 1) == 0);
        return bits;
    }
    // Release the lock by storing new_value (which must not carry the lock
    // bit); the release ordering publishes any writes made under the lock.
    ALWAYS_INLINE static void unlock(Atomic<FlatPtr>& atomic_var, FlatPtr new_value)
    {
        VERIFY((new_value & 1) == 0);
        atomic_var.store(new_value, AK::MemoryOrder::memory_order_release);
    }
    static constexpr FlatPtr default_null_value = 0;
    using NullType = std::nullptr_t;
};
// RefPtr: a nullable, thread-safe, intrusive reference-counting smart pointer.
//
// The pointee T is expected to provide ref()/unref() (the usual AK RefCounted
// contract; ref_if_not_null/unref_if_not_null are helpers defined elsewhere —
// presumably in RefCounted.h, confirm against the including headers).
// The pointer itself lives in a single Atomic<FlatPtr> whose low bit is a
// spinlock (see RefPtrTraits above): copy operations lock the source so a
// concurrent clear()/leak_ref() cannot free the object between reading the
// pointer and adding a reference. Individual operations are atomic, but
// compound operations (e.g. swap) are not — see the NOTEs below.
template<typename T, typename PtrTraits>
class [[nodiscard]] RefPtr {
    template<typename U, typename P>
    friend class RefPtr;
    template<typename U>
    friend class WeakPtr;

public:
    // Tag for adopting an already-counted reference (no ref() is added).
    enum AdoptTag {
        Adopt
    };

    RefPtr() = default;

    // Construct from a raw pointer, adding a reference if non-null.
    RefPtr(const T* ptr)
        : m_bits(PtrTraits::as_bits(const_cast<T*>(ptr)))
    {
        ref_if_not_null(const_cast<T*>(ptr));
    }

    // Construct from a reference, always adding a reference.
    RefPtr(const T& object)
        : m_bits(PtrTraits::as_bits(const_cast<T*>(&object)))
    {
        T* ptr = const_cast<T*>(&object);
        VERIFY(ptr);
        VERIFY(!is_null());
        ptr->ref();
    }

    // Adopt an existing reference: the caller's reference is taken over,
    // no additional ref() is performed.
    RefPtr(AdoptTag, T& object)
        : m_bits(PtrTraits::as_bits(&object))
    {
        VERIFY(!is_null());
    }

    // Move: steal the other pointer's bits, leaving it null.
    RefPtr(RefPtr&& other)
        : m_bits(other.leak_ref_raw())
    {
    }

    // Copy from a NonnullRefPtr (adds a reference via add_ref()).
    ALWAYS_INLINE RefPtr(const NonnullRefPtr<T>& other)
        : m_bits(PtrTraits::as_bits(const_cast<T*>(other.add_ref())))
    {
    }

    // Converting copy from NonnullRefPtr<U> where U* converts to T*.
    template<typename U>
    ALWAYS_INLINE RefPtr(const NonnullRefPtr<U>& other) requires(IsConvertible<U*, T*>)
        : m_bits(PtrTraits::as_bits(const_cast<U*>(other.add_ref())))
    {
    }

    // Converting move from NonnullRefPtr<U>: takes over its reference.
    template<typename U>
    ALWAYS_INLINE RefPtr(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
        : m_bits(PtrTraits::as_bits(&other.leak_ref()))
    {
        VERIFY(!is_null());
    }

    // Converting move from RefPtr<U, P>: re-encodes the bits through the
    // source traits, leaving the source null.
    template<typename U, typename P = RefPtrTraits<U>>
    RefPtr(RefPtr<U, P>&& other) requires(IsConvertible<U*, T*>)
        : m_bits(PtrTraits::template convert_from<U, P>(other.leak_ref_raw()))
    {
    }

    // Copy: adds a reference under the source's lock bit (see add_ref_raw()).
    RefPtr(const RefPtr& other)
        : m_bits(other.add_ref_raw())
    {
    }

    // Converting copy from RefPtr<U, P>.
    template<typename U, typename P = RefPtrTraits<U>>
    RefPtr(const RefPtr<U, P>& other) requires(IsConvertible<U*, T*>)
        : m_bits(other.add_ref_raw())
    {
    }

    // Drops the held reference (if any); optionally scrubs the storage so
    // use-after-destroy is easier to spot in debug builds.
    ALWAYS_INLINE ~RefPtr()
    {
        clear();
#ifdef SANITIZE_PTRS
        m_bits.store(explode_byte(THREADSAFEREFPTR_SCRUB_BYTE), AK::MemoryOrder::memory_order_relaxed);
#endif
    }

    // OwnPtr holds unique ownership; constructing/assigning a RefPtr from it
    // would be a double-ownership bug, so these are deleted.
    template<typename U>
    RefPtr(const OwnPtr<U>&) = delete;
    template<typename U>
    RefPtr& operator=(const OwnPtr<U>&) = delete;

    // Exchange pointers with another RefPtr.
    void swap(RefPtr& other)
    {
        if (this == &other)
            return;

        // NOTE: swap is not atomic!
        FlatPtr other_bits = PtrTraits::exchange(other.m_bits, PtrTraits::default_null_value);
        FlatPtr bits = PtrTraits::exchange(m_bits, other_bits);
        PtrTraits::exchange(other.m_bits, bits);
    }

    // Exchange pointers with a RefPtr of a convertible type, translating
    // the bit encoding through each side's traits.
    template<typename U, typename P = RefPtrTraits<U>>
    void swap(RefPtr<U, P>& other) requires(IsConvertible<U*, T*>)
    {
        // NOTE: swap is not atomic!
        FlatPtr other_bits = P::exchange(other.m_bits, P::default_null_value);
        FlatPtr bits = PtrTraits::exchange(m_bits, PtrTraits::template convert_from<U, P>(other_bits));
        P::exchange(other.m_bits, P::template convert_from<U, P>(bits));
    }

    // Move assignment: takes over the source's reference, drops our old one.
    ALWAYS_INLINE RefPtr& operator=(RefPtr&& other)
    {
        if (this != &other)
            assign_raw(other.leak_ref_raw());
        return *this;
    }

    template<typename U, typename P = RefPtrTraits<U>>
    ALWAYS_INLINE RefPtr& operator=(RefPtr<U, P>&& other) requires(IsConvertible<U*, T*>)
    {
        assign_raw(PtrTraits::template convert_from<U, P>(other.leak_ref_raw()));
        return *this;
    }

    template<typename U>
    ALWAYS_INLINE RefPtr& operator=(NonnullRefPtr<U>&& other) requires(IsConvertible<U*, T*>)
    {
        assign_raw(PtrTraits::as_bits(&other.leak_ref()));
        return *this;
    }

    // Copy assignment from NonnullRefPtr: adds a reference first.
    ALWAYS_INLINE RefPtr& operator=(const NonnullRefPtr<T>& other)
    {
        assign_raw(PtrTraits::as_bits(other.add_ref()));
        return *this;
    }

    template<typename U>
    ALWAYS_INLINE RefPtr& operator=(const NonnullRefPtr<U>& other) requires(IsConvertible<U*, T*>)
    {
        assign_raw(PtrTraits::as_bits(other.add_ref()));
        return *this;
    }

    // Copy assignment: ref the source under its lock, then swap in the bits
    // and unref whatever we held before.
    ALWAYS_INLINE RefPtr& operator=(const RefPtr& other)
    {
        if (this != &other)
            assign_raw(other.add_ref_raw());
        return *this;
    }

    template<typename U>
    ALWAYS_INLINE RefPtr& operator=(const RefPtr<U>& other) requires(IsConvertible<U*, T*>)
    {
        assign_raw(other.add_ref_raw());
        return *this;
    }

    // Assignment from a raw pointer: ref the new pointee (if non-null)
    // before releasing the old one.
    ALWAYS_INLINE RefPtr& operator=(const T* ptr)
    {
        ref_if_not_null(const_cast<T*>(ptr));
        assign_raw(PtrTraits::as_bits(const_cast<T*>(ptr)));
        return *this;
    }

    ALWAYS_INLINE RefPtr& operator=(const T& object)
    {
        const_cast<T&>(object).ref();
        assign_raw(PtrTraits::as_bits(const_cast<T*>(&object)));
        return *this;
    }

    // Assigning nullptr is equivalent to clear().
    RefPtr& operator=(std::nullptr_t)
    {
        clear();
        return *this;
    }

    // Atomically take other's pointer, but only if we currently hold null.
    // Returns true on success (or trivial self-assignment of a null ptr);
    // on failure other keeps its reference.
    ALWAYS_INLINE bool assign_if_null(RefPtr&& other)
    {
        if (this == &other)
            return is_null();
        return PtrTraits::exchange_if_null(m_bits, other.leak_ref_raw());
    }

    template<typename U, typename P = RefPtrTraits<U>>
    ALWAYS_INLINE bool assign_if_null(RefPtr<U, P>&& other)
    {
        if (this == &other)
            return is_null();
        return PtrTraits::exchange_if_null(m_bits, PtrTraits::template convert_from<U, P>(other.leak_ref_raw()));
    }

    // Drop the held reference (if any) and become null.
    ALWAYS_INLINE void clear()
    {
        assign_raw(PtrTraits::default_null_value);
    }

    bool operator!() const { return PtrTraits::is_null(m_bits.load(AK::MemoryOrder::memory_order_relaxed)); }

    // Give up ownership without unref'ing: the caller takes over the
    // reference. Returns null if we held nothing.
    [[nodiscard]] T* leak_ref()
    {
        FlatPtr bits = PtrTraits::exchange(m_bits, PtrTraits::default_null_value);
        return PtrTraits::as_ptr(bits);
    }

    // Transfer the (verified non-null) reference into a NonnullRefPtr.
    NonnullRefPtr<T> release_nonnull()
    {
        FlatPtr bits = PtrTraits::exchange(m_bits, PtrTraits::default_null_value);
        VERIFY(!PtrTraits::is_null(bits));
        return NonnullRefPtr<T>(NonnullRefPtr<T>::Adopt, *PtrTraits::as_ptr(bits));
    }

    // Raw access; may return null.
    ALWAYS_INLINE T* ptr() { return as_ptr(); }
    ALWAYS_INLINE const T* ptr() const { return as_ptr(); }

    // Dereference operators VERIFY non-null.
    ALWAYS_INLINE T* operator->()
    {
        return as_nonnull_ptr();
    }

    ALWAYS_INLINE const T* operator->() const
    {
        return as_nonnull_ptr();
    }

    ALWAYS_INLINE T& operator*()
    {
        return *as_nonnull_ptr();
    }

    ALWAYS_INLINE const T& operator*() const
    {
        return *as_nonnull_ptr();
    }

    // Implicit conversions to raw pointer and to bool (non-null test).
    ALWAYS_INLINE operator const T*() const { return as_ptr(); }
    ALWAYS_INLINE operator T*() { return as_ptr(); }

    ALWAYS_INLINE operator bool() { return !is_null(); }

    // All comparisons are by pointer identity, not pointee value.
    bool operator==(std::nullptr_t) const { return is_null(); }
    bool operator!=(std::nullptr_t) const { return !is_null(); }

    bool operator==(const RefPtr& other) const { return as_ptr() == other.as_ptr(); }
    bool operator!=(const RefPtr& other) const { return as_ptr() != other.as_ptr(); }

    bool operator==(RefPtr& other) { return as_ptr() == other.as_ptr(); }
    bool operator!=(RefPtr& other) { return as_ptr() != other.as_ptr(); }

    bool operator==(const T* other) const { return as_ptr() == other; }
    bool operator!=(const T* other) const { return as_ptr() != other; }

    bool operator==(T* other) { return as_ptr() == other; }
    bool operator!=(T* other) { return as_ptr() != other; }

    ALWAYS_INLINE bool is_null() const { return PtrTraits::is_null(m_bits.load(AK::MemoryOrder::memory_order_relaxed)); }

    // Only available for traits that encode extra state in the null value
    // (NullType != nullptr_t): read back that state. Requires holding null.
    template<typename U = T, typename EnableIf<IsSame<U, T> && !IsNullPointer<typename PtrTraits::NullType>>::Type* = nullptr>
    typename PtrTraits::NullType null_value() const
    {
        // make sure we are holding a null value
        FlatPtr bits = m_bits.load(AK::MemoryOrder::memory_order_relaxed);
        VERIFY(PtrTraits::is_null(bits));
        return PtrTraits::to_null_value(bits);
    }

    // Counterpart to null_value(): store custom null-state bits, dropping
    // any held reference.
    template<typename U = T, typename EnableIf<IsSame<U, T> && !IsNullPointer<typename PtrTraits::NullType>>::Type* = nullptr>
    void set_null_value(typename PtrTraits::NullType value)
    {
        // make sure that new null value would be interpreted as a null value
        FlatPtr bits = PtrTraits::from_null_value(value);
        VERIFY(PtrTraits::is_null(bits));
        assign_raw(bits);
    }

private:
    // Run f(ptr) while the lock bit pins the stored pointer, guaranteeing
    // the pointee cannot be released concurrently for the duration of f.
    template<typename F>
    void do_while_locked(F f) const
    {
#ifdef KERNEL
        // We don't want to be pre-empted while we have the lock bit set
        Kernel::ScopedCritical critical;
#endif
        FlatPtr bits = PtrTraits::lock(m_bits);
        T* ptr = PtrTraits::as_ptr(bits);
        f(ptr);
        PtrTraits::unlock(m_bits, bits);
    }

    // Atomically take our bits (leaving null) without touching the refcount;
    // the caller inherits the reference.
    [[nodiscard]] ALWAYS_INLINE FlatPtr leak_ref_raw()
    {
        return PtrTraits::exchange(m_bits, PtrTraits::default_null_value);
    }

    // Ref the pointee under the lock bit and return the pointer bits.
    [[nodiscard]] ALWAYS_INLINE FlatPtr add_ref_raw() const
    {
#ifdef KERNEL
        // We don't want to be pre-empted while we have the lock bit set
        Kernel::ScopedCritical critical;
#endif
        // This prevents a race condition between thread A and B:
        // 1. Thread A copies RefPtr, e.g. through assignment or copy constructor,
        //    gets the pointer from source, but is pre-empted before adding
        //    another reference
        // 2. Thread B calls clear, leak_ref, or release_nonnull on source, and
        //    then drops the last reference, causing the object to be deleted
        // 3. Thread A finishes step #1 by attempting to add a reference to
        //    the object that was already deleted in step #2
        FlatPtr bits = PtrTraits::lock(m_bits);
        if (T* ptr = PtrTraits::as_ptr(bits))
            ptr->ref();
        PtrTraits::unlock(m_bits, bits);
        return bits;
    }

    // Install new bits (whose reference the caller has already accounted
    // for) and unref whatever we previously held.
    ALWAYS_INLINE void assign_raw(FlatPtr bits)
    {
        FlatPtr prev_bits = PtrTraits::exchange(m_bits, bits);
        unref_if_not_null(PtrTraits::as_ptr(prev_bits));
    }

    ALWAYS_INLINE T* as_ptr() const
    {
        return PtrTraits::as_ptr(m_bits.load(AK::MemoryOrder::memory_order_relaxed));
    }

    ALWAYS_INLINE T* as_nonnull_ptr() const
    {
        return as_nonnull_ptr(m_bits.load(AK::MemoryOrder::memory_order_relaxed));
    }

    ALWAYS_INLINE T* as_nonnull_ptr(FlatPtr bits) const
    {
        VERIFY(!PtrTraits::is_null(bits));
        return PtrTraits::as_ptr(bits);
    }

    // The packed pointer + lock bit. Mutable because add_ref_raw() must be
    // able to take the lock on a const source during copies.
    mutable Atomic<FlatPtr> m_bits { PtrTraits::default_null_value };
};
  385. template<typename T>
  386. struct Formatter<RefPtr<T>> : Formatter<const T*> {
  387. ErrorOr<void> format(FormatBuilder& builder, RefPtr<T> const& value)
  388. {
  389. return Formatter<const T*>::format(builder, value.ptr());
  390. }
  391. };
  392. template<typename T>
  393. struct Traits<RefPtr<T>> : public GenericTraits<RefPtr<T>> {
  394. using PeekType = T*;
  395. using ConstPeekType = const T*;
  396. static unsigned hash(const RefPtr<T>& p) { return ptr_hash(p.ptr()); }
  397. static bool equals(const RefPtr<T>& a, const RefPtr<T>& b) { return a.ptr() == b.ptr(); }
  398. };
  399. template<typename T, typename U>
  400. inline NonnullRefPtr<T> static_ptr_cast(const NonnullRefPtr<U>& ptr)
  401. {
  402. return NonnullRefPtr<T>(static_cast<const T&>(*ptr));
  403. }
  404. template<typename T, typename U, typename PtrTraits = RefPtrTraits<T>>
  405. inline RefPtr<T> static_ptr_cast(const RefPtr<U>& ptr)
  406. {
  407. return RefPtr<T, PtrTraits>(static_cast<const T*>(ptr.ptr()));
  408. }
// Free-function swap so ADL-style `swap(a, b)` works on RefPtrs, including
// across convertible pointee types (U* -> T*). Delegates to the member swap;
// note that the member swap is not atomic as a whole.
template<typename T, typename PtrTraitsT, typename U, typename PtrTraitsU>
inline void swap(RefPtr<T, PtrTraitsT>& a, RefPtr<U, PtrTraitsU>& b) requires(IsConvertible<U*, T*>)
{
    a.swap(b);
}
  414. template<typename T>
  415. inline RefPtr<T> adopt_ref_if_nonnull(T* object)
  416. {
  417. if (object)
  418. return RefPtr<T>(RefPtr<T>::Adopt, *object);
  419. return {};
  420. }
  421. template<typename T, class... Args>
  422. requires(IsConstructible<T, Args...>) inline RefPtr<T> try_make_ref_counted(Args&&... args)
  423. {
  424. return adopt_ref_if_nonnull(new (nothrow) T(forward<Args>(args)...));
  425. }
  426. // FIXME: Remove once P0960R3 is available in Clang.
  427. template<typename T, class... Args>
  428. inline RefPtr<T> try_make_ref_counted(Args&&... args)
  429. {
  430. return adopt_ref_if_nonnull(new (nothrow) T { forward<Args>(args)... });
  431. }
  432. template<typename T>
  433. inline ErrorOr<NonnullRefPtr<T>> adopt_nonnull_ref_or_enomem(T* object)
  434. {
  435. auto result = adopt_ref_if_nonnull(object);
  436. if (!result)
  437. return Error::from_errno(ENOMEM);
  438. return result.release_nonnull();
  439. }
  440. }
// Export the public names into the global namespace, per AK convention.
using AK::adopt_ref_if_nonnull;
using AK::RefPtr;
using AK::static_ptr_cast;
using AK::try_make_ref_counted;
#ifdef KERNEL
using AK::adopt_nonnull_ref_or_enomem;
#endif