
Kernel: Support Mutex Protected lists in ListedRefCounted

This will allow us to support Mutex Protected lists like the custodies
list as well.
Idan Horowitz 3 years ago
parent
commit
be91b4fe3e
4 changed files with 19 additions and 8 deletions
  1. Kernel/FileSystem/Inode.h (+1 -1)
  2. Kernel/Library/ListedRefCounted.h (+16 -5)
  3. Kernel/Memory/VMObject.h (+1 -1)
  4. Kernel/Thread.h (+1 -1)
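
As a concrete illustration of what the commit message describes ("Mutex Protected lists like the custodies list"), here is a minimal, hypothetical sketch of a class whose all-instances list is guarded by a Mutex rather than a Spinlock. The class and member names are illustrative only and are not part of this commit; only the ListedRefCounted<T, LockType> base and the all_instances() accessor pattern come from the diff below.

    // Hypothetical sketch, not part of this commit; the class name, member names,
    // and exact IntrusiveList spelling are assumptions for illustration.
    class Custody final : public ListedRefCounted<Custody, LockType::Mutex> {
    private:
        IntrusiveListNode<Custody> m_all_custodies_list_node;

    public:
        using AllCustodiesList = IntrusiveList<&Custody::m_all_custodies_list_node>;
        // Because the second template argument is LockType::Mutex, the custom
        // unref() in ListedRefCounted will access this list via with_exclusive().
        static MutexProtected<AllCustodiesList>& all_instances();
    };

The existing users (Inode, VMObject, Thread) keep their Spinlock-protected lists and simply pass LockType::Spinlock explicitly, as the per-file diffs below show.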

+ 1 - 1
Kernel/FileSystem/Inode.h

@@ -23,7 +23,7 @@
 
 namespace Kernel {
 
-class Inode : public ListedRefCounted<Inode>
+class Inode : public ListedRefCounted<Inode, LockType::Spinlock>
     , public Weakable<Inode> {
     friend class VirtualFileSystem;
     friend class FileSystem;

+ 16 - 5
Kernel/Library/ListedRefCounted.h

@@ -11,21 +11,32 @@
 namespace Kernel {
 
 // ListedRefCounted<T> is a slot-in replacement for RefCounted<T> to use in classes
-// that add themselves to a SpinlockProtected<IntrusiveList> when constructed.
-// The custom unref() implementation here ensures that the the list is locked during
+// that add themselves to a {Spinlock, Mutex}Protected<IntrusiveList> when constructed.
+// The custom unref() implementation here ensures that the list is locked during
 // unref(), and that the T is removed from the list before ~T() is invoked.
 
-template<typename T>
+enum class LockType {
+    Spinlock,
+    Mutex,
+};
+
+template<typename T, LockType Lock>
 class ListedRefCounted : public RefCountedBase {
 public:
     bool unref() const
     {
-        auto new_ref_count = T::all_instances().with([&](auto& list) {
+        auto callback = [&](auto& list) {
             auto new_ref_count = deref_base();
             if (new_ref_count == 0)
                 list.remove(const_cast<T&>(static_cast<T const&>(*this)));
             return new_ref_count;
-        });
+        };
+
+        RefCountType new_ref_count;
+        if constexpr (Lock == LockType::Spinlock)
+            new_ref_count = T::all_instances().with(callback);
+        else if constexpr (Lock == LockType::Mutex)
+            new_ref_count = T::all_instances().with_exclusive(callback);
         if (new_ref_count == 0) {
             call_will_be_destroyed_if_present(static_cast<const T*>(this));
             delete const_cast<T*>(static_cast<T const*>(this));
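
The if constexpr dispatch above is needed because the two locking wrappers expose different accessors: the Spinlock-protected list is entered through with(), while the Mutex-protected list is entered through with_exclusive(). A rough sketch of the shapes involved follows; the signatures are simplified assumptions, not the actual AK/Kernel declarations.

    // Simplified for illustration; the real SpinlockProtected/MutexProtected
    // templates carry more machinery than shown here.
    template<typename T>
    class SpinlockProtected {
    public:
        template<typename Callback>
        decltype(auto) with(Callback callback); // runs callback(value) with the spinlock held
    };

    template<typename T>
    class MutexProtected {
    public:
        template<typename Callback>
        decltype(auto) with_exclusive(Callback callback); // runs callback(value) with the mutex held exclusively
    };

Since Lock is a non-type template parameter, each branch of the if constexpr is only instantiated for the matching LockType, so a class never needs to provide the accessor belonging to the other lock kind.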

+ 1 - 1
Kernel/Memory/VMObject.h

@@ -18,7 +18,7 @@
 namespace Kernel::Memory {
 
 class VMObject
-    : public ListedRefCounted<VMObject>
+    : public ListedRefCounted<VMObject, LockType::Spinlock>
     , public Weakable<VMObject> {
     friend class MemoryManager;
     friend class Region;

+ 1 - 1
Kernel/Thread.h

@@ -146,7 +146,7 @@ struct ThreadRegisters {
 };
 
 class Thread
-    : public ListedRefCounted<Thread>
+    : public ListedRefCounted<Thread, LockType::Spinlock>
     , public Weakable<Thread> {
     AK_MAKE_NONCOPYABLE(Thread);
     AK_MAKE_NONMOVABLE(Thread);