Kernel: Reduce LOCK_DEBUG ifdefs by utilizing Kernel::LockLocation

The LOCK_DEBUG conditional code is pretty ugly for a feature that we
only use rarely. We can remove a significant amount of this code by
utilizing a zero-sized fake type when not building in LOCK_DEBUG mode.

This lets us keep the same API while letting the compiler optimize it
away when we don't actually care about the location the caller came from.
Author: Brian Gianforcaro, 2021-08-07 04:19:39 -07:00
Committed by: Andreas Kling
Parent: 6c18b4e558
Commit: bea74f4b77
4 changed files with 13 additions and 53 deletions
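For reference, Kernel/Locking/LockLocation.h (added in the parent commit, 6c18b4e558, and included by the hunks below) aliases AK::SourceLocation in LOCK_DEBUG builds and otherwise provides an empty stand-in with the same interface. A minimal sketch of that header, assuming the parent commit's approach; exact details may differ:

#pragma once

#if LOCK_DEBUG
#    include <AK/SourceLocation.h>
#endif

namespace Kernel {

#if LOCK_DEBUG
// Debug builds: a real source location, captured at the call site.
using LockLocation = SourceLocation;
#else
// Regular builds: an empty type with the same static interface.
// LockLocation::current() still compiles as a default argument, but
// constructs nothing the optimizer has to keep.
struct LockLocation {
    static constexpr LockLocation current() { return {}; }

private:
    constexpr LockLocation() = default;
};
#endif

}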

Kernel/Locking/Mutex.cpp

@@ -4,22 +4,16 @@
 * SPDX-License-Identifier: BSD-2-Clause
 */

-#ifdef LOCK_DEBUG
-#    include <AK/SourceLocation.h>
-#endif
#include <Kernel/Debug.h>
#include <Kernel/KSyms.h>
+#include <Kernel/Locking/LockLocation.h>
#include <Kernel/Locking/Mutex.h>
#include <Kernel/Locking/SpinLock.h>
#include <Kernel/Thread.h>

namespace Kernel {

-#if LOCK_DEBUG
-void Mutex::lock(Mode mode, const SourceLocation& location)
-#else
-void Mutex::lock(Mode mode)
-#endif
+void Mutex::lock(Mode mode, [[maybe_unused]] const LockLocation& location)
{
    // NOTE: This may be called from an interrupt handler (not an IRQ handler)
    //       and also from within critical sections!
@@ -318,11 +312,7 @@ auto Mutex::force_unlock_if_locked(u32& lock_count_to_restore) -> Mode
    return current_mode;
}

-#if LOCK_DEBUG
-void Mutex::restore_lock(Mode mode, u32 lock_count, const SourceLocation& location)
-#else
-void Mutex::restore_lock(Mode mode, u32 lock_count)
-#endif
+void Mutex::restore_lock(Mode mode, u32 lock_count, [[maybe_unused]] const LockLocation& location)
{
    VERIFY(mode != Mode::Unlocked);
    VERIFY(lock_count > 0);

Kernel/Locking/Mutex.h

@@ -11,6 +11,7 @@
#include <AK/HashMap.h>
#include <AK/Types.h>
#include <Kernel/Forward.h>
+#include <Kernel/Locking/LockLocation.h>
#include <Kernel/Locking/LockMode.h>
#include <Kernel/WaitQueue.h>
@@ -31,13 +32,8 @@ public:
    }
    ~Mutex() = default;

-#if LOCK_DEBUG
-    void lock(Mode mode = Mode::Exclusive, const SourceLocation& location = SourceLocation::current());
-    void restore_lock(Mode, u32, const SourceLocation& location = SourceLocation::current());
-#else
-    void lock(Mode = Mode::Exclusive);
-    void restore_lock(Mode, u32);
-#endif
+    void lock(Mode mode = Mode::Exclusive, const LockLocation& location = LockLocation::current());
+    void restore_lock(Mode, u32, const LockLocation& location = LockLocation::current());

    void unlock();
    [[nodiscard]] Mode force_unlock_if_locked(u32&);
@@ -115,18 +111,10 @@ public:
    {
    }

-#if LOCK_DEBUG
-    ALWAYS_INLINE explicit MutexLocker(Mutex& l, Mutex::Mode mode = Mutex::Mode::Exclusive, const SourceLocation& location = SourceLocation::current())
-#else
-    ALWAYS_INLINE explicit MutexLocker(Mutex& l, Mutex::Mode mode = Mutex::Mode::Exclusive)
-#endif
+    ALWAYS_INLINE explicit MutexLocker(Mutex& l, Mutex::Mode mode = Mutex::Mode::Exclusive, const LockLocation& location = LockLocation::current())
        : m_lock(&l)
    {
-#if LOCK_DEBUG
        m_lock->lock(mode, location);
-#else
-        m_lock->lock(mode);
-#endif
    }

    ALWAYS_INLINE ~MutexLocker()
@@ -143,38 +131,22 @@ public:
        m_lock->unlock();
    }

-#if LOCK_DEBUG
-    ALWAYS_INLINE void attach_and_lock(Mutex& lock, Mutex::Mode mode = Mutex::Mode::Exclusive, const SourceLocation& location = SourceLocation::current())
-#else
-    ALWAYS_INLINE void attach_and_lock(Mutex& lock, Mutex::Mode mode = Mutex::Mode::Exclusive)
-#endif
+    ALWAYS_INLINE void attach_and_lock(Mutex& lock, Mutex::Mode mode = Mutex::Mode::Exclusive, const LockLocation& location = LockLocation::current())
    {
        VERIFY(!m_locked);
        m_lock = &lock;
        m_locked = true;
-#if LOCK_DEBUG
        m_lock->lock(mode, location);
-#else
-        m_lock->lock(mode);
-#endif
    }

-#if LOCK_DEBUG
-    ALWAYS_INLINE void lock(Mutex::Mode mode = Mutex::Mode::Exclusive, const SourceLocation& location = SourceLocation::current())
-#else
-    ALWAYS_INLINE void lock(Mutex::Mode mode = Mutex::Mode::Exclusive)
-#endif
+    ALWAYS_INLINE void lock(Mutex::Mode mode = Mutex::Mode::Exclusive, const LockLocation& location = LockLocation::current())
    {
        VERIFY(m_lock);
        VERIFY(!m_locked);
        m_locked = true;
-#if LOCK_DEBUG
        m_lock->lock(mode, location);
-#else
-        m_lock->lock(mode);
-#endif
    }

private:
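Because LockLocation::current() is a default argument, it is evaluated at each call site, so in LOCK_DEBUG builds every existing caller records its own location without any change. A hypothetical call site for illustration (the Example class is invented, not part of this change):

class Example {
public:
    void do_work()
    {
        // LOCK_DEBUG builds: the defaulted LockLocation::current()
        // captures this function/file/line. Regular builds: it
        // constructs the empty stand-in, which inlines away to nothing.
        MutexLocker locker(m_lock);
        // ... critical section ...
    }

private:
    Mutex m_lock;
};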

Kernel/Thread.cpp

@@ -514,7 +514,7 @@ void Thread::finalize()
        dbgln("Thread {} leaking {} Locks!", *this, lock_count());
        ScopedSpinLock list_lock(m_holding_locks_lock);
        for (auto& info : m_holding_locks_list) {
-            const auto& location = info.source_location;
+            const auto& location = info.lock_location;
            dbgln(" - Mutex: \"{}\" @ {} locked in function \"{}\" at \"{}:{}\" with a count of: {}", info.lock->name(), info.lock, location.function_name(), location.filename(), location.line_number(), info.count);
        }
        VERIFY_NOT_REACHED();

Kernel/Thread.h

@@ -12,9 +12,6 @@
#include <AK/IntrusiveList.h>
#include <AK/Optional.h>
#include <AK/OwnPtr.h>
-#ifdef LOCK_DEBUG
-#    include <AK/SourceLocation.h>
-#endif
#include <AK/String.h>
#include <AK/Time.h>
#include <AK/Vector.h>
@@ -27,6 +24,7 @@
#include <Kernel/Forward.h>
#include <Kernel/KResult.h>
#include <Kernel/KString.h>
+#include <Kernel/Locking/LockLocation.h>
#include <Kernel/Locking/LockMode.h>
#include <Kernel/Memory/VirtualRange.h>
#include <Kernel/Scheduler.h>
@@ -1152,7 +1150,7 @@ public:
    RecursiveSpinLock& get_lock() const { return m_lock; }

#if LOCK_DEBUG
-    void holding_lock(Mutex& lock, int refs_delta, const SourceLocation& location)
+    void holding_lock(Mutex& lock, int refs_delta, const LockLocation& location)
    {
        VERIFY(refs_delta != 0);
        m_holding_locks.fetch_add(refs_delta, AK::MemoryOrder::memory_order_relaxed);
@@ -1317,7 +1315,7 @@ private:
#if LOCK_DEBUG
    struct HoldingLockInfo {
        Mutex* lock;
-        SourceLocation source_location;
+        LockLocation lock_location;
        unsigned count;
    };
    Atomic<u32> m_holding_locks { 0 };
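The location parameter is only consumed on LOCK_DEBUG paths, which is why the shared signatures above mark it [[maybe_unused]]. As a sketch of where it ends up, Mutex::lock() presumably forwards it to the bookkeeping above along these lines (the lock body is not part of this diff, so treat the details as an assumption):

#if LOCK_DEBUG
    // Record who took the lock and where, feeding the per-thread
    // m_holding_locks_list that Thread::finalize() dumps on a leak.
    if (auto* current_thread = Thread::current())
        current_thread->holding_lock(*this, 1, location);
#endif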