Kernel: Tag more methods and types as [[nodiscard]]

Tag methods where not observing the return value is an obvious error
with [[nodiscard]] to catch potential future bugs.
This commit is contained in:
Brian Gianforcaro 2020-12-26 01:47:08 -08:00 committed by Andreas Kling
parent 21a5524d01
commit 815d39886f
Notes: sideshowbarker 2024-07-19 00:34:07 +09:00
3 changed files with 26 additions and 24 deletions

View file

@ -87,12 +87,12 @@ public:
m_pools[pool].update(reinterpret_cast<const u8*>(&event_data), sizeof(T));
}
bool is_seeded() const
[[nodiscard]] bool is_seeded() const
{
return m_reseed_number > 0;
}
bool is_ready() const
[[nodiscard]] bool is_ready() const
{
return is_seeded() || m_p0_len >= reseed_threshold;
}

View file

@ -58,7 +58,7 @@ public:
Processor::current().leave_critical(prev_flags);
}
ALWAYS_INLINE bool is_locked() const
[[nodiscard]] ALWAYS_INLINE bool is_locked() const
{
return m_lock.load(AK::memory_order_relaxed) != 0;
}
@ -105,12 +105,12 @@ public:
Processor::current().leave_critical(prev_flags);
}
ALWAYS_INLINE bool is_locked() const
[[nodiscard]] ALWAYS_INLINE bool is_locked() const
{
return m_lock.load(AK::memory_order_relaxed) != 0;
}
ALWAYS_INLINE bool own_lock() const
[[nodiscard]] ALWAYS_INLINE bool own_lock() const
{
return m_lock.load(AK::memory_order_relaxed) == FlatPtr(&Processor::current());
}
@ -126,7 +126,8 @@ private:
};
template<typename LockType>
class ScopedSpinLock {
class NO_DISCARD ScopedSpinLock {
AK_MAKE_NONCOPYABLE(ScopedSpinLock);
public:
@ -175,7 +176,7 @@ public:
m_have_lock = false;
}
ALWAYS_INLINE bool have_lock() const
[[nodiscard]] ALWAYS_INLINE bool have_lock() const
{
return m_have_lock;
}

View file

@ -113,7 +113,7 @@ public:
m_is_joinable = false;
}
bool is_joinable() const
[[nodiscard]] bool is_joinable() const
{
ScopedSpinLock lock(m_lock);
return m_is_joinable;
@ -181,7 +181,7 @@ public:
return m_type != type;
}
bool was_interrupted() const
[[nodiscard]] bool was_interrupted() const
{
switch (m_type) {
case InterruptedBySignal:
@ -192,7 +192,7 @@ public:
}
}
bool timed_out() const
[[nodiscard]] bool timed_out() const
{
return m_type == InterruptedByTimeout;
}
@ -330,7 +330,7 @@ public:
{
return m_was_interrupted_by_signal;
}
bool was_interrupted() const
[[nodiscard]] bool was_interrupted() const
{
return m_was_interrupted_by_death || m_was_interrupted_by_signal != 0;
}
@ -734,10 +734,10 @@ public:
void resume_from_stopped();
bool should_be_stopped() const;
bool is_stopped() const { return m_state == Stopped; }
bool is_blocked() const { return m_state == Blocked; }
bool is_in_block() const
[[nodiscard]] bool should_be_stopped() const;
[[nodiscard]] bool is_stopped() const { return m_state == Stopped; }
[[nodiscard]] bool is_blocked() const { return m_state == Blocked; }
[[nodiscard]] bool is_in_block() const
{
ScopedSpinLock lock(m_block_lock);
return m_in_block;
@ -932,7 +932,7 @@ public:
// Tell this thread to unblock if needed,
// gracefully unwind the stack and die.
void set_should_die();
bool should_die() const { return m_should_die; }
[[nodiscard]] bool should_die() const { return m_should_die; }
void die_if_needed();
void exit(void* = nullptr);
@ -946,7 +946,7 @@ public:
void set_state(State, u8 = 0);
bool is_initialized() const { return m_initialized; }
[[nodiscard]] bool is_initialized() const { return m_initialized; }
void set_initialized(bool initialized) { m_initialized = initialized; }
void send_urgent_signal_to_self(u8 signal);
@ -963,11 +963,11 @@ public:
DispatchSignalResult try_dispatch_one_pending_signal(u8 signal);
DispatchSignalResult dispatch_signal(u8 signal);
void check_dispatch_pending_signal();
bool has_unmasked_pending_signals() const { return m_have_any_unmasked_pending_signals.load(AK::memory_order_consume); }
[[nodiscard]] bool has_unmasked_pending_signals() const { return m_have_any_unmasked_pending_signals.load(AK::memory_order_consume); }
void terminate_due_to_signal(u8 signal);
bool should_ignore_signal(u8 signal) const;
bool has_signal_handler(u8 signal) const;
bool has_pending_signal(u8 signal) const;
[[nodiscard]] bool should_ignore_signal(u8 signal) const;
[[nodiscard]] bool has_signal_handler(u8 signal) const;
[[nodiscard]] bool has_pending_signal(u8 signal) const;
u32 pending_signals() const;
u32 pending_signals_for_state() const;
@ -1030,12 +1030,13 @@ public:
{
m_is_active.store(active, AK::memory_order_release);
}
bool is_active() const
[[nodiscard]] bool is_active() const
{
return m_is_active.load(AK::MemoryOrder::memory_order_acquire);
}
bool is_finalizable() const
[[nodiscard]] bool is_finalizable() const
{
// We can't finalize as long as this thread is still running
// Note that checking for Running state here isn't sufficient
@ -1060,7 +1061,7 @@ public:
template<typename Callback>
static IterationDecision for_each(Callback);
static bool is_runnable_state(Thread::State state)
[[nodiscard]] static bool is_runnable_state(Thread::State state)
{
return state == Thread::State::Running || state == Thread::State::Runnable;
}