ladybird/AK/Function.h

/*
 * Copyright (C) 2016 Apple Inc. All rights reserved.
 * Copyright (c) 2021, Gunnar Beutner <gbeutner@serenityos.org>
 * Copyright (c) 2018-2023, Andreas Kling <andreas@ladybird.org>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include <AK/Assertions.h>
#include <AK/Atomic.h>
#include <AK/BitCast.h>
#include <AK/Noncopyable.h>
#include <AK/ScopeGuard.h>
#include <AK/Span.h>
#include <AK/StdLibExtras.h>
#include <AK/Types.h>

namespace AK {

// These annotations are used to avoid capturing a variable with local storage in a lambda that outlives it
#if defined(AK_COMPILER_CLANG)
#    define ESCAPING [[clang::annotate("serenity::escaping")]]
// FIXME: When we get C++23, change this to be applied to the lambda directly instead of to the types of its captures
#    define IGNORE_USE_IN_ESCAPING_LAMBDA [[clang::annotate("serenity::ignore_use_in_escaping_lambda")]]
#else
#    define ESCAPING
#    define IGNORE_USE_IN_ESCAPING_LAMBDA
#endif
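
// A sketch of intended usage (function names here are illustrative, not part of AK):
//
//     // Marks a parameter whose callable is stored beyond the call, so the static
//     // verifier can flag captures of stack variables at call sites:
//     void add_callback(ESCAPING Function<void()> callback);
//
//     // Tells the verifier a capture is known to outlive the escaping lambda:
//     IGNORE_USE_IN_ESCAPING_LAMBDA bool is_done = false;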

template<typename>
class Function;

template<typename F>
inline constexpr bool IsFunctionPointer = (IsPointer<F> && IsFunction<RemovePointer<F>>);

// Not a function pointer, and not an lvalue reference.
template<typename F>
inline constexpr bool IsFunctionObject = (!IsFunctionPointer<F> && IsRvalueReference<F&&>);

template<typename Out, typename... In>
class Function<Out(In...)> {
    AK_MAKE_NONCOPYABLE(Function);

public:
    using FunctionType = Out(In...);
    using ReturnType = Out;

    // Since AccommodateExcessiveAlignmentRequirements is true, callables aligned more
    // strictly than the inline storage are heap-allocated by init_with_callable()
    // instead of failing its ExcessiveAlignmentThreshold static_assert.
    constexpr static auto AccommodateExcessiveAlignmentRequirements = true;
    constexpr static size_t ExcessiveAlignmentThreshold = 16;

    Function() = default;
    Function(nullptr_t)
    {
    }

    ~Function()
    {
        clear(false);
    }

    // Exposes the stored wrapper (including any lambda captures) as raw bytes,
    // e.g. so a conservative scanner can look for pointers in the captured state.
    [[nodiscard]] ReadonlyBytes raw_capture_range() const
    {
        if (!m_size)
            return {};
        if (auto* wrapper = callable_wrapper())
            return ReadonlyBytes { wrapper, m_size };
        return {};
    }

    template<typename CallableType>
    Function(CallableType&& callable)
    requires((IsFunctionObject<CallableType> && IsCallableWithArguments<CallableType, Out, In...> && !IsSame<RemoveCVReference<CallableType>, Function>))
    {
        init_with_callable(forward<CallableType>(callable), CallableKind::FunctionObject);
    }

    template<typename FunctionType>
    Function(FunctionType f)
    requires((IsFunctionPointer<FunctionType> && IsCallableWithArguments<RemovePointer<FunctionType>, Out, In...> && !IsSame<RemoveCVReference<FunctionType>, Function>))
    {
        init_with_callable(move(f), CallableKind::FunctionPointer);
    }

    Function(Function&& other)
    {
        move_from(move(other));
    }

    // Note: Despite this method being const, a mutable lambda _may_ modify its own captures.
    Out operator()(In... in) const
    {
        auto* wrapper = callable_wrapper();
        VERIFY(wrapper);
        ++m_call_nesting_level;
        ScopeGuard guard([this] {
            if (--m_call_nesting_level == 0 && m_deferred_clear)
                const_cast<Function*>(this)->clear(false);
        });
        return wrapper->call(forward<In>(in)...);
    }
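
    // Because the wrapper's call() is non-const, a mutable lambda's captures can be
    // updated even through a const Function. Hypothetical example:
    //
    //     Function<int()> counter = [i = 0]() mutable { return ++i; };
    //     counter(); // 1
    //     counter(); // 2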

    explicit operator bool() const { return !!callable_wrapper(); }

    template<typename CallableType>
    Function& operator=(CallableType&& callable)
    requires((IsFunctionObject<CallableType> && IsCallableWithArguments<CallableType, Out, In...>))
    {
        clear();
        init_with_callable(forward<CallableType>(callable), CallableKind::FunctionObject);
        return *this;
    }

    template<typename FunctionType>
    Function& operator=(FunctionType f)
    requires((IsFunctionPointer<FunctionType> && IsCallableWithArguments<RemovePointer<FunctionType>, Out, In...>))
    {
        clear();
        if (f)
            init_with_callable(move(f), CallableKind::FunctionPointer);
        return *this;
    }

    Function& operator=(nullptr_t)
    {
        clear();
        return *this;
    }

    Function& operator=(Function&& other)
    {
        if (this != &other) {
            clear();
            move_from(move(other));
        }
        return *this;
    }

private:
    enum class CallableKind {
        FunctionPointer,
        FunctionObject,
    };

    class CallableWrapperBase {
    public:
        virtual ~CallableWrapperBase() = default;
        // Note: This is not const to allow storing mutable lambdas.
        virtual Out call(In...) = 0;
        virtual void destroy() = 0;
        virtual void init_and_swap(u8*, size_t) = 0;
    };

    template<typename CallableType>
    class CallableWrapper final : public CallableWrapperBase {
        AK_MAKE_NONMOVABLE(CallableWrapper);
        AK_MAKE_NONCOPYABLE(CallableWrapper);

    public:
        explicit CallableWrapper(CallableType&& callable)
            : m_callable(move(callable))
        {
        }

        Out call(In... in) final override
        {
            return m_callable(forward<In>(in)...);
        }

        void destroy() final override
        {
            delete this;
        }

        // NOLINTNEXTLINE(readability-non-const-parameter) False positive; destination is used in a placement new expression
        void init_and_swap(u8* destination, size_t size) final override
        {
            VERIFY(size >= sizeof(CallableWrapper));
            new (destination) CallableWrapper { move(m_callable) };
        }

    private:
        CallableType m_callable;
    };

    enum class FunctionKind {
        NullPointer,
        Inline,
        Outline,
    };

    CallableWrapperBase* callable_wrapper() const
    {
        switch (m_kind) {
        case FunctionKind::NullPointer:
            return nullptr;
        case FunctionKind::Inline:
            return bit_cast<CallableWrapperBase*>(&m_storage);
        case FunctionKind::Outline:
            return *bit_cast<CallableWrapperBase**>(&m_storage);
        default:
            VERIFY_NOT_REACHED();
        }
    }

    void clear(bool may_defer = true)
    {
        bool called_from_inside_function = m_call_nesting_level > 0;
        // NOTE: This VERIFY fails if a Function is destroyed from within itself.
        VERIFY(may_defer || !called_from_inside_function);
        if (called_from_inside_function && may_defer) {
            m_deferred_clear = true;
            return;
        }
        m_deferred_clear = false;
        auto* wrapper = callable_wrapper();
        if (m_kind == FunctionKind::Inline) {
            VERIFY(wrapper);
            wrapper->~CallableWrapperBase();
        } else if (m_kind == FunctionKind::Outline) {
            VERIFY(wrapper);
            wrapper->destroy();
        }
        m_kind = FunctionKind::NullPointer;
    }
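
    // Deferred clearing allows a callable to null out the Function that is currently
    // invoking it: while m_call_nesting_level is non-zero, clear() merely sets
    // m_deferred_clear, and operator()'s ScopeGuard destroys the wrapper once the
    // outermost call returns. Hypothetical example:
    //
    //     Function<void()> f;
    //     f = [&f] { f = nullptr; }; // deferred: the wrapper outlives this statement
    //     f();                       // runs, then the wrapper is destroyed
    //
    // Destroying the Function itself from inside its own invocation still trips the
    // VERIFY above, since the destructor calls clear(false).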

    template<typename Callable>
    void init_with_callable(Callable&& callable, CallableKind callable_kind)
    {
        if constexpr (alignof(Callable) > ExcessiveAlignmentThreshold && !AccommodateExcessiveAlignmentRequirements) {
            static_assert(
                alignof(Callable) <= ExcessiveAlignmentThreshold,
                "This callable object has a very large alignment requirement, "
                "check your capture list if it is a lambda expression, "
                "and make sure your callable object is not excessively aligned.");
        }
        VERIFY(m_call_nesting_level == 0);
        using WrapperType = CallableWrapper<Callable>;
        if constexpr (alignof(Callable) > inline_alignment || sizeof(WrapperType) > inline_capacity) {
            *bit_cast<CallableWrapperBase**>(&m_storage) = new WrapperType(forward<Callable>(callable));
            m_kind = FunctionKind::Outline;
        } else {
            static_assert(sizeof(WrapperType) <= inline_capacity);
            new (m_storage) WrapperType(forward<Callable>(callable));
            m_kind = FunctionKind::Inline;
        }
        if (callable_kind == CallableKind::FunctionObject)
            m_size = sizeof(WrapperType);
        else
            m_size = 0;
    }

    void move_from(Function&& other)
    {
        VERIFY(m_call_nesting_level == 0 && other.m_call_nesting_level == 0);
        auto* other_wrapper = other.callable_wrapper();
        m_size = other.m_size;
        switch (other.m_kind) {
        case FunctionKind::NullPointer:
            break;
        case FunctionKind::Inline:
            other_wrapper->init_and_swap(m_storage, inline_capacity);
            m_kind = FunctionKind::Inline;
            break;
        case FunctionKind::Outline:
            *bit_cast<CallableWrapperBase**>(&m_storage) = other_wrapper;
            m_kind = FunctionKind::Outline;
            break;
        default:
            VERIFY_NOT_REACHED();
        }
        other.m_kind = FunctionKind::NullPointer;
    }

    size_t m_size { 0 };
    FunctionKind m_kind { FunctionKind::NullPointer };
    bool m_deferred_clear { false };
    mutable Atomic<u16> m_call_nesting_level { 0 };

    static constexpr size_t inline_alignment = max(alignof(CallableWrapperBase), alignof(CallableWrapperBase*));
    // Empirically determined to fit most lambdas and functions.
    static constexpr size_t inline_capacity = 4 * sizeof(void*);

    alignas(inline_alignment) u8 m_storage[inline_capacity];
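
    // Worked example, assuming 8-byte pointers: inline_capacity is then 32 bytes, and a
    // CallableWrapper spends one pointer on its vtable, so a lambda has roughly three
    // pointer-sized captures before it spills to the heap:
    //
    //     [&a] { ... }             // one 8-byte capture, 16-byte wrapper   -> inline
    //     [&a, &b, &c] { ... }     // 24 bytes of captures, 32-byte wrapper -> inline
    //     [&a, &b, &c, &d] { ... } // 32 bytes of captures, 40-byte wrapper -> outline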
};

}

#if USING_AK_GLOBALLY
using AK::Function;
using AK::IsCallableWithArguments;
#endif