
This patch adds two macros to declare per-type allocators:

- JS_DECLARE_ALLOCATOR(TypeName)
- JS_DEFINE_ALLOCATOR(TypeName)

When used, they add a type-specific CellAllocator that the Heap will delegate allocation requests to. The result is that GC objects of the same type always end up within the same HeapBlock, drastically reducing the ability to perform type confusion attacks.

It also improves HeapBlock utilization, since each block now has cells sized exactly to the type used within that block. (Previously we only had a handful of block sizes available, and most GC allocations ended up with a large amount of slack in their tails.)

There is a small performance hit from this, but I'm sure we can make up for it elsewhere.

Note that the old size-based allocators still exist, and we fall back to them for any type that doesn't have its own CellAllocator.
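The two macros pair up across a type's header and implementation file: JS_DECLARE_ALLOCATOR goes inside the class declaration, and JS_DEFINE_ALLOCATOR goes in the matching .cpp file (as it does for WeakRef below). A minimal sketch of the pattern; the abbreviated class body shown for the header side is illustrative context inferred from the implementation file, not taken from this patch:

// WeakRef.h (sketch)
class WeakRef final : public Object
    , public WeakContainer {
    JS_OBJECT(WeakRef, Object);
    JS_DECLARE_ALLOCATOR(WeakRef); // declares WeakRef's type-specific CellAllocator

    // ... rest of the class ...
};

// WeakRef.cpp (sketch; the actual file follows below)
JS_DEFINE_ALLOCATOR(WeakRef); // defines the allocator, so the Heap places all WeakRef cells in WeakRef-only HeapBlocks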
60 lines
1.8 KiB
C++
/*
 * Copyright (c) 2021-2022, Idan Horowitz <idan.horowitz@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibJS/Runtime/Intrinsics.h>
#include <LibJS/Runtime/WeakRef.h>

namespace JS {

JS_DEFINE_ALLOCATOR(WeakRef);

NonnullGCPtr<WeakRef> WeakRef::create(Realm& realm, Object& value)
{
    return realm.heap().allocate<WeakRef>(realm, value, realm.intrinsics().weak_ref_prototype());
}

NonnullGCPtr<WeakRef> WeakRef::create(Realm& realm, Symbol& value)
{
    return realm.heap().allocate<WeakRef>(realm, value, realm.intrinsics().weak_ref_prototype());
}

WeakRef::WeakRef(Object& value, Object& prototype)
    : Object(ConstructWithPrototypeTag::Tag, prototype)
    , WeakContainer(heap())
    , m_value(&value)
    , m_last_execution_generation(vm().execution_generation())
{
}

WeakRef::WeakRef(Symbol& value, Object& prototype)
    : Object(ConstructWithPrototypeTag::Tag, prototype)
    , WeakContainer(heap())
    , m_value(&value)
    , m_last_execution_generation(vm().execution_generation())
{
}

void WeakRef::remove_dead_cells(Badge<Heap>)
{
    // Nothing to do if the target cell survived the last collection.
    if (m_value.visit([](Cell* cell) -> bool { return cell->state() == Cell::State::Live; }, [](Empty) -> bool { VERIFY_NOT_REACHED(); }))
        return;

    m_value = Empty {};
    // This is an optimization: we deregister from the garbage collector early (even if we have not been garbage collected ourselves yet)
    // to reduce garbage collection overhead, which we can do because a cleared weak ref cannot be reused.
    WeakContainer::deregister();
}

void WeakRef::visit_edges(Visitor& visitor)
{
    Base::visit_edges(visitor);

    // Keep the target alive if it was observed during the current execution generation:
    // the target must survive until the end of the synchronous run in which it was last accessed.
    if (vm().execution_generation() == m_last_execution_generation) {
        auto* cell = m_value.visit([](Cell* cell) -> Cell* { return cell; }, [](Empty) -> Cell* { return nullptr; });
        visitor.visit(cell);
    }
}

}