@@ -7,6 +7,8 @@
 #include <AK/Badge.h>
 #include <AK/Debug.h>
 #include <AK/HashTable.h>
+#include <AK/JsonArray.h>
+#include <AK/JsonObject.h>
 #include <AK/StackInfo.h>
 #include <AK/TemporaryChange.h>
 #include <LibCore/ElapsedTimer.h>
@@ -36,6 +38,7 @@ static int gc_perf_string_id;
 
 // NOTE: We keep a per-thread list of custom ranges. This hinges on the assumption that there is one JS VM per thread.
 static __thread HashMap<FlatPtr*, size_t>* s_custom_ranges_for_conservative_scan = nullptr;
+static __thread HashMap<FlatPtr*, SourceLocation*>* s_safe_function_locations = nullptr;
 
 Heap::Heap(VM& vm)
     : HeapBase(vm)
@@ -91,6 +94,107 @@ Cell* Heap::allocate_cell(size_t size)
     return allocator.allocate_cell(*this);
 }
 
+class GraphConstructorVisitor final : public Cell::Visitor {
+public:
+    explicit GraphConstructorVisitor(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
+    {
+        for (auto* root : roots.keys()) {
+            visit(root);
+            auto& graph_node = m_graph.ensure(reinterpret_cast<FlatPtr>(root));
+            graph_node.class_name = root->class_name();
+            graph_node.root_origin = *roots.get(root);
+        }
+    }
+
+    virtual void visit_impl(Cell& cell) override
+    {
+        if (m_node_being_visited)
+            m_node_being_visited->edges.set(reinterpret_cast<FlatPtr>(&cell));
+
+        if (m_graph.get(reinterpret_cast<FlatPtr>(&cell)).has_value())
+            return;
+
+        m_work_queue.append(cell);
+    }
+
+    void visit_all_cells()
+    {
+        while (!m_work_queue.is_empty()) {
+            auto ptr = reinterpret_cast<FlatPtr>(&m_work_queue.last());
+            m_node_being_visited = &m_graph.ensure(ptr);
+            m_node_being_visited->class_name = m_work_queue.last().class_name();
+            m_work_queue.take_last().visit_edges(*this);
+            m_node_being_visited = nullptr;
+        }
+    }
+
+    void dump()
+    {
+        auto graph = AK::JsonObject();
+        for (auto& it : m_graph) {
+            AK::JsonArray edges;
+            for (auto const& value : it.value.edges) {
+                edges.must_append(DeprecatedString::formatted("{}", value));
+            }
+
+            auto node = AK::JsonObject();
+            if (it.value.root_origin.has_value()) {
+                it.value.root_origin->visit(
+                    [&](HeapRootType location) {
+                        switch (location) {
+                        case HeapRootType::Handle:
+                            node.set("root"sv, "Handle"sv);
+                            return;
+                        case HeapRootType::MarkedVector:
+                            node.set("root"sv, "MarkedVector"sv);
+                            return;
+                        case HeapRootType::RegisterPointer:
+                            node.set("root"sv, "RegisterPointer"sv);
+                            return;
+                        case HeapRootType::StackPointer:
+                            node.set("root"sv, "StackPointer"sv);
+                            return;
+                        case HeapRootType::VM:
+                            node.set("root"sv, "VM"sv);
+                            return;
+                        default:
+                            VERIFY_NOT_REACHED();
+                        }
+                    },
+                    [&](SourceLocation* location) {
+                        node.set("root"sv, DeprecatedString::formatted("SafeFunction {} {}:{}", location->function_name(), location->filename(), location->line_number()));
+                    });
+            }
+            node.set("class_name"sv, it.value.class_name);
+            node.set("edges"sv, edges);
+            graph.set(DeprecatedString::number(it.key), node);
+        }
+
+        dbgln("{}", graph.to_deprecated_string());
+    }
+
+private:
+    struct GraphNode {
+        Optional<HeapRootTypeOrLocation> root_origin;
+        StringView class_name;
+        HashTable<FlatPtr> edges {};
+    };
+
+    GraphNode* m_node_being_visited { nullptr };
+    Vector<Cell&> m_work_queue;
+    HashMap<FlatPtr, GraphNode> m_graph;
+};
+
+void Heap::dump_graph()
+{
+    HashMap<Cell*, HeapRootTypeOrLocation> roots;
+    gather_roots(roots);
+    GraphConstructorVisitor visitor(roots);
+    vm().bytecode_interpreter().visit_edges(visitor);
+    visitor.visit_all_cells();
+    visitor.dump();
+}
+
 void Heap::collect_garbage(CollectionType collection_type, bool print_report)
 {
     VERIFY(!m_collecting_garbage);
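
For reference, dump() above emits the entire graph to the debug log as one JSON object keyed by decimal cell address. Each node carries its class_name, its outgoing edges (also decimal addresses), and, for roots, a "root" entry naming the origin; SafeFunction roots render as "SafeFunction <function> <file>:<line>". A minimal sketch of the output shape — the addresses and class names here are illustrative, not from a real run:

{
    "140234885910528": {
        "root": "Handle",
        "class_name": "Object",
        "edges": ["140234885910720"]
    },
    "140234885910720": {
        "class_name": "Shape",
        "edges": []
    }
}
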
@@ -110,7 +214,7 @@ void Heap::collect_garbage(CollectionType collection_type, bool print_report)
             m_should_gc_when_deferral_ends = true;
             return;
         }
-        HashTable<Cell*> roots;
+        HashMap<Cell*, HeapRootTypeOrLocation> roots;
         gather_roots(roots);
         mark_live_cells(roots);
     }
@@ -118,44 +222,44 @@ void Heap::collect_garbage(CollectionType collection_type, bool print_report)
     sweep_dead_cells(print_report, collection_measurement_timer);
 }
 
-void Heap::gather_roots(HashTable<Cell*>& roots)
+void Heap::gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots)
 {
     vm().gather_roots(roots);
     gather_conservative_roots(roots);
 
     for (auto& handle : m_handles)
-        roots.set(handle.cell());
+        roots.set(handle.cell(), HeapRootType::Handle);
 
     for (auto& vector : m_marked_vectors)
         vector.gather_roots(roots);
 
     if constexpr (HEAP_DEBUG) {
         dbgln("gather_roots:");
-        for (auto* root : roots)
+        for (auto* root : roots.keys())
             dbgln("  + {}", root);
     }
 }
 
-static void add_possible_value(HashTable<FlatPtr>& possible_pointers, FlatPtr data)
+static void add_possible_value(HashMap<FlatPtr, HeapRootTypeOrLocation>& possible_pointers, FlatPtr data, HeapRootTypeOrLocation origin)
 {
     if constexpr (sizeof(FlatPtr*) == sizeof(Value)) {
         // Because Value stores pointers in non-canonical form, we have to check whether the top
         // bytes match any pointer-backed tag; in that case we extract the pointer back into its
         // canonical form and add that as a possible pointer.
         if ((data & SHIFTED_IS_CELL_PATTERN) == SHIFTED_IS_CELL_PATTERN)
-            possible_pointers.set(Value::extract_pointer_bits(data));
+            possible_pointers.set(Value::extract_pointer_bits(data), move(origin));
         else
-            possible_pointers.set(data);
+            possible_pointers.set(data, move(origin));
     } else {
         static_assert((sizeof(Value) % sizeof(FlatPtr*)) == 0);
         // In the 32-bit case we look at the top and bottom halves of a Value separately, so we
         // just add both the upper and lower bytes as possible pointers.
-        possible_pointers.set(data);
+        possible_pointers.set(data, move(origin));
     }
 }
 
 #ifdef HAS_ADDRESS_SANITIZER
-__attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(HashTable<FlatPtr>& possible_pointers, FlatPtr addr)
+__attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRootTypeOrLocation>& possible_pointers, FlatPtr addr)
 {
     void* begin = nullptr;
     void* end = nullptr;
@@ -166,17 +270,17 @@ __attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(
             void const* real_address = *real_stack_addr;
             if (real_address == nullptr)
                 continue;
-            add_possible_value(possible_pointers, reinterpret_cast<FlatPtr>(real_address));
+            add_possible_value(possible_pointers, reinterpret_cast<FlatPtr>(real_address), HeapRootType::StackPointer);
         }
     }
 }
 #else
-void Heap::gather_asan_fake_stack_roots(HashTable<FlatPtr>&, FlatPtr)
+void Heap::gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRootTypeOrLocation>&, FlatPtr)
 {
 }
 #endif
 
-__attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(HashTable<Cell*>& roots)
+__attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots)
 {
     FlatPtr dummy;
 
@@ -185,19 +289,19 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
     jmp_buf buf;
     setjmp(buf);
 
-    HashTable<FlatPtr> possible_pointers;
+    HashMap<FlatPtr, HeapRootTypeOrLocation> possible_pointers;
 
     auto* raw_jmp_buf = reinterpret_cast<FlatPtr const*>(buf);
 
     for (size_t i = 0; i < ((size_t)sizeof(buf)) / sizeof(FlatPtr); ++i)
-        add_possible_value(possible_pointers, raw_jmp_buf[i]);
+        add_possible_value(possible_pointers, raw_jmp_buf[i], HeapRootType::RegisterPointer);
 
     auto stack_reference = bit_cast<FlatPtr>(&dummy);
     auto& stack_info = m_vm.stack_info();
 
     for (FlatPtr stack_address = stack_reference; stack_address < stack_info.top(); stack_address += sizeof(FlatPtr)) {
         auto data = *reinterpret_cast<FlatPtr*>(stack_address);
-        add_possible_value(possible_pointers, data);
+        add_possible_value(possible_pointers, data, HeapRootType::StackPointer);
         gather_asan_fake_stack_roots(possible_pointers, data);
     }
 
@@ -206,7 +310,8 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
     if (s_custom_ranges_for_conservative_scan) {
         for (auto& custom_range : *s_custom_ranges_for_conservative_scan) {
            for (size_t i = 0; i < (custom_range.value / sizeof(FlatPtr)); ++i) {
-                add_possible_value(possible_pointers, custom_range.key[i]);
+                auto safe_function_location = s_safe_function_locations->get(custom_range.key);
+                add_possible_value(possible_pointers, custom_range.key[i], *safe_function_location);
             }
         }
     }
@@ -217,7 +322,7 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
         return IterationDecision::Continue;
     });
 
-    for (auto possible_pointer : possible_pointers) {
+    for (auto possible_pointer : possible_pointers.keys()) {
         if (!possible_pointer)
             continue;
         dbgln_if(HEAP_DEBUG, "  ? {}", (void const*)possible_pointer);
@@ -226,7 +331,7 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
         if (auto* cell = possible_heap_block->cell_from_possible_pointer(possible_pointer)) {
             if (cell->state() == Cell::State::Live) {
                 dbgln_if(HEAP_DEBUG, "  ?-> {}", (void const*)cell);
-                roots.set(cell);
+                roots.set(cell, *possible_pointers.get(possible_pointer));
             } else {
                 dbgln_if(HEAP_DEBUG, "  #-> {}", (void const*)cell);
             }
@@ -237,9 +342,9 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
 
 class MarkingVisitor final : public Cell::Visitor {
 public:
-    explicit MarkingVisitor(HashTable<Cell*> const& roots)
+    explicit MarkingVisitor(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
     {
-        for (auto* root : roots) {
+        for (auto* root : roots.keys()) {
             visit(root);
         }
     }
@@ -265,7 +370,7 @@ private:
     Vector<Cell&> m_work_queue;
 };
 
-void Heap::mark_live_cells(HashTable<Cell*> const& roots)
+void Heap::mark_live_cells(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
 {
     dbgln_if(HEAP_DEBUG, "mark_live_cells:");
 
@@ -432,21 +537,28 @@ void Heap::uproot_cell(Cell* cell)
     m_uprooted_cells.append(cell);
 }
 
-void register_safe_function_closure(void* base, size_t size)
+void register_safe_function_closure(void* base, size_t size, SourceLocation* location)
 {
     if (!s_custom_ranges_for_conservative_scan) {
         // FIXME: This per-thread HashMap is currently leaked on thread exit.
         s_custom_ranges_for_conservative_scan = new HashMap<FlatPtr*, size_t>;
     }
+    if (!s_safe_function_locations) {
+        s_safe_function_locations = new HashMap<FlatPtr*, SourceLocation*>;
+    }
     auto result = s_custom_ranges_for_conservative_scan->set(reinterpret_cast<FlatPtr*>(base), size);
     VERIFY(result == AK::HashSetResult::InsertedNewEntry);
+    result = s_safe_function_locations->set(reinterpret_cast<FlatPtr*>(base), location);
+    VERIFY(result == AK::HashSetResult::InsertedNewEntry);
 }
 
-void unregister_safe_function_closure(void* base, size_t)
+void unregister_safe_function_closure(void* base, size_t, SourceLocation*)
 {
     VERIFY(s_custom_ranges_for_conservative_scan);
-    bool did_remove = s_custom_ranges_for_conservative_scan->remove(reinterpret_cast<FlatPtr*>(base));
-    VERIFY(did_remove);
+    bool did_remove_range = s_custom_ranges_for_conservative_scan->remove(reinterpret_cast<FlatPtr*>(base));
+    VERIFY(did_remove_range);
+    bool did_remove_location = s_safe_function_locations->remove(reinterpret_cast<FlatPtr*>(base));
+    VERIFY(did_remove_location);
 }
 
 }
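
These two hooks are the integration point for SafeFunction-style closures: a wrapper registers the byte range of its captured state so the conservative scan treats any captured cell pointers as roots, and, with this change, also hands over a SourceLocation so dump_graph() can attribute such roots to the closure's construction site. A rough, hypothetical sketch of a caller — RootedClosure is invented for illustration and the real LibJS SafeFunction differs; it assumes the hooks live in the JS namespace as in this file:

#include <AK/SourceLocation.h>
#include <AK/StdLibExtras.h>
#include <LibJS/Heap/Heap.h>

// Simplified stand-in for a SafeFunction-style wrapper (copy/move handling elided):
// it pins its captured state for the conservative scan and records where it was made.
template<typename Closure>
class RootedClosure {
public:
    explicit RootedClosure(Closure closure, AK::SourceLocation location = AK::SourceLocation::current())
        : m_closure(move(closure))
        , m_location(location)
    {
        // The GC will now scan the closure's bytes for possible Cell pointers,
        // and dump_graph() can attribute any resulting roots to m_location.
        JS::register_safe_function_closure(&m_closure, sizeof(m_closure), &m_location);
    }

    ~RootedClosure()
    {
        JS::unregister_safe_function_closure(&m_closure, sizeof(m_closure), &m_location);
    }

private:
    Closure m_closure;
    AK::SourceLocation m_location;
};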