diff --git a/Userland/Libraries/LibJS/Heap/Heap.cpp b/Userland/Libraries/LibJS/Heap/Heap.cpp
index 22a0589a5a6..fed82c1ac8a 100644
--- a/Userland/Libraries/LibJS/Heap/Heap.cpp
+++ b/Userland/Libraries/LibJS/Heap/Heap.cpp
@@ -7,6 +7,8 @@
 #include <AK/Badge.h>
 #include <AK/Debug.h>
 #include <AK/HashTable.h>
+#include <AK/JsonArray.h>
+#include <AK/JsonObject.h>
 #include <AK/StackInfo.h>
 #include <AK/TemporaryChange.h>
 #include <LibCore/ElapsedTimer.h>
@@ -36,6 +38,7 @@ static int gc_perf_string_id;
 
 // NOTE: We keep a per-thread list of custom ranges. This hinges on the assumption that there is one JS VM per thread.
 static __thread HashMap<FlatPtr*, size_t>* s_custom_ranges_for_conservative_scan = nullptr;
+static __thread HashMap<FlatPtr*, SourceLocation*>* s_safe_function_locations = nullptr;
 
 Heap::Heap(VM& vm)
     : HeapBase(vm)
@@ -91,6 +94,107 @@ Cell* Heap::allocate_cell(size_t size)
     return allocator.allocate_cell(*this);
 }
 
+class GraphConstructorVisitor final : public Cell::Visitor {
+public:
+    explicit GraphConstructorVisitor(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
+    {
+        for (auto* root : roots.keys()) {
+            visit(root);
+            auto& graph_node = m_graph.ensure(reinterpret_cast<FlatPtr>(root));
+            graph_node.class_name = root->class_name();
+            graph_node.root_origin = *roots.get(root);
+        }
+    }
+
+    virtual void visit_impl(Cell& cell) override
+    {
+        if (m_node_being_visited)
+            m_node_being_visited->edges.set(reinterpret_cast<FlatPtr>(&cell));
+
+        if (m_graph.get(reinterpret_cast<FlatPtr>(&cell)).has_value())
+            return;
+
+        m_work_queue.append(cell);
+    }
+
+    void visit_all_cells()
+    {
+        while (!m_work_queue.is_empty()) {
+            auto ptr = reinterpret_cast<FlatPtr>(&m_work_queue.last());
+            m_node_being_visited = &m_graph.ensure(ptr);
+            m_node_being_visited->class_name = m_work_queue.last().class_name();
+            m_work_queue.take_last().visit_edges(*this);
+            m_node_being_visited = nullptr;
+        }
+    }
+
+    void dump()
+    {
+        auto graph = AK::JsonObject();
+        for (auto& it : m_graph) {
+            AK::JsonArray edges;
+            for (auto const& value : it.value.edges) {
+                edges.must_append(DeprecatedString::formatted("{}", value));
+            }
+
+            auto node = AK::JsonObject();
+            if (it.value.root_origin.has_value()) {
+                it.value.root_origin->visit(
+                    [&](HeapRootType location) {
+                        switch (location) {
+                        case HeapRootType::Handle:
+                            node.set("root"sv, "Handle"sv);
+                            return;
+                        case HeapRootType::MarkedVector:
+                            node.set("root"sv, "MarkedVector");
+                            return;
+                        case HeapRootType::RegisterPointer:
+                            node.set("root"sv, "RegisterPointer");
+                            return;
+                        case HeapRootType::StackPointer:
+                            node.set("root"sv, "StackPointer");
+                            return;
+                        case HeapRootType::VM:
+                            node.set("root"sv, "VM");
+                            return;
+                        default:
+                            VERIFY_NOT_REACHED();
+                        }
+                    },
+                    [&](SourceLocation* location) {
+                        node.set("root", DeprecatedString::formatted("SafeFunction {} {}:{}", location->function_name(), location->filename(), location->line_number()));
+                    });
+            }
+            node.set("class_name"sv, it.value.class_name);
+            node.set("edges"sv, edges);
+            graph.set(DeprecatedString::number(it.key), node);
+        }
+
+        dbgln("{}", graph.to_deprecated_string());
+    }
+
+private:
+    struct GraphNode {
+        Optional<HeapRootTypeOrLocation> root_origin;
+        StringView class_name;
+        HashTable<FlatPtr> edges {};
+    };
+
+    GraphNode* m_node_being_visited { nullptr };
+    Vector<Cell&> m_work_queue;
+    HashMap<FlatPtr, GraphNode> m_graph;
+};
+
+void Heap::dump_graph()
+{
+    HashMap<Cell*, HeapRootTypeOrLocation> roots;
+    gather_roots(roots);
+    GraphConstructorVisitor visitor(roots);
+    vm().bytecode_interpreter().visit_edges(visitor);
+    visitor.visit_all_cells();
+    visitor.dump();
+}
+
 void Heap::collect_garbage(CollectionType collection_type, bool print_report)
 {
     VERIFY(!m_collecting_garbage);
@@ -110,7 +214,7 @@ void Heap::collect_garbage(CollectionType collection_type, bool print_report)
             m_should_gc_when_deferral_ends = true;
             return;
         }
-        HashTable<Cell*> roots;
+        HashMap<Cell*, HeapRootTypeOrLocation> roots;
         gather_roots(roots);
         mark_live_cells(roots);
     }
@@ -118,44 +222,44 @@ void Heap::collect_garbage(CollectionType collection_type, bool print_report)
         sweep_dead_cells(print_report, collection_measurement_timer);
 }
 
-void Heap::gather_roots(HashTable<Cell*>& roots)
+void Heap::gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots)
 {
     vm().gather_roots(roots);
 
     gather_conservative_roots(roots);
 
     for (auto& handle : m_handles)
-        roots.set(handle.cell());
+        roots.set(handle.cell(), HeapRootType::Handle);
 
     for (auto& vector : m_marked_vectors)
         vector.gather_roots(roots);
 
     if constexpr (HEAP_DEBUG) {
         dbgln("gather_roots:");
-        for (auto* root : roots)
+        for (auto* root : roots.keys())
            dbgln("  + {}", root);
    }
 }
 
-static void add_possible_value(HashTable<FlatPtr>& possible_pointers, FlatPtr data)
+static void add_possible_value(HashMap<FlatPtr, HeapRootTypeOrLocation>& possible_pointers, FlatPtr data, HeapRootTypeOrLocation origin)
 {
     if constexpr (sizeof(FlatPtr*) == sizeof(Value)) {
         // Because Value stores pointers in non-canonical form we have to check if the top bytes
         // match any pointer-backed tag, in that case we have to extract the pointer to its
         // canonical form and add that as a possible pointer.
         if ((data & SHIFTED_IS_CELL_PATTERN) == SHIFTED_IS_CELL_PATTERN)
-            possible_pointers.set(Value::extract_pointer_bits(data));
+            possible_pointers.set(Value::extract_pointer_bits(data), move(origin));
         else
-            possible_pointers.set(data);
+            possible_pointers.set(data, move(origin));
     } else {
         static_assert((sizeof(Value) % sizeof(FlatPtr*)) == 0);
         // In the 32-bit case we will look at the top and bottom part of Value separately we just
         // add both the upper and lower bytes as possible pointers.
-        possible_pointers.set(data);
+        possible_pointers.set(data, move(origin));
     }
 }
 
 #ifdef HAS_ADDRESS_SANITIZER
-__attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(HashTable<FlatPtr>& possible_pointers, FlatPtr addr)
+__attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRootTypeOrLocation>& possible_pointers, FlatPtr addr)
 {
     void* begin = nullptr;
     void* end = nullptr;
@@ -166,17 +270,17 @@ __attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(
             void const* real_address = *real_stack_addr;
             if (real_address == nullptr)
                 continue;
-            add_possible_value(possible_pointers, reinterpret_cast<FlatPtr>(real_address));
+            add_possible_value(possible_pointers, reinterpret_cast<FlatPtr>(real_address), HeapRootType::StackPointer);
         }
     }
 }
 #else
-void Heap::gather_asan_fake_stack_roots(HashTable<FlatPtr>&, FlatPtr)
+void Heap::gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRootTypeOrLocation>&, FlatPtr)
 {
 }
 #endif
 
-__attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(HashTable<Cell*>& roots)
+__attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots)
 {
     FlatPtr dummy;
 
@@ -185,19 +289,19 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
     jmp_buf buf;
     setjmp(buf);
 
-    HashTable<FlatPtr> possible_pointers;
+    HashMap<FlatPtr, HeapRootTypeOrLocation> possible_pointers;
 
     auto* raw_jmp_buf = reinterpret_cast<FlatPtr const*>(buf);
 
     for (size_t i = 0; i < ((size_t)sizeof(buf)) / sizeof(FlatPtr); ++i)
-        add_possible_value(possible_pointers, raw_jmp_buf[i]);
+        add_possible_value(possible_pointers, raw_jmp_buf[i], HeapRootType::RegisterPointer);
 
     auto stack_reference = bit_cast<FlatPtr>(&dummy);
     auto& stack_info = m_vm.stack_info();
 
     for (FlatPtr stack_address = stack_reference; stack_address < stack_info.top(); stack_address += sizeof(FlatPtr)) {
         auto data = *reinterpret_cast<FlatPtr*>(stack_address);
-        add_possible_value(possible_pointers, data);
+        add_possible_value(possible_pointers, data, HeapRootType::StackPointer);
         gather_asan_fake_stack_roots(possible_pointers, data);
     }
@@ -206,7 +310,8 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
     if (s_custom_ranges_for_conservative_scan) {
         for (auto& custom_range : *s_custom_ranges_for_conservative_scan) {
             for (size_t i = 0; i < (custom_range.value / sizeof(FlatPtr)); ++i) {
-                add_possible_value(possible_pointers, custom_range.key[i]);
+                auto safe_function_location = s_safe_function_locations->get(custom_range.key);
+                add_possible_value(possible_pointers, custom_range.key[i], *safe_function_location);
             }
         }
     }
@@ -217,7 +322,7 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
         return IterationDecision::Continue;
     });
 
-    for (auto possible_pointer : possible_pointers) {
+    for (auto possible_pointer : possible_pointers.keys()) {
         if (!possible_pointer)
             continue;
         dbgln_if(HEAP_DEBUG, "  ? {}", (void const*)possible_pointer);
@@ -226,7 +331,7 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
             if (auto* cell = possible_heap_block->cell_from_possible_pointer(possible_pointer)) {
                 if (cell->state() == Cell::State::Live) {
                     dbgln_if(HEAP_DEBUG, "  ?-> {}", (void const*)cell);
-                    roots.set(cell);
+                    roots.set(cell, *possible_pointers.get(possible_pointer));
                 } else {
                     dbgln_if(HEAP_DEBUG, "  #-> {}", (void const*)cell);
                 }
@@ -237,9 +342,9 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
 
 class MarkingVisitor final : public Cell::Visitor {
 public:
-    explicit MarkingVisitor(HashTable<Cell*> const& roots)
+    explicit MarkingVisitor(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
     {
-        for (auto* root : roots) {
+        for (auto* root : roots.keys()) {
             visit(root);
         }
     }
@@ -265,7 +370,7 @@ private:
     Vector<Cell&> m_work_queue;
 };
 
-void Heap::mark_live_cells(HashTable<Cell*> const& roots)
+void Heap::mark_live_cells(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
 {
     dbgln_if(HEAP_DEBUG, "mark_live_cells:");
 
@@ -432,21 +537,28 @@ void Heap::uproot_cell(Cell* cell)
     m_uprooted_cells.append(cell);
 }
 
-void register_safe_function_closure(void* base, size_t size)
+void register_safe_function_closure(void* base, size_t size, SourceLocation* location)
 {
     if (!s_custom_ranges_for_conservative_scan) {
         // FIXME: This per-thread HashMap is currently leaked on thread exit.
         s_custom_ranges_for_conservative_scan = new HashMap<FlatPtr*, size_t>;
     }
+    if (!s_safe_function_locations) {
+        s_safe_function_locations = new HashMap<FlatPtr*, SourceLocation*>;
+    }
     auto result = s_custom_ranges_for_conservative_scan->set(reinterpret_cast<FlatPtr*>(base), size);
     VERIFY(result == AK::HashSetResult::InsertedNewEntry);
+    result = s_safe_function_locations->set(reinterpret_cast<FlatPtr*>(base), location);
+    VERIFY(result == AK::HashSetResult::InsertedNewEntry);
 }
 
-void unregister_safe_function_closure(void* base, size_t)
+void unregister_safe_function_closure(void* base, size_t, SourceLocation*)
 {
     VERIFY(s_custom_ranges_for_conservative_scan);
-    bool did_remove = s_custom_ranges_for_conservative_scan->remove(reinterpret_cast<FlatPtr*>(base));
-    VERIFY(did_remove);
+    bool did_remove_range = s_custom_ranges_for_conservative_scan->remove(reinterpret_cast<FlatPtr*>(base));
+    VERIFY(did_remove_range);
+    bool did_remove_location = s_safe_function_locations->remove(reinterpret_cast<FlatPtr*>(base));
+    VERIFY(did_remove_location);
 }
 
 }
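Aside on the Heap.cpp changes: GraphConstructorVisitor is a plain worklist traversal. Every discovered cell becomes a graph node keyed by its address, edges are recorded against the node currently being visited, and cells already present in the map are not enqueued again, so cycles terminate. A minimal stand-alone sketch of that shape, using standard-library containers instead of AK ones (names here are illustrative, not LibJS API):

#include <cstdint>
#include <iostream>
#include <map>
#include <set>
#include <vector>

struct Node {
    std::vector<Node*> children;
};

// Maps each node's address to the addresses it points at, like m_graph.
using Graph = std::map<std::uintptr_t, std::set<std::uintptr_t>>;

static Graph build_graph(std::vector<Node*> const& roots)
{
    Graph graph;
    std::vector<Node*> work_queue;
    for (auto* root : roots) {
        graph[reinterpret_cast<std::uintptr_t>(root)]; // ensure a node per root
        work_queue.push_back(root);
    }
    while (!work_queue.empty()) {
        Node* current = work_queue.back();
        work_queue.pop_back();
        auto& edges = graph[reinterpret_cast<std::uintptr_t>(current)];
        for (Node* child : current->children) {
            edges.insert(reinterpret_cast<std::uintptr_t>(child));
            // Like visit_impl(): only enqueue cells not seen before.
            if (graph.emplace(reinterpret_cast<std::uintptr_t>(child), std::set<std::uintptr_t> {}).second)
                work_queue.push_back(child);
        }
    }
    return graph;
}

int main()
{
    Node a, b, c;
    a.children = { &b, &c };
    b.children = { &c };
    c.children = { &a }; // a cycle is fine; a is already in the graph
    std::cout << build_graph({ &a }).size() << " nodes\n"; // prints "3 nodes"
}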
diff --git a/Userland/Libraries/LibJS/Heap/Heap.h b/Userland/Libraries/LibJS/Heap/Heap.h
index 657e1958160..32999fc141b 100644
--- a/Userland/Libraries/LibJS/Heap/Heap.h
+++ b/Userland/Libraries/LibJS/Heap/Heap.h
@@ -19,6 +19,7 @@
 #include <LibJS/Heap/Cell.h>
 #include <LibJS/Heap/CellAllocator.h>
 #include <LibJS/Heap/Handle.h>
+#include <LibJS/Heap/HeapRootTypeOrLocation.h>
 #include <LibJS/Heap/Internals.h>
 #include <LibJS/Heap/MarkedVector.h>
 #include <LibJS/Runtime/WeakContainer.h>
@@ -58,6 +59,7 @@ public:
     };
 
     void collect_garbage(CollectionType = CollectionType::CollectGarbage, bool print_report = false);
+    void dump_graph();
 
     bool should_collect_on_every_allocation() const { return m_should_collect_on_every_allocation; }
     void set_should_collect_on_every_allocation(bool b) { m_should_collect_on_every_allocation = b; }
@@ -83,10 +85,10 @@ private:
 
     Cell* allocate_cell(size_t);
 
-    void gather_roots(HashTable<Cell*>&);
-    void gather_conservative_roots(HashTable<Cell*>&);
-    void gather_asan_fake_stack_roots(HashTable<FlatPtr>&, FlatPtr);
-    void mark_live_cells(HashTable<Cell*> const& live_cells);
+    void gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>&);
+    void gather_conservative_roots(HashMap<Cell*, HeapRootTypeOrLocation>&);
+    void gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRootTypeOrLocation>&, FlatPtr);
+    void mark_live_cells(HashMap<Cell*, HeapRootTypeOrLocation> const& live_cells);
     void finalize_unmarked_cells();
     void sweep_dead_cells(bool print_report, Core::ElapsedTimer const&);
diff --git a/Userland/Libraries/LibJS/Heap/HeapRootTypeOrLocation.h b/Userland/Libraries/LibJS/Heap/HeapRootTypeOrLocation.h
new file mode 100644
index 00000000000..1f86e4cc348
--- /dev/null
+++ b/Userland/Libraries/LibJS/Heap/HeapRootTypeOrLocation.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2023, Aliaksandr Kalenik <kalenik.aliaksandr@gmail.com>
+ *
+ * SPDX-License-Identifier: BSD-2-Clause
+ */
+
+#pragma once
+
+#include <AK/SourceLocation.h>
+
+namespace JS {
+
+enum class HeapRootType {
+    Handle,
+    MarkedVector,
+    RegisterPointer,
+    StackPointer,
+    VM,
+};
+
+using HeapRootTypeOrLocation = Variant<HeapRootType, SourceLocation*>;
+
+}
diff --git a/Userland/Libraries/LibJS/Heap/MarkedVector.h b/Userland/Libraries/LibJS/Heap/MarkedVector.h
index 3a28d8462ba..5546c6d7fcb 100644
--- a/Userland/Libraries/LibJS/Heap/MarkedVector.h
+++ b/Userland/Libraries/LibJS/Heap/MarkedVector.h
@@ -7,17 +7,18 @@
 
 #pragma once
 
-#include <AK/HashTable.h>
+#include <AK/HashMap.h>
 #include <AK/IntrusiveList.h>
 #include <AK/Vector.h>
 #include <LibJS/Forward.h>
 #include <LibJS/Heap/Cell.h>
+#include <LibJS/Heap/HeapRootTypeOrLocation.h>
 
 namespace JS {
 
 class MarkedVectorBase {
 public:
-    virtual void gather_roots(HashTable<Cell*>&) const = 0;
+    virtual void gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>&) const = 0;
 
 protected:
     explicit MarkedVectorBase(Heap&);
@@ -64,14 +65,14 @@ public:
         return *this;
     }
 
-    virtual void gather_roots(HashTable<Cell*>& roots) const override
+    virtual void gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots) const override
     {
         for (auto& value : *this) {
             if constexpr (IsSame<Value, T>) {
                 if (value.is_cell())
-                    roots.set(&const_cast<Value&>(value).as_cell());
+                    roots.set(&const_cast<Value&>(value).as_cell(), HeapRootType::MarkedVector);
             } else {
-                roots.set(value);
+                roots.set(value, HeapRootType::MarkedVector);
             }
         }
     }
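Aside on the new header: HeapRootTypeOrLocation is a sum type, either a coarse root category or a pointer to the SourceLocation of the SafeFunction that pinned the cell, and the pair of lambdas in GraphConstructorVisitor::dump() is the usual visit pattern over such a Variant. A stand-alone sketch of the same pattern, using std::variant and C++20 std::source_location as stand-ins for the AK types (illustrative only):

#include <iostream>
#include <source_location>
#include <string>
#include <type_traits>
#include <variant>

enum class HeapRootType { Handle, MarkedVector, RegisterPointer, StackPointer, VM };
using RootOrigin = std::variant<HeapRootType, std::source_location const*>;

// Renders an origin the way dump() does: a fixed name for the enum case,
// "SafeFunction <function> <file>:<line>" for the location case.
static std::string describe(RootOrigin const& origin)
{
    return std::visit(
        [](auto const& value) -> std::string {
            if constexpr (std::is_same_v<std::decay_t<decltype(value)>, HeapRootType>) {
                switch (value) {
                case HeapRootType::Handle: return "Handle";
                case HeapRootType::MarkedVector: return "MarkedVector";
                case HeapRootType::RegisterPointer: return "RegisterPointer";
                case HeapRootType::StackPointer: return "StackPointer";
                case HeapRootType::VM: return "VM";
                }
                return "?";
            } else {
                return std::string("SafeFunction ") + value->function_name() + ' '
                    + value->file_name() + ':' + std::to_string(value->line());
            }
        },
        origin);
}

int main()
{
    static auto const creation_site = std::source_location::current();
    std::cout << describe(HeapRootType::Handle) << '\n';
    std::cout << describe(&creation_site) << '\n';
}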
diff --git a/Userland/Libraries/LibJS/Runtime/VM.cpp b/Userland/Libraries/LibJS/Runtime/VM.cpp
index a8cfd913e3e..d60089b886f 100644
--- a/Userland/Libraries/LibJS/Runtime/VM.cpp
+++ b/Userland/Libraries/LibJS/Runtime/VM.cpp
@@ -192,29 +192,29 @@ Bytecode::Interpreter& VM::bytecode_interpreter()
     return *m_bytecode_interpreter;
 }
 
-void VM::gather_roots(HashTable<Cell*>& roots)
+void VM::gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots)
 {
-    roots.set(m_empty_string);
+    roots.set(m_empty_string, HeapRootType::VM);
     for (auto string : m_single_ascii_character_strings)
-        roots.set(string);
+        roots.set(string, HeapRootType::VM);
 
     auto gather_roots_from_execution_context_stack = [&roots](Vector<NonnullOwnPtr<ExecutionContext>> const& stack) {
         for (auto& execution_context : stack) {
             if (execution_context->this_value.is_cell())
-                roots.set(&execution_context->this_value.as_cell());
+                roots.set(&execution_context->this_value.as_cell(), HeapRootType::VM);
             for (auto& argument : execution_context->arguments) {
                 if (argument.is_cell())
-                    roots.set(&argument.as_cell());
+                    roots.set(&argument.as_cell(), HeapRootType::VM);
             }
-            roots.set(execution_context->lexical_environment);
-            roots.set(execution_context->variable_environment);
-            roots.set(execution_context->private_environment);
+            roots.set(execution_context->lexical_environment, HeapRootType::VM);
+            roots.set(execution_context->variable_environment, HeapRootType::VM);
+            roots.set(execution_context->private_environment, HeapRootType::VM);
             if (auto context_owner = execution_context->context_owner)
-                roots.set(context_owner);
+                roots.set(context_owner, HeapRootType::VM);
             execution_context->script_or_module.visit(
                 [](Empty) {},
                 [&](auto& script_or_module) {
-                    roots.set(script_or_module.ptr());
+                    roots.set(script_or_module.ptr(), HeapRootType::VM);
                 });
         }
     };
@@ -224,15 +224,15 @@ void VM::gather_roots(HashTable<Cell*>& roots)
         gather_roots_from_execution_context_stack(saved_stack);
 
 #define __JS_ENUMERATE(SymbolName, snake_name) \
-    roots.set(m_well_known_symbols.snake_name);
+    roots.set(m_well_known_symbols.snake_name, HeapRootType::VM);
     JS_ENUMERATE_WELL_KNOWN_SYMBOLS
 #undef __JS_ENUMERATE
 
     for (auto& symbol : m_global_symbol_registry)
-        roots.set(symbol.value);
+        roots.set(symbol.value, HeapRootType::VM);
 
     for (auto finalization_registry : m_finalization_registry_cleanup_jobs)
-        roots.set(finalization_registry);
+        roots.set(finalization_registry, HeapRootType::VM);
 }
 
 ThrowCompletionOr<Value> VM::named_evaluation_if_anonymous_function(ASTNode const& expression, DeprecatedFlyString const& name)
diff --git a/Userland/Libraries/LibJS/Runtime/VM.h b/Userland/Libraries/LibJS/Runtime/VM.h
index 33d82e03f21..9c52a70c5fd 100644
--- a/Userland/Libraries/LibJS/Runtime/VM.h
+++ b/Userland/Libraries/LibJS/Runtime/VM.h
@@ -47,7 +47,7 @@ public:
 
     void dump_backtrace() const;
 
-    void gather_roots(HashTable<Cell*>&);
+    void gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>&);
 
 #define __JS_ENUMERATE(SymbolName, snake_name) \
     NonnullGCPtr<Symbol> well_known_symbol_##snake_name() const \
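Aside before the SafeFunction changes below: they hinge on a defaulted SourceLocation parameter. A default argument is evaluated at the call site, so every SafeFunction construction records where it happened without any change to existing callers. The same trick with C++20 std::source_location as a stand-in for AK::SourceLocation (the Closure type is hypothetical):

#include <iostream>
#include <source_location>

struct Closure {
    std::source_location location;

    // Evaluated where the constructor is *called*, not where it is defined.
    explicit Closure(std::source_location loc = std::source_location::current())
        : location(loc)
    {
    }
};

int main()
{
    Closure a; // a.location points at this line, in main()
    std::cout << a.location.function_name() << ':' << a.location.line() << '\n';
}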
diff --git a/Userland/Libraries/LibJS/SafeFunction.h b/Userland/Libraries/LibJS/SafeFunction.h
index caf959abbc4..17f1004e34c 100644
--- a/Userland/Libraries/LibJS/SafeFunction.h
+++ b/Userland/Libraries/LibJS/SafeFunction.h
@@ -9,11 +9,12 @@
 #pragma once
 
 #include <AK/Function.h>
+#include <AK/SourceLocation.h>
 
 namespace JS {
 
-void register_safe_function_closure(void*, size_t);
-void unregister_safe_function_closure(void*, size_t);
+void register_safe_function_closure(void*, size_t, SourceLocation*);
+void unregister_safe_function_closure(void*, size_t, SourceLocation*);
 
 template<typename>
 class SafeFunction;
@@ -38,7 +39,7 @@ public:
         if (!m_size)
             return;
         if (auto* wrapper = callable_wrapper())
-            register_safe_function_closure(wrapper, m_size);
+            register_safe_function_closure(wrapper, m_size, &m_location);
     }
 
     void unregister_closure()
@@ -46,24 +47,27 @@ public:
         if (!m_size)
             return;
         if (auto* wrapper = callable_wrapper())
-            unregister_safe_function_closure(wrapper, m_size);
+            unregister_safe_function_closure(wrapper, m_size, &m_location);
     }
 
     template<typename CallableType>
-    SafeFunction(CallableType&& callable)
+    SafeFunction(CallableType&& callable, SourceLocation location = SourceLocation::current())
     requires((AK::IsFunctionObject<CallableType> && IsCallableWithArguments<CallableType, Out, In...> && !IsSame<RemoveCVReference<CallableType>, SafeFunction>))
+        : m_location(location)
     {
         init_with_callable(forward<CallableType>(callable), CallableKind::FunctionObject);
     }
 
     template<typename FunctionType>
-    SafeFunction(FunctionType f)
+    SafeFunction(FunctionType f, SourceLocation location = SourceLocation::current())
     requires((AK::IsFunctionPointer<FunctionType> && IsCallableWithArguments<RemovePointer<FunctionType>, Out, In...> && !IsSame<RemoveCVReference<FunctionType>, SafeFunction>))
+        : m_location(location)
     {
         init_with_callable(move(f), CallableKind::FunctionPointer);
     }
 
     SafeFunction(SafeFunction&& other)
+        : m_location(move(other.m_location))
     {
         move_from(move(other));
     }
@@ -215,6 +219,7 @@ private:
         VERIFY(m_kind == FunctionKind::NullPointer);
         auto* other_wrapper = other.callable_wrapper();
         m_size = other.m_size;
+        AK::TypedTransfer<SourceLocation>::move(&m_location, &other.m_location, 1);
         switch (other.m_kind) {
         case FunctionKind::NullPointer:
             break;
@@ -225,8 +230,10 @@ private:
             register_closure();
             break;
         case FunctionKind::Outline:
+            other.unregister_closure();
             *bit_cast<CallableWrapperBase**>(&m_storage) = other_wrapper;
             m_kind = FunctionKind::Outline;
+            register_closure();
             break;
         default:
             VERIFY_NOT_REACHED();
@@ -238,6 +245,7 @@ private:
     bool m_deferred_clear { false };
    mutable Atomic<u16> m_call_nesting_level { 0 };
     size_t m_size { 0 };
+    SourceLocation m_location;
 
     // Empirically determined to fit most lambdas and functions.
     static constexpr size_t inline_capacity = 4 * sizeof(void*);
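A closing aside on move_from(): the per-thread side tables map a closure's base address to a SourceLocation* that points into the owning SafeFunction, so the Outline case plausibly has to unregister the donor's entry and re-register with the new owner's m_location; otherwise the table would keep a pointer into the moved-from object. A registry sketch with the same shape, using std::map in place of the per-thread AK::HashMap (illustrative, not LibJS API):

#include <cassert>
#include <map>
#include <utility>

struct Location { int line; };
// Closure base address -> pointer to the owner's location field.
static std::map<void*, Location*> s_locations;

struct RegisteredClosure {
    void* base { nullptr };
    Location location;

    RegisteredClosure(void* b, Location loc)
        : base(b)
        , location(loc)
    {
        s_locations[base] = &location; // like register_closure()
    }

    RegisteredClosure(RegisteredClosure&& other)
        : base(std::exchange(other.base, nullptr))
        , location(other.location)
    {
        // Same base address as before, but the registered Location* must
        // now point into *this*, not into the moved-from object — the
        // unregister/re-register pair in move_from() has the same effect.
        s_locations[base] = &location;
    }

    ~RegisteredClosure()
    {
        if (base)
            s_locations.erase(base); // like unregister_closure()
    }
};

int main()
{
    int dummy;
    RegisteredClosure a(&dummy, Location { 42 });
    RegisteredClosure b(std::move(a));
    assert(s_locations.at(&dummy) == &b.location); // re-pointed to the new owner
}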