
LibJS: Add GC graph dumper

This change introduces a very basic GC graph dumper. The `dump_graph()`
function outputs JSON data that contains information about all nodes in
the graph, including their class names and edges.

Root nodes have a property indicating their root type, or a source
location if the root is captured by a SafeFunction. It would be useful
to add source locations for other root types in the future.

The output JSON dump has the following format (excerpt):
```json
    "4908721208": {
        "class_name": "Accessor",
        "edges": [
            "4909298232",
            "4909297976"
        ]
    },
    "4907520440": {
        "root": "SafeFunction Optional Optional.h:137",
        "class_name": "Realm",
        "edges": [
            "4908269624",
            "4924821560",
            "4908409240",
            "4908483960",
            "4924527672"
        ]
    },
    "4908251320": {
        "class_name": "CSSStyleRule",
        "edges": [
            "4908302648",
            "4925101656",
            "4908251192"
        ]
    },
```
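
Such a dump can be triggered from any code that has access to the VM, for example (a hypothetical call site, not part of this commit):
```cpp
#include <LibJS/Heap/Heap.h>
#include <LibJS/Runtime/VM.h>

// Hypothetical helper: dump the current GC graph to the debug log.
// dump_graph() gathers the roots, walks all reachable cells and prints
// the resulting JSON via dbgln().
void debug_dump_heap(JS::VM& vm)
{
    vm.heap().dump_graph();
}
```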
Aliaksandr Kalenik 1 year ago
Parent
Commit
0ff29349e6

+ 137 - 25
Userland/Libraries/LibJS/Heap/Heap.cpp

@@ -7,6 +7,8 @@
 #include <AK/Badge.h>
 #include <AK/Debug.h>
 #include <AK/HashTable.h>
+#include <AK/JsonArray.h>
+#include <AK/JsonObject.h>
 #include <AK/StackInfo.h>
 #include <AK/TemporaryChange.h>
 #include <LibCore/ElapsedTimer.h>
@@ -36,6 +38,7 @@ static int gc_perf_string_id;
 
 // NOTE: We keep a per-thread list of custom ranges. This hinges on the assumption that there is one JS VM per thread.
 static __thread HashMap<FlatPtr*, size_t>* s_custom_ranges_for_conservative_scan = nullptr;
+static __thread HashMap<FlatPtr*, SourceLocation*>* s_safe_function_locations = nullptr;
 
 Heap::Heap(VM& vm)
     : HeapBase(vm)
@@ -91,6 +94,107 @@ Cell* Heap::allocate_cell(size_t size)
     return allocator.allocate_cell(*this);
 }
 
+class GraphConstructorVisitor final : public Cell::Visitor {
+public:
+    explicit GraphConstructorVisitor(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
+    {
+        for (auto* root : roots.keys()) {
+            visit(root);
+            auto& graph_node = m_graph.ensure(reinterpret_cast<FlatPtr>(root));
+            graph_node.class_name = root->class_name();
+            graph_node.root_origin = *roots.get(root);
+        }
+    }
+
+    virtual void visit_impl(Cell& cell) override
+    {
+        if (m_node_being_visited)
+            m_node_being_visited->edges.set(reinterpret_cast<FlatPtr>(&cell));
+
+        if (m_graph.get(reinterpret_cast<FlatPtr>(&cell)).has_value())
+            return;
+
+        m_work_queue.append(cell);
+    }
+
+    void visit_all_cells()
+    {
+        while (!m_work_queue.is_empty()) {
+            auto ptr = reinterpret_cast<FlatPtr>(&m_work_queue.last());
+            m_node_being_visited = &m_graph.ensure(ptr);
+            m_node_being_visited->class_name = m_work_queue.last().class_name();
+            m_work_queue.take_last().visit_edges(*this);
+            m_node_being_visited = nullptr;
+        }
+    }
+
+    void dump()
+    {
+        auto graph = AK::JsonObject();
+        for (auto& it : m_graph) {
+            AK::JsonArray edges;
+            for (auto const& value : it.value.edges) {
+                edges.must_append(DeprecatedString::formatted("{}", value));
+            }
+
+            auto node = AK::JsonObject();
+            if (it.value.root_origin.has_value()) {
+                it.value.root_origin->visit(
+                    [&](HeapRootType location) {
+                        switch (location) {
+                        case HeapRootType::Handle:
+                            node.set("root"sv, "Handle"sv);
+                            return;
+                        case HeapRootType::MarkedVector:
+                            node.set("root"sv, "MarkedVector");
+                            return;
+                        case HeapRootType::RegisterPointer:
+                            node.set("root"sv, "RegisterPointer");
+                            return;
+                        case HeapRootType::StackPointer:
+                            node.set("root"sv, "StackPointer");
+                            return;
+                        case HeapRootType::VM:
+                            node.set("root"sv, "VM");
+                            return;
+                        default:
+                            VERIFY_NOT_REACHED();
+                        }
+                    },
+                    [&](SourceLocation* location) {
+                        node.set("root", DeprecatedString::formatted("SafeFunction {} {}:{}", location->function_name(), location->filename(), location->line_number()));
+                    });
+            }
+            node.set("class_name"sv, it.value.class_name);
+            node.set("edges"sv, edges);
+            graph.set(DeprecatedString::number(it.key), node);
+        }
+
+        dbgln("{}", graph.to_deprecated_string());
+    }
+
+private:
+    struct GraphNode {
+        Optional<HeapRootTypeOrLocation> root_origin;
+        StringView class_name;
+        HashTable<FlatPtr> edges {};
+    };
+
+    GraphNode* m_node_being_visited { nullptr };
+    Vector<Cell&> m_work_queue;
+    HashMap<FlatPtr, GraphNode> m_graph;
+};
+
+void Heap::dump_graph()
+{
+    HashMap<Cell*, HeapRootTypeOrLocation> roots;
+    gather_roots(roots);
+    GraphConstructorVisitor visitor(roots);
+    vm().bytecode_interpreter().visit_edges(visitor);
+    visitor.visit_all_cells();
+    visitor.dump();
+}
+
 void Heap::collect_garbage(CollectionType collection_type, bool print_report)
 {
     VERIFY(!m_collecting_garbage);
@@ -110,7 +214,7 @@ void Heap::collect_garbage(CollectionType collection_type, bool print_report)
             m_should_gc_when_deferral_ends = true;
             return;
         }
-        HashTable<Cell*> roots;
+        HashMap<Cell*, HeapRootTypeOrLocation> roots;
         gather_roots(roots);
         mark_live_cells(roots);
     }
@@ -118,44 +222,44 @@ void Heap::collect_garbage(CollectionType collection_type, bool print_report)
     sweep_dead_cells(print_report, collection_measurement_timer);
 }
 
-void Heap::gather_roots(HashTable<Cell*>& roots)
+void Heap::gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots)
 {
     vm().gather_roots(roots);
     gather_conservative_roots(roots);
 
     for (auto& handle : m_handles)
-        roots.set(handle.cell());
+        roots.set(handle.cell(), HeapRootType::Handle);
 
     for (auto& vector : m_marked_vectors)
         vector.gather_roots(roots);
 
     if constexpr (HEAP_DEBUG) {
         dbgln("gather_roots:");
-        for (auto* root : roots)
+        for (auto* root : roots.keys())
             dbgln("  + {}", root);
     }
 }
 
-static void add_possible_value(HashTable<FlatPtr>& possible_pointers, FlatPtr data)
+static void add_possible_value(HashMap<FlatPtr, HeapRootTypeOrLocation>& possible_pointers, FlatPtr data, HeapRootTypeOrLocation origin)
 {
     if constexpr (sizeof(FlatPtr*) == sizeof(Value)) {
         // Because Value stores pointers in non-canonical form we have to check if the top bytes
         // match any pointer-backed tag, in that case we have to extract the pointer to its
         // canonical form and add that as a possible pointer.
         if ((data & SHIFTED_IS_CELL_PATTERN) == SHIFTED_IS_CELL_PATTERN)
-            possible_pointers.set(Value::extract_pointer_bits(data));
+            possible_pointers.set(Value::extract_pointer_bits(data), move(origin));
         else
-            possible_pointers.set(data);
+            possible_pointers.set(data, move(origin));
     } else {
         static_assert((sizeof(Value) % sizeof(FlatPtr*)) == 0);
         // In the 32-bit case we will look at the top and bottom part of Value separately we just
         // add both the upper and lower bytes as possible pointers.
-        possible_pointers.set(data);
+        possible_pointers.set(data, move(origin));
     }
 }
 
 #ifdef HAS_ADDRESS_SANITIZER
-__attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(HashTable<FlatPtr>& possible_pointers, FlatPtr addr)
+__attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRootTypeOrLocation>& possible_pointers, FlatPtr addr)
 {
     void* begin = nullptr;
     void* end = nullptr;
@@ -166,17 +270,17 @@ __attribute__((no_sanitize("address"))) void Heap::gather_asan_fake_stack_roots(
             void const* real_address = *real_stack_addr;
             if (real_address == nullptr)
                 continue;
-            add_possible_value(possible_pointers, reinterpret_cast<FlatPtr>(real_address));
+            add_possible_value(possible_pointers, reinterpret_cast<FlatPtr>(real_address), HeapRootType::StackPointer);
         }
     }
 }
 #else
-void Heap::gather_asan_fake_stack_roots(HashTable<FlatPtr>&, FlatPtr)
+void Heap::gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRootTypeOrLocation>&, FlatPtr)
 {
 }
 #endif
 
-__attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(HashTable<Cell*>& roots)
+__attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots)
 {
     FlatPtr dummy;
 
@@ -185,19 +289,19 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
     jmp_buf buf;
     setjmp(buf);
 
-    HashTable<FlatPtr> possible_pointers;
+    HashMap<FlatPtr, HeapRootTypeOrLocation> possible_pointers;
 
     auto* raw_jmp_buf = reinterpret_cast<FlatPtr const*>(buf);
 
     for (size_t i = 0; i < ((size_t)sizeof(buf)) / sizeof(FlatPtr); ++i)
-        add_possible_value(possible_pointers, raw_jmp_buf[i]);
+        add_possible_value(possible_pointers, raw_jmp_buf[i], HeapRootType::RegisterPointer);
 
     auto stack_reference = bit_cast<FlatPtr>(&dummy);
     auto& stack_info = m_vm.stack_info();
 
     for (FlatPtr stack_address = stack_reference; stack_address < stack_info.top(); stack_address += sizeof(FlatPtr)) {
         auto data = *reinterpret_cast<FlatPtr*>(stack_address);
-        add_possible_value(possible_pointers, data);
+        add_possible_value(possible_pointers, data, HeapRootType::StackPointer);
         gather_asan_fake_stack_roots(possible_pointers, data);
     }
 
@@ -206,7 +310,8 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
     if (s_custom_ranges_for_conservative_scan) {
         for (auto& custom_range : *s_custom_ranges_for_conservative_scan) {
             for (size_t i = 0; i < (custom_range.value / sizeof(FlatPtr)); ++i) {
-                add_possible_value(possible_pointers, custom_range.key[i]);
+                auto safe_function_location = s_safe_function_locations->get(custom_range.key);
+                add_possible_value(possible_pointers, custom_range.key[i], *safe_function_location);
             }
         }
     }
@@ -217,7 +322,7 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
         return IterationDecision::Continue;
     });
 
-    for (auto possible_pointer : possible_pointers) {
+    for (auto possible_pointer : possible_pointers.keys()) {
         if (!possible_pointer)
             continue;
         dbgln_if(HEAP_DEBUG, "  ? {}", (void const*)possible_pointer);
@@ -226,7 +331,7 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
             if (auto* cell = possible_heap_block->cell_from_possible_pointer(possible_pointer)) {
                 if (cell->state() == Cell::State::Live) {
                     dbgln_if(HEAP_DEBUG, "  ?-> {}", (void const*)cell);
-                    roots.set(cell);
+                    roots.set(cell, *possible_pointers.get(possible_pointer));
                 } else {
                     dbgln_if(HEAP_DEBUG, "  #-> {}", (void const*)cell);
                 }
@@ -237,9 +342,9 @@ __attribute__((no_sanitize("address"))) void Heap::gather_conservative_roots(Has
 
 class MarkingVisitor final : public Cell::Visitor {
 public:
-    explicit MarkingVisitor(HashTable<Cell*> const& roots)
+    explicit MarkingVisitor(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
     {
-        for (auto* root : roots) {
+        for (auto* root : roots.keys()) {
             visit(root);
         }
     }
@@ -265,7 +370,7 @@ private:
     Vector<Cell&> m_work_queue;
 };
 
-void Heap::mark_live_cells(HashTable<Cell*> const& roots)
+void Heap::mark_live_cells(HashMap<Cell*, HeapRootTypeOrLocation> const& roots)
 {
     dbgln_if(HEAP_DEBUG, "mark_live_cells:");
 
@@ -432,21 +537,28 @@ void Heap::uproot_cell(Cell* cell)
     m_uprooted_cells.append(cell);
 }
 
-void register_safe_function_closure(void* base, size_t size)
+void register_safe_function_closure(void* base, size_t size, SourceLocation* location)
 {
     if (!s_custom_ranges_for_conservative_scan) {
         // FIXME: This per-thread HashMap is currently leaked on thread exit.
         s_custom_ranges_for_conservative_scan = new HashMap<FlatPtr*, size_t>;
     }
+    if (!s_safe_function_locations) {
+        s_safe_function_locations = new HashMap<FlatPtr*, SourceLocation*>;
+    }
     auto result = s_custom_ranges_for_conservative_scan->set(reinterpret_cast<FlatPtr*>(base), size);
     VERIFY(result == AK::HashSetResult::InsertedNewEntry);
+    result = s_safe_function_locations->set(reinterpret_cast<FlatPtr*>(base), location);
+    VERIFY(result == AK::HashSetResult::InsertedNewEntry);
 }
 
-void unregister_safe_function_closure(void* base, size_t)
+void unregister_safe_function_closure(void* base, size_t, SourceLocation*)
 {
     VERIFY(s_custom_ranges_for_conservative_scan);
-    bool did_remove = s_custom_ranges_for_conservative_scan->remove(reinterpret_cast<FlatPtr*>(base));
-    VERIFY(did_remove);
+    bool did_remove_range = s_custom_ranges_for_conservative_scan->remove(reinterpret_cast<FlatPtr*>(base));
+    VERIFY(did_remove_range);
+    bool did_remove_location = s_safe_function_locations->remove(reinterpret_cast<FlatPtr*>(base));
+    VERIFY(did_remove_location);
 }
 
 }
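
One way to consume such a dump is to search for a retaining path, i.e. the chain of edges that keeps a given cell alive. A minimal sketch of that search in plain standard C++ (independent of AK; the adjacency map stands in for the parsed JSON, and all names are illustrative):
```cpp
#include <deque>
#include <map>
#include <string>
#include <vector>

// Breadth-first search from the root set to `target`.
// `graph` maps a node address to its outgoing edges ("edges" in the dump);
// `roots` lists the addresses of nodes that carry a "root" property.
// Returns one root-to-target path, or an empty vector if `target` is unreachable.
std::vector<std::string> find_retaining_path(
    std::map<std::string, std::vector<std::string>> const& graph,
    std::vector<std::string> const& roots,
    std::string const& target)
{
    std::map<std::string, std::string> parent; // child -> predecessor, doubles as visited set
    std::deque<std::string> queue;
    for (auto const& root : roots) {
        parent.emplace(root, root); // roots are their own parents
        queue.push_back(root);
    }
    while (!queue.empty()) {
        auto node = queue.front();
        queue.pop_front();
        if (node == target) {
            std::vector<std::string> path { node };
            while (parent.at(node) != node) {
                node = parent.at(node);
                path.push_back(node);
            }
            return { path.rbegin(), path.rend() }; // root first, target last
        }
        if (auto it = graph.find(node); it != graph.end()) {
            for (auto const& edge : it->second) {
                if (parent.emplace(edge, node).second) // enqueue on first visit only
                    queue.push_back(edge);
            }
        }
    }
    return {};
}
```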

+ 6 - 4
Userland/Libraries/LibJS/Heap/Heap.h

@@ -19,6 +19,7 @@
 #include <LibJS/Heap/Cell.h>
 #include <LibJS/Heap/CellAllocator.h>
 #include <LibJS/Heap/Handle.h>
+#include <LibJS/Heap/HeapRootTypeOrLocation.h>
 #include <LibJS/Heap/Internals.h>
 #include <LibJS/Heap/MarkedVector.h>
 #include <LibJS/Runtime/Completion.h>
@@ -58,6 +59,7 @@ public:
     };
 
     void collect_garbage(CollectionType = CollectionType::CollectGarbage, bool print_report = false);
+    void dump_graph();
 
     bool should_collect_on_every_allocation() const { return m_should_collect_on_every_allocation; }
     void set_should_collect_on_every_allocation(bool b) { m_should_collect_on_every_allocation = b; }
@@ -83,10 +85,10 @@ private:
 
     Cell* allocate_cell(size_t);
 
-    void gather_roots(HashTable<Cell*>&);
-    void gather_conservative_roots(HashTable<Cell*>&);
-    void gather_asan_fake_stack_roots(HashTable<FlatPtr>&, FlatPtr);
-    void mark_live_cells(HashTable<Cell*> const& live_cells);
+    void gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>&);
+    void gather_conservative_roots(HashMap<Cell*, HeapRootTypeOrLocation>&);
+    void gather_asan_fake_stack_roots(HashMap<FlatPtr, HeapRootTypeOrLocation>&, FlatPtr);
+    void mark_live_cells(HashMap<Cell*, HeapRootTypeOrLocation> const& live_cells);
     void finalize_unmarked_cells();
     void sweep_dead_cells(bool print_report, Core::ElapsedTimer const&);
 

+ 23 - 0
Userland/Libraries/LibJS/Heap/HeapRootTypeOrLocation.h

@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2023, Aliaksandr Kalenik <kalenik.aliaksandr@gmail.com>
+ *
+ * SPDX-License-Identifier: BSD-2-Clause
+ */
+
+#pragma once
+
+#include <AK/SourceLocation.h>
+
+namespace JS {
+
+enum class HeapRootType {
+    Handle,
+    MarkedVector,
+    RegisterPointer,
+    StackPointer,
+    VM,
+};
+
+using HeapRootTypeOrLocation = Variant<HeapRootType, SourceLocation*>;
+
+}

+ 6 - 5
Userland/Libraries/LibJS/Heap/MarkedVector.h

@@ -7,17 +7,18 @@
 
 #pragma once
 
-#include <AK/HashTable.h>
+#include <AK/HashMap.h>
 #include <AK/IntrusiveList.h>
 #include <AK/Vector.h>
 #include <LibJS/Forward.h>
 #include <LibJS/Heap/Cell.h>
+#include <LibJS/Heap/HeapRootTypeOrLocation.h>
 
 namespace JS {
 
 class MarkedVectorBase {
 public:
-    virtual void gather_roots(HashTable<Cell*>&) const = 0;
+    virtual void gather_roots(HashMap<Cell*, JS::HeapRootTypeOrLocation>&) const = 0;
 
 protected:
     explicit MarkedVectorBase(Heap&);
@@ -64,14 +65,14 @@ public:
         return *this;
     }
 
-    virtual void gather_roots(HashTable<Cell*>& roots) const override
+    virtual void gather_roots(HashMap<Cell*, JS::HeapRootTypeOrLocation>& roots) const override
     {
         for (auto& value : *this) {
             if constexpr (IsSame<Value, T>) {
                 if (value.is_cell())
-                    roots.set(&const_cast<T&>(value).as_cell());
+                    roots.set(&const_cast<T&>(value).as_cell(), HeapRootType::MarkedVector);
             } else {
-                roots.set(value);
+                roots.set(value, HeapRootType::MarkedVector);
             }
         }
     }

+ 13 - 13
Userland/Libraries/LibJS/Runtime/VM.cpp

@@ -192,29 +192,29 @@ Bytecode::Interpreter& VM::bytecode_interpreter()
     return *m_bytecode_interpreter;
 }
 
-void VM::gather_roots(HashTable<Cell*>& roots)
+void VM::gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>& roots)
 {
-    roots.set(m_empty_string);
+    roots.set(m_empty_string, HeapRootType::VM);
     for (auto string : m_single_ascii_character_strings)
-        roots.set(string);
+        roots.set(string, HeapRootType::VM);
 
     auto gather_roots_from_execution_context_stack = [&roots](Vector<ExecutionContext*> const& stack) {
         for (auto& execution_context : stack) {
             if (execution_context->this_value.is_cell())
-                roots.set(&execution_context->this_value.as_cell());
+                roots.set(&execution_context->this_value.as_cell(), HeapRootType::VM);
             for (auto& argument : execution_context->arguments) {
                 if (argument.is_cell())
-                    roots.set(&argument.as_cell());
+                    roots.set(&argument.as_cell(), HeapRootType::VM);
             }
-            roots.set(execution_context->lexical_environment);
-            roots.set(execution_context->variable_environment);
-            roots.set(execution_context->private_environment);
+            roots.set(execution_context->lexical_environment, HeapRootType::VM);
+            roots.set(execution_context->variable_environment, HeapRootType::VM);
+            roots.set(execution_context->private_environment, HeapRootType::VM);
             if (auto context_owner = execution_context->context_owner)
-                roots.set(context_owner);
+                roots.set(context_owner, HeapRootType::VM);
             execution_context->script_or_module.visit(
                 [](Empty) {},
                 [&](auto& script_or_module) {
-                    roots.set(script_or_module.ptr());
+                    roots.set(script_or_module.ptr(), HeapRootType::VM);
                 });
         }
     };
@@ -224,15 +224,15 @@ void VM::gather_roots(HashTable<Cell*>& roots)
         gather_roots_from_execution_context_stack(saved_stack);
 
 #define __JS_ENUMERATE(SymbolName, snake_name) \
-    roots.set(m_well_known_symbols.snake_name);
+    roots.set(m_well_known_symbols.snake_name, HeapRootType::VM);
     JS_ENUMERATE_WELL_KNOWN_SYMBOLS
 #undef __JS_ENUMERATE
 
     for (auto& symbol : m_global_symbol_registry)
-        roots.set(symbol.value);
+        roots.set(symbol.value, HeapRootType::VM);
 
     for (auto finalization_registry : m_finalization_registry_cleanup_jobs)
-        roots.set(finalization_registry);
+        roots.set(finalization_registry, HeapRootType::VM);
 }
 
 ThrowCompletionOr<Value> VM::named_evaluation_if_anonymous_function(ASTNode const& expression, DeprecatedFlyString const& name)

+ 1 - 1
Userland/Libraries/LibJS/Runtime/VM.h

@@ -47,7 +47,7 @@ public:
 
     void dump_backtrace() const;
 
-    void gather_roots(HashTable<Cell*>&);
+    void gather_roots(HashMap<Cell*, HeapRootTypeOrLocation>&);
 
 #define __JS_ENUMERATE(SymbolName, snake_name)                  \
     NonnullGCPtr<Symbol> well_known_symbol_##snake_name() const \

+ 14 - 6
Userland/Libraries/LibJS/SafeFunction.h

@@ -9,11 +9,12 @@
 #pragma once
 
 #include <AK/Function.h>
+#include <AK/SourceLocation.h>
 
 namespace JS {
 
-void register_safe_function_closure(void*, size_t);
-void unregister_safe_function_closure(void*, size_t);
+void register_safe_function_closure(void*, size_t, SourceLocation*);
+void unregister_safe_function_closure(void*, size_t, SourceLocation*);
 
 template<typename>
 class SafeFunction;
@@ -38,7 +39,7 @@ public:
         if (!m_size)
             return;
         if (auto* wrapper = callable_wrapper())
-            register_safe_function_closure(wrapper, m_size);
+            register_safe_function_closure(wrapper, m_size, &m_location);
     }
 
     void unregister_closure()
@@ -46,24 +47,27 @@ public:
         if (!m_size)
             return;
         if (auto* wrapper = callable_wrapper())
-            unregister_safe_function_closure(wrapper, m_size);
+            unregister_safe_function_closure(wrapper, m_size, &m_location);
     }
 
     template<typename CallableType>
-    SafeFunction(CallableType&& callable)
+    SafeFunction(CallableType&& callable, SourceLocation location = SourceLocation::current())
     requires((AK::IsFunctionObject<CallableType> && IsCallableWithArguments<CallableType, Out, In...> && !IsSame<RemoveCVReference<CallableType>, SafeFunction>))
+        : m_location(location)
     {
         init_with_callable(forward<CallableType>(callable), CallableKind::FunctionObject);
     }
 
     template<typename FunctionType>
-    SafeFunction(FunctionType f)
+    SafeFunction(FunctionType f, SourceLocation location = SourceLocation::current())
     requires((AK::IsFunctionPointer<FunctionType> && IsCallableWithArguments<RemovePointer<FunctionType>, Out, In...> && !IsSame<RemoveCVReference<FunctionType>, SafeFunction>))
+        : m_location(location)
     {
         init_with_callable(move(f), CallableKind::FunctionPointer);
     }
 
     SafeFunction(SafeFunction&& other)
+        : m_location(move(other.m_location))
     {
         move_from(move(other));
     }
@@ -215,6 +219,7 @@ private:
         VERIFY(m_kind == FunctionKind::NullPointer);
         auto* other_wrapper = other.callable_wrapper();
         m_size = other.m_size;
+        AK::TypedTransfer<SourceLocation>::move(&m_location, &other.m_location, 1);
         switch (other.m_kind) {
         case FunctionKind::NullPointer:
             break;
@@ -225,8 +230,10 @@ private:
             register_closure();
             break;
         case FunctionKind::Outline:
+            other.unregister_closure();
             *bit_cast<CallableWrapperBase**>(&m_storage) = other_wrapper;
             m_kind = FunctionKind::Outline;
+            register_closure();
             break;
         default:
             VERIFY_NOT_REACHED();
@@ -238,6 +245,7 @@ private:
     bool m_deferred_clear { false };
     mutable Atomic<u16> m_call_nesting_level { 0 };
     size_t m_size { 0 };
+    SourceLocation m_location;
 
     // Empirically determined to fit most lambdas and functions.
     static constexpr size_t inline_capacity = 4 * sizeof(void*);
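
To illustrate the effect of the SafeFunction changes, a hypothetical caller (assuming a LibJS build with this patch applied):
```cpp
#include <LibJS/Heap/Cell.h>
#include <LibJS/SafeFunction.h>

// Hypothetical example: constructing the SafeFunction registers its
// closure as a conservative-scan range together with the
// SourceLocation::current() of this construction site. If `cell` is a
// live heap cell, the matching root in the dump_graph() output would
// read roughly: "root": "SafeFunction example Example.cpp:<line>".
void example(JS::Cell* cell)
{
    JS::SafeFunction<void()> callback = [cell] {
        (void)cell; // captured GC pointer; kept alive while `callback` exists
    };
    callback();
}
```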