Kernel: Remove krealloc()
This was only used by a single class (AK::ByteBuffer) in the kernel and not in an OOM-safe way. Now that ByteBuffer no longer uses it, there's no need for the kernel heap to burden itself with supporting this.
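The OOM-safe alternative is for the caller to do the resize itself: allocate the new buffer first, and only copy and free the old one once that allocation has succeeded, so a failure leaves the original data intact. A minimal sketch of that pattern follows (a hypothetical helper for illustration, not the actual AK::ByteBuffer code), written against plain malloc()/free(), much as the first hunk below maps kmalloc to malloc for non-kernel builds; the kernel-side equivalent would use kmalloc() and kfree_sized().

#include <algorithm>
#include <cstdlib>
#include <cstring>

// Hypothetical helper, for illustration only: resize a buffer without
// krealloc(). Assumes new_size > 0. On allocation failure it returns false
// and leaves the original buffer untouched, which is what makes the
// pattern OOM-safe.
bool try_resize_buffer(void*& data, std::size_t old_size, std::size_t new_size)
{
    if (old_size == new_size)
        return true;

    void* new_data = std::malloc(new_size);
    if (!new_data)
        return false; // Out of memory; the caller still owns the old buffer.

    if (data) {
        std::memcpy(new_data, data, std::min(old_size, new_size));
        std::free(data);
    }
    data = new_data;
    return true;
}

Because the caller already tracks both the old and the new size, nothing in this pattern needs support from the heap itself, which is what allows the heap's reallocate() machinery to be dropped in the hunks below.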
parent 966880eb45
commit 25e850ebb1
Notes:
    sideshowbarker 2024-07-18 09:19:59 +09:00
    Author: https://github.com/awesomekling
    Commit: https://github.com/SerenityOS/serenity/commit/25e850ebb1c
4 changed files with 0 additions and 48 deletions
@@ -17,7 +17,6 @@
 #    define kmalloc malloc
 #    define kmalloc_good_size malloc_good_size
 #    define kfree free
-#    define krealloc realloc
 
 inline void kfree_sized(void* ptr, size_t)
 {

@@ -118,34 +118,6 @@ public:
         }
     }
 
-    template<typename MainHeap>
-    void* reallocate(void* ptr, size_t new_size, MainHeap& h)
-    {
-        if (!ptr)
-            return h.allocate(new_size);
-
-        auto* a = allocation_header(ptr);
-        VERIFY((u8*)a >= m_chunks && (u8*)ptr < m_chunks + m_total_chunks * CHUNK_SIZE);
-        VERIFY((u8*)a + a->allocation_size_in_chunks * CHUNK_SIZE <= m_chunks + m_total_chunks * CHUNK_SIZE);
-
-        size_t old_size = a->allocation_size_in_chunks * CHUNK_SIZE - sizeof(AllocationHeader);
-
-        if (old_size == new_size)
-            return ptr;
-
-        auto* new_ptr = h.allocate(new_size);
-        if (new_ptr) {
-            __builtin_memcpy(new_ptr, ptr, min(old_size, new_size));
-            deallocate(ptr);
-        }
-        return new_ptr;
-    }
-
-    void* reallocate(void* ptr, size_t new_size)
-    {
-        return reallocate(ptr, new_size, *this);
-    }
-
     bool contains(const void* ptr) const
     {
         const auto* a = allocation_header(ptr);

@@ -319,17 +291,6 @@ public:
         VERIFY_NOT_REACHED();
     }
 
-    void* reallocate(void* ptr, size_t new_size)
-    {
-        if (!ptr)
-            return allocate(new_size);
-        for (auto* subheap = &m_heaps; subheap; subheap = subheap->next) {
-            if (subheap->heap.contains(ptr))
-                return subheap->heap.reallocate(ptr, new_size, *this);
-        }
-        VERIFY_NOT_REACHED();
-    }
-
     HeapType& add_subheap(void* memory, size_t memory_size)
     {
         VERIFY(memory_size > sizeof(SubHeap));

@@ -298,13 +298,6 @@ void kfree(void* ptr)
     --g_nested_kfree_calls;
 }
 
-void* krealloc(void* ptr, size_t new_size)
-{
-    kmalloc_verify_nospinlock_held();
-    ScopedSpinLock lock(s_lock);
-    return g_kmalloc_global->m_heap.reallocate(ptr, new_size);
-}
-
 size_t kmalloc_good_size(size_t size)
 {
     return size;

@@ -40,7 +40,6 @@ void kmalloc_init();
 [[gnu::malloc, gnu::returns_nonnull, gnu::alloc_size(1)]] void* kmalloc_impl(size_t);
 [[gnu::malloc, gnu::returns_nonnull, gnu::alloc_size(1)]] void* kmalloc_eternal(size_t);
 
-void* krealloc(void*, size_t);
 void kfree(void*);
 void kfree_sized(void*, size_t);
 