Browse Source

Revert "LibC: Simplify malloc size classes"

This reverts commit f91bcb8895cd6b76b2977ad0632fef521ba2f1d1.
Andreas Kling, 4 years ago — parent commit 7957f13e98

+ 2 - 2
Userland/DevTools/UserspaceEmulator/MallocTracer.cpp

@@ -68,7 +68,7 @@ void MallocTracer::target_did_malloc(Badge<Emulator>, FlatPtr address, size_t si
             }));
         auto& malloc_data = *mmap_region.malloc_metadata();
 
-        bool is_chunked_block = malloc_data.chunk_size <= size_classes[size_classes.size() - 1];
+        bool is_chunked_block = malloc_data.chunk_size <= size_classes[num_size_classes - 1];
         if (is_chunked_block)
             malloc_data.mallocations.resize((ChunkedBlock::block_size - sizeof(ChunkedBlock)) / malloc_data.chunk_size);
         else
@@ -92,7 +92,7 @@ ALWAYS_INLINE Mallocation* MallocRegionMetadata::mallocation_for_address(FlatPtr
 
 ALWAYS_INLINE Optional<size_t> MallocRegionMetadata::chunk_index_for_address(FlatPtr address) const
 {
-    bool is_chunked_block = chunk_size <= size_classes[size_classes.size() - 1];
+    bool is_chunked_block = chunk_size <= size_classes[num_size_classes - 1];
     if (!is_chunked_block) {
         // This is a BigAllocationBlock
         return 0;

+ 4 - 4
Userland/Libraries/LibC/malloc.cpp

@@ -102,12 +102,12 @@ struct BigAllocator {
 // are run. Similarly, we can not allow global destructors to destruct
 // them. We could have used AK::NeverDestoyed to prevent the latter,
 // but it would have not helped with the former.
-static u8 g_allocators_storage[sizeof(Allocator) * size_classes.size()];
+static u8 g_allocators_storage[sizeof(Allocator) * num_size_classes];
 static u8 g_big_allocators_storage[sizeof(BigAllocator)];
 
-static inline Allocator (&allocators())[size_classes.size()]
+static inline Allocator (&allocators())[num_size_classes]
 {
-    return reinterpret_cast<Allocator(&)[size_classes.size()]>(g_allocators_storage);
+    return reinterpret_cast<Allocator(&)[num_size_classes]>(g_allocators_storage);
 }
 
 static inline BigAllocator (&big_allocators())[1]
@@ -442,7 +442,7 @@ void __malloc_init()
     if (secure_getenv("LIBC_PROFILE_MALLOC"))
         s_profiling = true;
 
-    for (size_t i = 0; i < size_classes.size(); ++i) {
+    for (size_t i = 0; i < num_size_classes; ++i) {
         new (&allocators()[i]) Allocator();
         allocators()[i].size = size_classes[i];
     }

+ 12 - 6
Userland/Libraries/LibC/mallocdefs.h

@@ -6,8 +6,6 @@
 
 #pragma once
 
-#include <AK/AllOf.h>
-#include <AK/Array.h>
 #include <AK/InlineLinkedList.h>
 #include <AK/Types.h>
 
@@ -16,10 +14,18 @@
 #define MALLOC_SCRUB_BYTE 0xdc
 #define FREE_SCRUB_BYTE 0xed
 
-static constexpr Array<unsigned short, 13> size_classes { 8, 16, 32, 64, 128, 256, 504, 1016, 2032, 4088, 8184, 16376, 32752 };
-static constexpr auto malloc_alignment = 8;
-static_assert(all_of(size_classes.begin(), size_classes.end(),
-    [](const auto val) { return val % malloc_alignment == 0; }));
+static constexpr unsigned short size_classes[] = { 8, 16, 32, 64, 128, 256, 504, 1016, 2032, 4088, 8184, 16376, 32752, 0 };
+static constexpr size_t num_size_classes = (sizeof(size_classes) / sizeof(unsigned short)) - 1;
+
// Compile-time sanity check: every real size class must be a multiple of 8,
// the minimum malloc alignment (the reverted version spelled this out as
// `malloc_alignment = 8`). The trailing 0 entry in size_classes[] is a
// sentinel and is deliberately excluded, since num_size_classes is defined
// as the array length minus one.
consteval bool check_size_classes_alignment()
{
    // Loop over real classes only; index num_size_classes is the 0 sentinel.
    for (size_t i = 0; i < num_size_classes; i++) {
        if ((size_classes[i] % 8) != 0)
            return false;
    }
    return true;
}
static_assert(check_size_classes_alignment());
 
 struct CommonHeader {
     size_t m_magic;