// SlabAllocator.cpp — fixed-size slab allocator pools for small kernel objects.
  1. #include <AK/Assertions.h>
  2. #include <Kernel/Heap/SlabAllocator.h>
  3. #include <Kernel/Heap/kmalloc.h>
  4. #include <Kernel/VM/Region.h>
  5. template<size_t templated_slab_size>
  6. class SlabAllocator {
  7. public:
  8. SlabAllocator() {}
  9. void init(size_t size)
  10. {
  11. m_base = kmalloc_eternal(size);
  12. m_end = (u8*)m_base + size;
  13. FreeSlab* slabs = (FreeSlab*)m_base;
  14. size_t slab_count = size / templated_slab_size;
  15. for (size_t i = 1; i < slab_count; ++i) {
  16. slabs[i].next = &slabs[i - 1];
  17. }
  18. slabs[0].next = nullptr;
  19. m_freelist = &slabs[slab_count - 1];
  20. m_num_allocated = 0;
  21. m_num_free = slab_count;
  22. }
  23. constexpr size_t slab_size() const { return templated_slab_size; }
  24. void* alloc()
  25. {
  26. InterruptDisabler disabler;
  27. if (!m_freelist)
  28. return kmalloc(slab_size());
  29. ASSERT(m_freelist);
  30. void* ptr = m_freelist;
  31. m_freelist = m_freelist->next;
  32. ++m_num_allocated;
  33. --m_num_free;
  34. return ptr;
  35. }
  36. void dealloc(void* ptr)
  37. {
  38. InterruptDisabler disabler;
  39. ASSERT(ptr);
  40. if (ptr < m_base || ptr >= m_end) {
  41. kfree(ptr);
  42. return;
  43. }
  44. ((FreeSlab*)ptr)->next = m_freelist;
  45. m_freelist = (FreeSlab*)ptr;
  46. ++m_num_allocated;
  47. --m_num_free;
  48. }
  49. size_t num_allocated() const { return m_num_allocated; }
  50. size_t num_free() const { return m_num_free; }
  51. private:
  52. struct FreeSlab {
  53. FreeSlab* next { nullptr };
  54. char padding[templated_slab_size - sizeof(FreeSlab*)];
  55. };
  56. FreeSlab* m_freelist { nullptr };
  57. size_t m_num_allocated { 0 };
  58. size_t m_num_free { 0 };
  59. void* m_base { nullptr };
  60. void* m_end { nullptr };
  61. static_assert(sizeof(FreeSlab) == templated_slab_size);
  62. };
// One pool per supported slab size; slab_alloc()/slab_dealloc() route
// requests to the smallest pool whose slabs fit the requested size.
static SlabAllocator<8> s_slab_allocator_8;
static SlabAllocator<16> s_slab_allocator_16;
static SlabAllocator<32> s_slab_allocator_32;
static SlabAllocator<48> s_slab_allocator_48;
// Region objects are allocated from the 48-byte pool; fail the build if
// Region ever outgrows it.
static_assert(sizeof(Region) <= s_slab_allocator_48.slab_size());
  68. template<typename Callback>
  69. void for_each_allocator(Callback callback)
  70. {
  71. callback(s_slab_allocator_8);
  72. callback(s_slab_allocator_16);
  73. callback(s_slab_allocator_32);
  74. callback(s_slab_allocator_48);
  75. }
  76. void slab_alloc_init()
  77. {
  78. s_slab_allocator_8.init(384 * KB);
  79. s_slab_allocator_16.init(128 * KB);
  80. s_slab_allocator_32.init(128 * KB);
  81. s_slab_allocator_48.init(128 * KB);
  82. }
  83. void* slab_alloc(size_t slab_size)
  84. {
  85. if (slab_size <= 8)
  86. return s_slab_allocator_8.alloc();
  87. if (slab_size <= 16)
  88. return s_slab_allocator_16.alloc();
  89. if (slab_size <= 32)
  90. return s_slab_allocator_32.alloc();
  91. if (slab_size <= 48)
  92. return s_slab_allocator_48.alloc();
  93. ASSERT_NOT_REACHED();
  94. }
  95. void slab_dealloc(void* ptr, size_t slab_size)
  96. {
  97. if (slab_size <= 8)
  98. return s_slab_allocator_8.dealloc(ptr);
  99. if (slab_size <= 16)
  100. return s_slab_allocator_16.dealloc(ptr);
  101. if (slab_size <= 32)
  102. return s_slab_allocator_32.dealloc(ptr);
  103. if (slab_size <= 48)
  104. return s_slab_allocator_48.dealloc(ptr);
  105. ASSERT_NOT_REACHED();
  106. }
  107. void slab_alloc_stats(Function<void(size_t slab_size, size_t allocated, size_t free)> callback)
  108. {
  109. for_each_allocator([&](auto& allocator) {
  110. callback(allocator.slab_size(), allocator.num_allocated(), allocator.num_free());
  111. });
  112. }