HeapBlock.h

/*
 * Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/IntrusiveList.h>
#include <AK/Platform.h>
#include <AK/Types.h>
#include <LibJS/Forward.h>
#include <LibJS/Heap/Cell.h>

#ifdef HAS_ADDRESS_SANITIZER
#    include <sanitizer/asan_interface.h>
#endif

namespace JS {

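// A HeapBlock is one fixed-size (16 KiB) chunk of the LibJS garbage-collected
// heap. The block header lives at the start of the chunk, and the remaining
// bytes (m_storage) are carved into equally sized cells of m_cell_size bytes.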
class HeapBlock {
    AK_MAKE_NONCOPYABLE(HeapBlock);
    AK_MAKE_NONMOVABLE(HeapBlock);

public:
    static constexpr size_t block_size = 16 * KiB;
    static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, size_t);

    size_t cell_size() const { return m_cell_size; }
    size_t cell_count() const { return (block_size - sizeof(HeapBlock)) / m_cell_size; }
    bool is_full() const { return !has_lazy_freelist() && !m_freelist; }
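
    // Allocation prefers recycled cells from the freelist; otherwise it hands
    // out the next never-used cell (the "lazy freelist"). Returns nullptr
    // when the block is full.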
    ALWAYS_INLINE Cell* allocate()
    {
        Cell* allocated_cell = nullptr;
        if (m_freelist) {
            VERIFY(is_valid_cell_pointer(m_freelist));
            allocated_cell = exchange(m_freelist, m_freelist->next);
        } else if (has_lazy_freelist()) {
            allocated_cell = cell(m_next_lazy_freelist_index++);
        }

        if (allocated_cell) {
            ASAN_UNPOISON_MEMORY_REGION(allocated_cell, m_cell_size);
        }
        return allocated_cell;
    }

    void deallocate(Cell*);
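
    // Visits every cell that has ever been handed out; cells at or beyond
    // m_next_lazy_freelist_index have never been allocated and are skipped.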
    template<typename Callback>
    void for_each_cell(Callback callback)
    {
        auto end = has_lazy_freelist() ? m_next_lazy_freelist_index : cell_count();
        for (size_t i = 0; i < end; ++i)
            callback(cell(i));
    }

    template<Cell::State state, typename Callback>
    void for_each_cell_in_state(Callback callback)
    {
        for_each_cell([&](auto* cell) {
            if (cell->state() == state)
                callback(cell);
        });
    }

    Heap& heap() { return m_heap; }
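
    // Relies on blocks being allocated at block_size-aligned addresses:
    // masking off the low bits of any interior cell pointer yields the
    // address of the containing HeapBlock header.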
    static HeapBlock* from_cell(const Cell* cell)
    {
        return reinterpret_cast<HeapBlock*>((FlatPtr)cell & ~(block_size - 1));
    }
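
    // Maps an arbitrary pointer-sized value to the cell it would fall into,
    // or nullptr if it points before the storage area or past the cells
    // handed out so far. Useful for conservatively classifying possible
    // pointers found during stack scanning.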
    Cell* cell_from_possible_pointer(FlatPtr pointer)
    {
        if (pointer < reinterpret_cast<FlatPtr>(m_storage))
            return nullptr;
        size_t cell_index = (pointer - reinterpret_cast<FlatPtr>(m_storage)) / m_cell_size;
        auto end = has_lazy_freelist() ? m_next_lazy_freelist_index : cell_count();
        if (cell_index >= end)
            return nullptr;
        return cell(cell_index);
    }

    bool is_valid_cell_pointer(const Cell* cell)
    {
        return cell_from_possible_pointer((FlatPtr)cell);
    }

    IntrusiveListNode<HeapBlock> m_list_node;

private:
    HeapBlock(Heap&, size_t cell_size);
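
    // True while there are cells at the end of the block that have never
    // been allocated; those need no freelist entries until first use.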
    bool has_lazy_freelist() const { return m_next_lazy_freelist_index < cell_count(); }
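
    // A dead cell's storage is reused as a freelist link, so the freelist
    // costs no extra memory (and bounds the minimum cell size below).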
    struct FreelistEntry final : public Cell {
        FreelistEntry* next { nullptr };

        virtual const char* class_name() const override { return "FreelistEntry"; }
    };

    Cell* cell(size_t index)
    {
        return reinterpret_cast<Cell*>(&m_storage[index * cell_size()]);
    }

    Heap& m_heap;
    size_t m_cell_size { 0 };
    size_t m_next_lazy_freelist_index { 0 };
    FreelistEntry* m_freelist { nullptr };
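
    // Flexible array member: cell storage begins right after the header and
    // fills out the rest of the 16 KiB block.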
    alignas(Cell) u8 m_storage[];

public:
    static constexpr size_t min_possible_cell_size = sizeof(FreelistEntry);
};

}
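
// Illustration of the from_cell() arithmetic (a sketch, not part of the
// header): with block_size == 16 KiB and a block allocated on a 16 KiB
// boundary, a cell at address 0x7f0012345678 maps back to its block via
// 0x7f0012345678 & ~0x3fff == 0x7f0012344000, which is where the HeapBlock
// header lives.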