HeapBlock.h
/*
 * Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */
  6. #pragma once
  7. #include <AK/IntrusiveList.h>
  8. #include <AK/Platform.h>
  9. #include <AK/StringView.h>
  10. #include <AK/Types.h>
  11. #include <LibJS/Forward.h>
  12. #include <LibJS/Heap/Cell.h>
  13. #ifdef HAS_ADDRESS_SANITIZER
  14. # include <sanitizer/asan_interface.h>
  15. #endif
namespace JS {

// A HeapBlock is a fixed-size (16 KiB) slab of memory that hands out
// garbage-collected Cells of a single size class (m_cell_size).
//
// Free cells are tracked two ways:
//  - an explicit freelist (m_freelist) of previously-deallocated cells, linked
//    through FreelistEntry nodes stored in the free cells themselves, and
//  - a "lazy freelist": cells at index >= m_next_lazy_freelist_index have never
//    been handed out, so they can be allocated by simply bumping that index.
class HeapBlock {
    AK_MAKE_NONCOPYABLE(HeapBlock);
    AK_MAKE_NONMOVABLE(HeapBlock);

public:
    static constexpr size_t block_size = 16 * KiB;
    static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, size_t);

    size_t cell_size() const { return m_cell_size; }
    // Number of cells that fit in the storage area following the block header.
    size_t cell_count() const { return (block_size - sizeof(HeapBlock)) / m_cell_size; }
    // Full when neither the lazy freelist nor the explicit freelist can supply a cell.
    bool is_full() const { return !has_lazy_freelist() && !m_freelist; }

    // Returns a free cell from this block, or nullptr if the block is full.
    // Prefers recycled cells (explicit freelist) over never-used ones.
    ALWAYS_INLINE Cell* allocate()
    {
        Cell* allocated_cell = nullptr;
        if (m_freelist) {
            VERIFY(is_valid_cell_pointer(m_freelist));
            // Pop the head of the explicit freelist.
            allocated_cell = exchange(m_freelist, m_freelist->next);
        } else if (has_lazy_freelist()) {
            // Bump-allocate the next never-used cell.
            allocated_cell = cell(m_next_lazy_freelist_index++);
        }

        if (allocated_cell) {
            // Under ASAN, free cells are presumably kept poisoned (by deallocate,
            // not visible here) — unpoison before handing the cell to the caller.
            ASAN_UNPOISON_MEMORY_REGION(allocated_cell, m_cell_size);
        }
        return allocated_cell;
    }

    void deallocate(Cell*);

    // Invokes callback for every cell slot that has ever been handed out.
    // Slots beyond m_next_lazy_freelist_index are untouched memory and are skipped.
    template<typename Callback>
    void for_each_cell(Callback callback)
    {
        auto end = has_lazy_freelist() ? m_next_lazy_freelist_index : cell_count();
        for (size_t i = 0; i < end; ++i)
            callback(cell(i));
    }

    // Like for_each_cell, but only visits cells whose state matches the template
    // argument (e.g. Cell::State::Live).
    template<Cell::State state, typename Callback>
    void for_each_cell_in_state(Callback callback)
    {
        for_each_cell([&](auto* cell) {
            if (cell->state() == state)
                callback(cell);
        });
    }

    Heap& heap() { return m_heap; }

    // Recovers the owning HeapBlock from any pointer into it. This relies on
    // blocks being allocated at block_size-aligned addresses, so masking off the
    // low bits of the pointer yields the block's base address.
    static HeapBlock* from_cell(Cell const* cell)
    {
        return reinterpret_cast<HeapBlock*>((FlatPtr)cell & ~(block_size - 1));
    }

    // Maps an arbitrary address within this block to the Cell whose storage
    // contains it, or nullptr if the address precedes the storage area or lands
    // in a slot that has never been handed out.
    Cell* cell_from_possible_pointer(FlatPtr pointer)
    {
        if (pointer < reinterpret_cast<FlatPtr>(m_storage))
            return nullptr;
        size_t cell_index = (pointer - reinterpret_cast<FlatPtr>(m_storage)) / m_cell_size;
        auto end = has_lazy_freelist() ? m_next_lazy_freelist_index : cell_count();
        if (cell_index >= end)
            return nullptr;
        return cell(cell_index);
    }

    // True if `cell` points into a slot this block has handed out
    // (non-null result from cell_from_possible_pointer converts to true).
    bool is_valid_cell_pointer(Cell const* cell)
    {
        return cell_from_possible_pointer((FlatPtr)cell);
    }

    IntrusiveListNode<HeapBlock> m_list_node;

private:
    HeapBlock(Heap&, size_t cell_size);

    // True while slots beyond m_next_lazy_freelist_index remain never-used.
    bool has_lazy_freelist() const { return m_next_lazy_freelist_index < cell_count(); }

    // Node type for the explicit freelist; stored in-place inside free cells,
    // which is why min_possible_cell_size below is sizeof(FreelistEntry).
    struct FreelistEntry final : public Cell {
        JS_CELL(FreelistEntry, Cell);

        GCPtr<FreelistEntry> next;
    };

    // Address of the index-th cell slot within m_storage.
    Cell* cell(size_t index)
    {
        return reinterpret_cast<Cell*>(&m_storage[index * cell_size()]);
    }

    Heap& m_heap;
    size_t m_cell_size { 0 };
    size_t m_next_lazy_freelist_index { 0 };
    GCPtr<FreelistEntry> m_freelist;
    // Flexible array member: cell storage begins immediately after the header,
    // aligned suitably for Cell.
    alignas(Cell) u8 m_storage[];

public:
    // A cell must be large enough to hold a FreelistEntry once freed.
    static constexpr size_t min_possible_cell_size = sizeof(FreelistEntry);
};

}