HeapBlock.h

/*
 * Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/IntrusiveList.h>
#include <AK/Platform.h>
#include <AK/StringView.h>
#include <AK/Types.h>
#include <LibJS/Forward.h>
#include <LibJS/Heap/Cell.h>
#include <LibJS/Heap/Internals.h>

#ifdef HAS_ADDRESS_SANITIZER
#    include <sanitizer/asan_interface.h>
#endif

namespace JS {
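
// A HeapBlock is one fixed-size (block_size) chunk of memory that stores
// garbage-collected cells of a single cell size. The cell storage follows
// the block header in memory (see m_storage below).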
class HeapBlock : public HeapBlockBase {
    AK_MAKE_NONCOPYABLE(HeapBlock);
    AK_MAKE_NONMOVABLE(HeapBlock);

public:
    using HeapBlockBase::block_size;

    static NonnullOwnPtr<HeapBlock> create_with_cell_size(Heap&, CellAllocator&, size_t cell_size, char const* class_name);

    size_t cell_size() const { return m_cell_size; }
    size_t cell_count() const { return (block_size - sizeof(HeapBlock)) / m_cell_size; }
    bool is_full() const { return !has_lazy_freelist() && !m_freelist; }
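
    // Allocation is two-tiered: first reuse a cell from the freelist of
    // previously deallocated cells; otherwise hand out the next never-used
    // cell (the "lazy freelist"). Returns nullptr if the block is full.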
    ALWAYS_INLINE Cell* allocate()
    {
        Cell* allocated_cell = nullptr;
        if (m_freelist) {
            VERIFY(is_valid_cell_pointer(m_freelist));
            allocated_cell = exchange(m_freelist, m_freelist->next);
        } else if (has_lazy_freelist()) {
            allocated_cell = cell(m_next_lazy_freelist_index++);
        }

        if (allocated_cell) {
            ASAN_UNPOISON_MEMORY_REGION(allocated_cell, m_cell_size);
        }
        return allocated_cell;
    }
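
    // Returns a cell to this block's freelist; implemented out of line.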
    void deallocate(Cell*);
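
    // Visits every cell that has ever been handed out; cells past the lazy
    // freelist index have never been initialized and are skipped.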
    template<typename Callback>
    void for_each_cell(Callback callback)
    {
        auto end = has_lazy_freelist() ? m_next_lazy_freelist_index : cell_count();
        for (size_t i = 0; i < end; ++i)
            callback(cell(i));
    }
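
    // As above, but filtered to cells in the given GC state (e.g. Cell::State::Live).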
    template<Cell::State state, typename Callback>
    void for_each_cell_in_state(Callback callback)
    {
        for_each_cell([&](auto* cell) {
            if (cell->state() == state)
                callback(cell);
        });
    }
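
    // Recovers the owning block from a cell pointer; this relies on blocks
    // being allocated at block_size-aligned addresses (see HeapBlockBase).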
    static HeapBlock* from_cell(Cell const* cell)
    {
        return static_cast<HeapBlock*>(HeapBlockBase::from_cell(cell));
    }
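
    // Maps an arbitrary pointer (e.g. one found during conservative root
    // scanning) to the initialized cell it points into, or nullptr if it
    // does not point into one.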
    Cell* cell_from_possible_pointer(FlatPtr pointer)
    {
        if (pointer < reinterpret_cast<FlatPtr>(m_storage))
            return nullptr;
        size_t cell_index = (pointer - reinterpret_cast<FlatPtr>(m_storage)) / m_cell_size;
        auto end = has_lazy_freelist() ? m_next_lazy_freelist_index : cell_count();
        if (cell_index >= end)
            return nullptr;
        return cell(cell_index);
    }
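
    // A pointer is valid if it lands within an initialized cell of this block.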
    bool is_valid_cell_pointer(Cell const* cell)
    {
        return cell_from_possible_pointer((FlatPtr)cell);
    }

    IntrusiveListNode<HeapBlock> m_list_node;

    CellAllocator& cell_allocator() { return m_cell_allocator; }

private:
    HeapBlock(Heap&, CellAllocator&, size_t cell_size);
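
    // True while some cells at the end of the block have never been handed out.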
    bool has_lazy_freelist() const { return m_next_lazy_freelist_index < cell_count(); }
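
    // A deallocated cell is reused in place as a FreelistEntry, forming an
    // intrusive singly-linked list of free cells.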
    struct FreelistEntry final : public Cell {
        JS_CELL(FreelistEntry, Cell);

        GCPtr<FreelistEntry> next;
    };

    Cell* cell(size_t index)
    {
        return reinterpret_cast<Cell*>(&m_storage[index * cell_size()]);
    }
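
    // m_storage is a flexible array member: cell storage begins directly after
    // the HeapBlock header, which is why cell_count() subtracts sizeof(HeapBlock).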
    CellAllocator& m_cell_allocator;
    size_t m_cell_size { 0 };
    size_t m_next_lazy_freelist_index { 0 };
    GCPtr<FreelistEntry> m_freelist;
    alignas(__BIGGEST_ALIGNMENT__) u8 m_storage[];

public:
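    // Every cell must be large enough to be reused as a FreelistEntry.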
    static constexpr size_t min_possible_cell_size = sizeof(FreelistEntry);
};

}