PhysicalRegion.cpp

/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Bitmap.h>
#include <AK/NonnullRefPtr.h>
#include <AK/RefPtr.h>
#include <AK/Vector.h>
#include <Kernel/Assertions.h>
#include <Kernel/Random.h>
#include <Kernel/VM/PhysicalPage.h>
#include <Kernel/VM/PhysicalRegion.h>

namespace Kernel {

NonnullRefPtr<PhysicalRegion> PhysicalRegion::create(PhysicalAddress lower, PhysicalAddress upper)
{
    return adopt_ref(*new PhysicalRegion(lower, upper));
}

PhysicalRegion::PhysicalRegion(PhysicalAddress lower, PhysicalAddress upper)
    : m_lower(lower)
    , m_upper(upper)
{
}
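
// Replaces the bounds of this region. Only legal before finalize_capacity()
// has computed the page count and sized the allocation bitmap.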
void PhysicalRegion::expand(PhysicalAddress lower, PhysicalAddress upper)
{
    VERIFY(!m_pages);
    m_lower = lower;
    m_upper = upper;
}
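
// Computes how many pages fit between m_lower and m_upper and sizes the
// allocation bitmap to match. May only be called once; afterwards the region
// can no longer be expanded. Returns the resulting capacity.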
unsigned PhysicalRegion::finalize_capacity()
{
    VERIFY(!m_pages);
    m_pages = (m_upper.get() - m_lower.get()) / PAGE_SIZE;
    m_bitmap.grow(m_pages, false);
    return size();
}
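
// Allocates `count` physically contiguous pages, aligned to `physical_alignment`
// bytes, and wraps each page in a PhysicalPage. A suitable contiguous range is
// expected to exist; the search below VERIFYs that it found one.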
NonnullRefPtrVector<PhysicalPage> PhysicalRegion::take_contiguous_free_pages(size_t count, bool supervisor, size_t physical_alignment)
{
    VERIFY(m_pages);
    VERIFY(m_used != m_pages);

    NonnullRefPtrVector<PhysicalPage> physical_pages;
    physical_pages.ensure_capacity(count);

    auto first_contiguous_page = find_contiguous_free_pages(count, physical_alignment);

    for (size_t index = 0; index < count; index++)
        physical_pages.append(PhysicalPage::create(m_lower.offset(PAGE_SIZE * (index + first_contiguous_page)), supervisor));
    return physical_pages;
}
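
// Finds `count` contiguous free pages, marks them as used and returns the
// index of the first page. `physical_alignment` is in bytes and must be a
// multiple of PAGE_SIZE.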
unsigned PhysicalRegion::find_contiguous_free_pages(size_t count, size_t physical_alignment)
{
    VERIFY(count != 0);
    VERIFY(physical_alignment % PAGE_SIZE == 0);
    // Convert the alignment to a page count and let the bitmap search do the work.
    auto range = find_and_allocate_contiguous_range(count, physical_alignment / PAGE_SIZE);
    VERIFY(range.has_value());
    return range.value();
}
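
// Picks a single free page: if the bitmap is fully allocated, a page is
// recycled from the recently-returned queue; otherwise the bitmap is scanned
// starting at the free hint and the found page is marked as used.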
Optional<unsigned> PhysicalRegion::find_one_free_page()
{
    if (m_used == m_pages) {
        // We know we don't have any free pages, no need to check the bitmap
        // Check if we can draw one from the return queue
        if (m_recently_returned.size() > 0) {
            u8 index = get_fast_random<u8>() % m_recently_returned.size();
            Checked<PhysicalPtr> local_offset = m_recently_returned[index].get();
            local_offset -= m_lower.get();
            m_recently_returned.remove(index);
            VERIFY(!local_offset.has_overflow());
            VERIFY(local_offset.value() < ((PhysicalPtr)m_pages * PAGE_SIZE));
            return local_offset.value() / PAGE_SIZE;
        }
        return {};
    }

    auto free_index = m_bitmap.find_one_anywhere_unset(m_free_hint);
    if (!free_index.has_value())
        return {};

    auto page_index = free_index.value();
    m_bitmap.set(page_index, true);
    m_used++;
    m_free_hint = free_index.value() + 1; // Just a guess
    if (m_free_hint >= m_bitmap.size())
        m_free_hint = 0;
    return page_index;
}
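
// Searches the bitmap for `count` consecutive free pages, rounding the start
// of the run up so its physical address is aligned to `alignment` pages,
// marks them as used and returns the index of the first page on success.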
Optional<unsigned> PhysicalRegion::find_and_allocate_contiguous_range(size_t count, unsigned alignment)
{
    VERIFY(count != 0);
    size_t found_pages_count = 0;
    // TODO: Improve how we deal with alignment != 1
    auto first_index = m_bitmap.find_longest_range_of_unset_bits(count + alignment - 1, found_pages_count);
    if (!first_index.has_value())
        return {};

    auto page = first_index.value();
    if (alignment != 1) {
        auto lower_page = m_lower.get() / PAGE_SIZE;
        page = ((lower_page + page + alignment - 1) & ~(alignment - 1)) - lower_page;
    }
    if (found_pages_count >= count) {
        m_bitmap.set_range<true>(page, count);
        m_used += count;
        m_free_hint = first_index.value() + count + 1; // Just a guess
        if (m_free_hint >= m_bitmap.size())
            m_free_hint = 0;
        return page;
    }
    return {};
}
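
// Allocates a single page and wraps it in a PhysicalPage, or returns null if
// the region has nothing left to hand out.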
RefPtr<PhysicalPage> PhysicalRegion::take_free_page(bool supervisor)
{
    VERIFY(m_pages);

    auto free_index = find_one_free_page();
    if (!free_index.has_value())
        return nullptr;

    return PhysicalPage::create(m_lower.offset((PhysicalPtr)free_index.value() * PAGE_SIZE), supervisor);
}
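
// Marks the page containing `addr` as free again and remembers it as a good
// spot for the next single-page search. `addr` must lie within this region.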
void PhysicalRegion::free_page_at(PhysicalAddress addr)
{
    VERIFY(m_pages);

    if (m_used == 0) {
        VERIFY_NOT_REACHED();
    }

    Checked<PhysicalPtr> local_offset = addr.get();
    local_offset -= m_lower.get();
    VERIFY(!local_offset.has_overflow());
    VERIFY(local_offset.value() < ((PhysicalPtr)m_pages * PAGE_SIZE));

    auto page = local_offset.value() / PAGE_SIZE;
    m_bitmap.set(page, false);
    m_free_hint = page; // We know we can find one here for sure
    m_used--;
}
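
// Parks a freed page on the recently-returned queue rather than freeing it
// right away. Once the queue is full, a random queued entry is freed to make
// room; presumably this makes reuse of just-freed physical pages harder to
// predict.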
void PhysicalRegion::return_page(const PhysicalPage& page)
{
    auto returned_count = m_recently_returned.size();
    if (returned_count >= m_recently_returned.capacity()) {
        // Return queue is full, pick a random entry and free that page
        // and replace the entry with this page
        auto& entry = m_recently_returned[get_fast_random<u8>()];
        free_page_at(entry);
        entry = page.paddr();
    } else {
        // Still filling the return queue, just append it
        m_recently_returned.append(page.paddr());
    }
}

}