// RangeAllocator.cpp
/*
 * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include <AK/BinarySearch.h>
#include <AK/QuickSort.h>
#include <Kernel/Random.h>
#include <Kernel/Thread.h>
#include <Kernel/VM/RangeAllocator.h>

//#define VRA_DEBUG
#define VM_GUARD_PAGES

namespace Kernel {
// A default-constructed allocator manages no addresses until
// initialize_with_range() or initialize_from_parent() is called.
RangeAllocator::RangeAllocator()
{
}
  37. void RangeAllocator::initialize_with_range(VirtualAddress base, size_t size)
  38. {
  39. m_total_range = { base, size };
  40. m_available_ranges.append({ base, size });
  41. #ifdef VRA_DEBUG
  42. ScopedSpinLock lock(m_lock);
  43. dump();
  44. #endif
  45. }
// Copy the parent's layout (total range + current free list) into this
// allocator, taking the parent's lock so we snapshot a consistent state.
// NOTE(review): presumably used when cloning an address space (e.g. fork) —
// confirm with callers.
void RangeAllocator::initialize_from_parent(const RangeAllocator& parent_allocator)
{
    ScopedSpinLock lock(parent_allocator.m_lock);
    m_total_range = parent_allocator.m_total_range;
    m_available_ranges = parent_allocator.m_available_ranges;
}
// Nothing to release explicitly; members clean up via their own destructors.
RangeAllocator::~RangeAllocator()
{
}
  55. void RangeAllocator::dump() const
  56. {
  57. ASSERT(m_lock.is_locked());
  58. dbg() << "RangeAllocator{" << this << "}";
  59. for (auto& range : m_available_ranges) {
  60. dbg() << " " << String::format("%x", range.base().get()) << " -> " << String::format("%x", range.end().get() - 1);
  61. }
  62. }
  63. Vector<Range, 2> Range::carve(const Range& taken)
  64. {
  65. Vector<Range, 2> parts;
  66. if (taken == *this)
  67. return {};
  68. if (taken.base() > base())
  69. parts.append({ base(), taken.base().get() - base().get() });
  70. if (taken.end() < end())
  71. parts.append({ taken.end(), end().get() - taken.end().get() });
  72. #ifdef VRA_DEBUG
  73. dbg() << "VRA: carve: take " << String::format("%x", taken.base().get()) << "-" << String::format("%x", taken.end().get() - 1) << " from " << String::format("%x", base().get()) << "-" << String::format("%x", end().get() - 1);
  74. for (size_t i = 0; i < parts.size(); ++i)
  75. dbg() << " " << String::format("%x", parts[i].base().get()) << "-" << String::format("%x", parts[i].end().get() - 1);
  76. #endif
  77. return parts;
  78. }
  79. void RangeAllocator::carve_at_index(int index, const Range& range)
  80. {
  81. ASSERT(m_lock.is_locked());
  82. auto remaining_parts = m_available_ranges[index].carve(range);
  83. ASSERT(remaining_parts.size() >= 1);
  84. m_available_ranges[index] = remaining_parts[0];
  85. if (remaining_parts.size() == 2)
  86. m_available_ranges.insert(index + 1, move(remaining_parts[1]));
  87. }
  88. Range RangeAllocator::allocate_anywhere(size_t size, size_t alignment)
  89. {
  90. if (!size)
  91. return {};
  92. #ifdef VM_GUARD_PAGES
  93. // NOTE: We pad VM allocations with a guard page on each side.
  94. size_t effective_size = size + PAGE_SIZE * 2;
  95. size_t offset_from_effective_base = PAGE_SIZE;
  96. #else
  97. size_t effective_size = size;
  98. size_t offset_from_effective_base = 0;
  99. #endif
  100. ScopedSpinLock lock(m_lock);
  101. for (size_t i = 0; i < m_available_ranges.size(); ++i) {
  102. auto& available_range = m_available_ranges[i];
  103. // FIXME: This check is probably excluding some valid candidates when using a large alignment.
  104. if (available_range.size() < (effective_size + alignment))
  105. continue;
  106. FlatPtr initial_base = available_range.base().offset(offset_from_effective_base).get();
  107. FlatPtr aligned_base = round_up_to_power_of_two(initial_base, alignment);
  108. Range allocated_range(VirtualAddress(aligned_base), size);
  109. if (available_range == allocated_range) {
  110. #ifdef VRA_DEBUG
  111. dbg() << "VRA: Allocated perfect-fit anywhere(" << String::format("%zu", size) << ", " << String::format("%zu", alignment) << "): " << String::format("%x", allocated_range.base().get());
  112. #endif
  113. m_available_ranges.remove(i);
  114. return allocated_range;
  115. }
  116. carve_at_index(i, allocated_range);
  117. #ifdef VRA_DEBUG
  118. dbg() << "VRA: Allocated anywhere(" << String::format("%zu", size) << ", " << String::format("%zu", alignment) << "): " << String::format("%x", allocated_range.base().get());
  119. dump();
  120. #endif
  121. return allocated_range;
  122. }
  123. klog() << "VRA: Failed to allocate anywhere: " << size << ", " << alignment;
  124. return {};
  125. }
  126. Range RangeAllocator::allocate_specific(VirtualAddress base, size_t size)
  127. {
  128. if (!size)
  129. return {};
  130. Range allocated_range(base, size);
  131. ScopedSpinLock lock(m_lock);
  132. for (size_t i = 0; i < m_available_ranges.size(); ++i) {
  133. auto& available_range = m_available_ranges[i];
  134. if (!available_range.contains(base, size))
  135. continue;
  136. if (available_range == allocated_range) {
  137. m_available_ranges.remove(i);
  138. return allocated_range;
  139. }
  140. carve_at_index(i, allocated_range);
  141. #ifdef VRA_DEBUG
  142. dbg() << "VRA: Allocated specific(" << size << "): " << String::format("%x", available_range.base().get());
  143. dump();
  144. #endif
  145. return allocated_range;
  146. }
  147. dbg() << "VRA: Failed to allocate specific range: " << base << "(" << size << ")";
  148. return {};
  149. }
// Return `range` to the free list, coalescing with adjacent free ranges
// where possible to keep the (sorted) list compact.
void RangeAllocator::deallocate(Range range)
{
    ScopedSpinLock lock(m_lock);
    ASSERT(m_total_range.contains(range));
    ASSERT(range.size());
    ASSERT(range.base() < range.end());
#ifdef VRA_DEBUG
    dbg() << "VRA: Deallocate: " << String::format("%x", range.base().get()) << "(" << range.size() << ")";
    dump();
#endif
    ASSERT(!m_available_ranges.is_empty());
    size_t nearby_index = 0;
    // Search for a free range whose end equals our base, i.e. an immediate
    // predecessor we can merge with. The comparator treats "a.base() ==
    // b.end()" as a match (result 0).
    // NOTE(review): the subtraction is on unsigned address values and relies
    // on wraparound to encode ordering — confirm binary_search handles this
    // comparator's result type as intended.
    auto* existing_range = binary_search(
        m_available_ranges.span(), range, [](auto& a, auto& b) {
            return a.base().get() - b.end().get();
        },
        &nearby_index);
    size_t inserted_index = 0;
    if (existing_range) {
        // Merge with the adjacent predecessor by growing it in place.
        existing_range->m_size += range.size();
        inserted_index = nearby_index;
    } else {
        // No adjacent predecessor: insert the freed range at its sorted
        // position (before the first entry starting at or past our end).
        m_available_ranges.insert_before_matching(
            Range(range), [&](auto& entry) {
                return entry.base() >= range.end();
            },
            nearby_index, &inserted_index);
    }
    if (inserted_index < (m_available_ranges.size() - 1)) {
        // We already merged with previous. Try to merge with next.
        auto& inserted_range = m_available_ranges[inserted_index];
        auto& next_range = m_available_ranges[inserted_index + 1];
        if (inserted_range.end() == next_range.base()) {
            inserted_range.m_size += next_range.size();
            m_available_ranges.remove(inserted_index + 1);
            // NOTE(review): this early return skips the debug dump below.
            return;
        }
    }
#ifdef VRA_DEBUG
    dbg() << "VRA: After deallocate";
    dump();
#endif
}
}