/*
 * Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
 * Copyright (c) 2021, Tobias Christiansen <tobi@tobyase.de>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include "MallocTracer.h"
#include "Emulator.h"
#include "MmapRegion.h"
#include <AK/Debug.h>
#include <AK/TemporaryChange.h>
#include <mallocdefs.h>
#include <string.h>
#include <unistd.h>

namespace UserspaceEmulator {

MallocTracer::MallocTracer(Emulator& emulator)
    : m_emulator(emulator)
{
}
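
// Invokes `callback` for every in-use mallocation across all regions that
// have been marked as malloc blocks. The callback may return
// IterationDecision::Break to stop iterating early.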
template<typename Callback>
inline void MallocTracer::for_each_mallocation(Callback callback) const
{
    m_emulator.mmu().for_each_region([&](auto& region) {
        if (is<MmapRegion>(region) && static_cast<const MmapRegion&>(region).is_malloc_block()) {
            auto* malloc_data = static_cast<MmapRegion&>(region).malloc_metadata();
            for (auto& mallocation : malloc_data->mallocations) {
                if (mallocation.used && callback(mallocation) == IterationDecision::Break)
                    return IterationDecision::Break;
            }
        }
        return IterationDecision::Continue;
    });
}
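
// Attaches fresh MallocRegionMetadata to the given region. A region whose
// chunk size fits one of malloc's size classes is a ChunkedBlock with one
// mallocation slot per chunk; anything larger is a BigAllocationBlock with
// a single slot.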
void MallocTracer::update_metadata(MmapRegion& mmap_region, size_t chunk_size)
{
    mmap_region.set_malloc_metadata({},
        adopt_own(*new MallocRegionMetadata {
            .region = mmap_region,
            .address = mmap_region.base(),
            .chunk_size = chunk_size,
            .mallocations = {},
        }));
    auto& malloc_data = *mmap_region.malloc_metadata();

    bool is_chunked_block = malloc_data.chunk_size <= size_classes[num_size_classes - 1];
    if (is_chunked_block)
        malloc_data.mallocations.resize((ChunkedBlock::block_size - sizeof(ChunkedBlock)) / malloc_data.chunk_size);
    else
        malloc_data.mallocations.resize(1);

    // Mark the containing mmap region as a malloc block!
    mmap_region.set_malloc(true);
}
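
// Called by the emulator when the target program allocates memory.
// Clears the allocation's shadow bytes (fresh malloc memory is treated as
// uninitialized), then either recycles the mallocation slot of a previously
// freed allocation at the same address or records a new mallocation.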
void MallocTracer::target_did_malloc(Badge<Emulator>, FlatPtr address, size_t size)
{
    if (m_emulator.is_in_loader_code())
        return;
    auto* region = m_emulator.mmu().find_region({ 0x23, address });
    VERIFY(region);
    VERIFY(is<MmapRegion>(*region));
    auto& mmap_region = static_cast<MmapRegion&>(*region);

    auto* shadow_bits = mmap_region.shadow_data() + address - mmap_region.base();
    memset(shadow_bits, 0, size);

    if (auto* existing_mallocation = find_mallocation(address)) {
        VERIFY(existing_mallocation->freed);
        existing_mallocation->size = size;
        existing_mallocation->freed = false;
        existing_mallocation->malloc_backtrace = m_emulator.raw_backtrace();
        existing_mallocation->free_backtrace.clear();
        return;
    }

    if (!mmap_region.is_malloc_block()) {
        auto chunk_size = mmap_region.read32(offsetof(CommonHeader, m_size)).value();
        update_metadata(mmap_region, chunk_size);
    }
    auto* mallocation = mmap_region.malloc_metadata()->mallocation_for_address(address);
    VERIFY(mallocation);
    *mallocation = { address, size, true, false, m_emulator.raw_backtrace(), Vector<FlatPtr>() };
}
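
// Called when the target's malloc gives an existing block a different chunk
// size; rebuilds the region's mallocation metadata accordingly.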
void MallocTracer::target_did_change_chunk_size(Badge<Emulator>, FlatPtr block, size_t chunk_size)
{
    if (m_emulator.is_in_loader_code())
        return;
    auto* region = m_emulator.mmu().find_region({ 0x23, block });
    VERIFY(region);
    VERIFY(is<MmapRegion>(*region));
    auto& mmap_region = static_cast<MmapRegion&>(*region);
    update_metadata(mmap_region, chunk_size);
}
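
// Maps an address within this region to its mallocation slot, or nullptr if
// the address falls outside any tracked chunk.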
ALWAYS_INLINE Mallocation* MallocRegionMetadata::mallocation_for_address(FlatPtr address) const
{
    auto index = chunk_index_for_address(address);
    if (!index.has_value())
        return nullptr;
    return &const_cast<Mallocation&>(this->mallocations[index.value()]);
}
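
// Translates an address into a chunk index. Addresses within the ChunkedBlock
// header map to chunk 0; addresses past the last chunk yield no value.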
ALWAYS_INLINE Optional<size_t> MallocRegionMetadata::chunk_index_for_address(FlatPtr address) const
{
    bool is_chunked_block = chunk_size <= size_classes[num_size_classes - 1];
    if (!is_chunked_block) {
        // This is a BigAllocationBlock
        return 0;
    }
    auto offset_into_block = address - this->address;
    if (offset_into_block < sizeof(ChunkedBlock))
        return 0;
    auto chunk_offset = offset_into_block - sizeof(ChunkedBlock);
    auto chunk_index = chunk_offset / this->chunk_size;
    if (chunk_index >= mallocations.size())
        return {};
    return chunk_index;
}
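
// Called on free(). Reports double frees (the mallocation is already marked
// freed) and invalid frees (no mallocation covers the address); otherwise
// marks the mallocation freed and captures the freeing backtrace.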
void MallocTracer::target_did_free(Badge<Emulator>, FlatPtr address)
{
    if (!address)
        return;
    if (m_emulator.is_in_loader_code())
        return;

    if (auto* mallocation = find_mallocation(address)) {
        if (mallocation->freed) {
            reportln("\n=={}== \033[31;1mDouble free()\033[0m, {:p}", getpid(), address);
            reportln("=={}== Address {} has already been passed to free()", getpid(), address);
            m_emulator.dump_backtrace();
        } else {
            mallocation->freed = true;
            mallocation->free_backtrace = m_emulator.raw_backtrace();
        }
        return;
    }

    reportln("\n=={}== \033[31;1mInvalid free()\033[0m, {:p}", getpid(), address);
    reportln("=={}== Address {} has never been returned by malloc()", getpid(), address);
    m_emulator.dump_backtrace();
}
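
// Called on realloc(). Updates the mallocation's size, refreshes the shadow
// bytes for the grown (or truncated) tail, and re-captures the allocation
// backtrace.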
void MallocTracer::target_did_realloc(Badge<Emulator>, FlatPtr address, size_t size)
{
    if (m_emulator.is_in_loader_code())
        return;
    auto* region = m_emulator.mmu().find_region({ 0x23, address });
    VERIFY(region);
    VERIFY(is<MmapRegion>(*region));
    auto& mmap_region = static_cast<MmapRegion&>(*region);

    VERIFY(mmap_region.is_malloc_block());

    auto* existing_mallocation = find_mallocation(address);
    VERIFY(existing_mallocation);
    VERIFY(!existing_mallocation->freed);

    size_t old_size = existing_mallocation->size;

    auto* shadow_bits = mmap_region.shadow_data() + address - mmap_region.base();

    if (size > old_size) {
        memset(shadow_bits + old_size, 1, size - old_size);
    } else {
        memset(shadow_bits + size, 1, old_size - size);
    }

    existing_mallocation->size = size;
    // FIXME: Should we track malloc/realloc backtrace separately perhaps?
    existing_mallocation->malloc_backtrace = m_emulator.raw_backtrace();
}
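
// Looks up the mallocation covering `address`, if any.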
Mallocation* MallocTracer::find_mallocation(FlatPtr address)
{
    auto* region = m_emulator.mmu().find_region({ 0x23, address });
    if (!region)
        return nullptr;
    return find_mallocation(*region, address);
}
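
// Finds the tracked mallocation with the highest address below `address`;
// used to describe where an out-of-bounds access landed.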
Mallocation* MallocTracer::find_mallocation_before(FlatPtr address)
{
    Mallocation* found_mallocation = nullptr;
    for_each_mallocation([&](auto& mallocation) {
        if (mallocation.address >= address)
            return IterationDecision::Continue;
        if (!found_mallocation || (mallocation.address > found_mallocation->address))
            found_mallocation = const_cast<Mallocation*>(&mallocation);
        return IterationDecision::Continue;
    });
    return found_mallocation;
}
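
// Finds the tracked mallocation with the lowest address above `address`.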
Mallocation* MallocTracer::find_mallocation_after(FlatPtr address)
{
    Mallocation* found_mallocation = nullptr;
    for_each_mallocation([&](auto& mallocation) {
        if (mallocation.address <= address)
            return IterationDecision::Continue;
        if (!found_mallocation || (mallocation.address < found_mallocation->address))
            found_mallocation = const_cast<Mallocation*>(&mallocation);
        return IterationDecision::Continue;
    });
    return found_mallocation;
}
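
// Audits a memory read performed by the target. A read that hits no tracked
// mallocation is reported as a heap buffer overflow (with the nearest
// neighboring block for context); a read into a freed mallocation is
// reported as a use-after-free.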
void MallocTracer::audit_read(const Region& region, FlatPtr address, size_t size)
{
    if (!m_auditing_enabled)
        return;

    if (m_emulator.is_in_malloc_or_free() || m_emulator.is_in_libsystem())
        return;

    if (m_emulator.is_in_loader_code())
        return;

    auto* mallocation = find_mallocation(region, address);

    if (!mallocation) {
        reportln("\n=={}== \033[31;1mHeap buffer overflow\033[0m, invalid {}-byte read at address {:p}", getpid(), size, address);
        m_emulator.dump_backtrace();
        auto* mallocation_before = find_mallocation_before(address);
        auto* mallocation_after = find_mallocation_after(address);
        size_t distance_to_mallocation_before = mallocation_before ? (address - mallocation_before->address - mallocation_before->size) : 0;
        size_t distance_to_mallocation_after = mallocation_after ? (mallocation_after->address - address) : 0;
        if (mallocation_before && (!mallocation_after || distance_to_mallocation_before < distance_to_mallocation_after)) {
            reportln("=={}== Address is {} byte(s) after block of size {}, identity {:p}, allocated at:", getpid(), distance_to_mallocation_before, mallocation_before->size, mallocation_before->address);
            m_emulator.dump_backtrace(mallocation_before->malloc_backtrace);
            return;
        }
        if (mallocation_after && (!mallocation_before || distance_to_mallocation_after < distance_to_mallocation_before)) {
            reportln("=={}== Address is {} byte(s) before block of size {}, identity {:p}, allocated at:", getpid(), distance_to_mallocation_after, mallocation_after->size, mallocation_after->address);
            m_emulator.dump_backtrace(mallocation_after->malloc_backtrace);
        }
        return;
    }

    size_t offset_into_mallocation = address - mallocation->address;

    if (mallocation->freed) {
        reportln("\n=={}== \033[31;1mUse-after-free\033[0m, invalid {}-byte read at address {:p}", getpid(), size, address);
        m_emulator.dump_backtrace();
        reportln("=={}== Address is {} byte(s) into block of size {}, allocated at:", getpid(), offset_into_mallocation, mallocation->size);
        m_emulator.dump_backtrace(mallocation->malloc_backtrace);
        reportln("=={}== Later freed at:", getpid());
        m_emulator.dump_backtrace(mallocation->free_backtrace);
        return;
    }
}
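
// Same as audit_read(), but for memory writes.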
void MallocTracer::audit_write(const Region& region, FlatPtr address, size_t size)
{
    if (!m_auditing_enabled)
        return;

    if (m_emulator.is_in_malloc_or_free())
        return;

    if (m_emulator.is_in_loader_code())
        return;

    auto* mallocation = find_mallocation(region, address);

    if (!mallocation) {
        reportln("\n=={}== \033[31;1mHeap buffer overflow\033[0m, invalid {}-byte write at address {:p}", getpid(), size, address);
        m_emulator.dump_backtrace();
        auto* mallocation_before = find_mallocation_before(address);
        auto* mallocation_after = find_mallocation_after(address);
        size_t distance_to_mallocation_before = mallocation_before ? (address - mallocation_before->address - mallocation_before->size) : 0;
        size_t distance_to_mallocation_after = mallocation_after ? (mallocation_after->address - address) : 0;
        if (mallocation_before && (!mallocation_after || distance_to_mallocation_before < distance_to_mallocation_after)) {
            reportln("=={}== Address is {} byte(s) after block of size {}, identity {:p}, allocated at:", getpid(), distance_to_mallocation_before, mallocation_before->size, mallocation_before->address);
            m_emulator.dump_backtrace(mallocation_before->malloc_backtrace);
            return;
        }
        if (mallocation_after && (!mallocation_before || distance_to_mallocation_after < distance_to_mallocation_before)) {
            reportln("=={}== Address is {} byte(s) before block of size {}, identity {:p}, allocated at:", getpid(), distance_to_mallocation_after, mallocation_after->size, mallocation_after->address);
            m_emulator.dump_backtrace(mallocation_after->malloc_backtrace);
        }
        return;
    }

    size_t offset_into_mallocation = address - mallocation->address;

    if (mallocation->freed) {
        reportln("\n=={}== \033[31;1mUse-after-free\033[0m, invalid {}-byte write at address {:p}", getpid(), size, address);
        m_emulator.dump_backtrace();
        reportln("=={}== Address is {} byte(s) into block of size {}, allocated at:", getpid(), offset_into_mallocation, mallocation->size);
        m_emulator.dump_backtrace(mallocation->malloc_backtrace);
        reportln("=={}== Later freed at:", getpid());
        m_emulator.dump_backtrace(mallocation->free_backtrace);
        return;
    }
}
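
// Builds a reachability graph over all live mallocations: nodes are
// allocations, edges are pointer-sized values inside one allocation that
// point at another, and roots are pointers found in readable non-malloc
// regions (the stack and text regions are skipped).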
void MallocTracer::populate_memory_graph()
{
    // Create a node for each live mallocation.
    for_each_mallocation([&](auto& mallocation) {
        if (mallocation.freed)
            return IterationDecision::Continue;
        m_memory_graph.set(mallocation.address, {});
        return IterationDecision::Continue;
    });

    // Find pointers from each mallocation to other mallocations.
    for_each_mallocation([&](auto& mallocation) {
        if (mallocation.freed)
            return IterationDecision::Continue;
        size_t pointers_in_mallocation = mallocation.size / sizeof(u32);
        auto& edges_from_mallocation = m_memory_graph.find(mallocation.address)->value;

        for (size_t i = 0; i < pointers_in_mallocation; ++i) {
            auto value = m_emulator.mmu().read32({ 0x23, mallocation.address + i * sizeof(u32) });
            auto other_address = value.value();
            if (!value.is_uninitialized() && m_memory_graph.contains(value.value())) {
                if constexpr (REACHABLE_DEBUG)
                    reportln("region/mallocation {:p} is reachable from other mallocation {:p}", other_address, mallocation.address);
                edges_from_mallocation.edges_from_node.append(other_address);
            }
        }
        return IterationDecision::Continue;
    });

    // Find mallocations that are pointed to by other (non-malloc) regions.
    Vector<FlatPtr> reachable_mallocations = {};
    m_emulator.mmu().for_each_region([&](auto& region) {
        // Skip the stack
        if (region.is_stack())
            return IterationDecision::Continue;
        if (region.is_text())
            return IterationDecision::Continue;
        if (!region.is_readable())
            return IterationDecision::Continue;
        // Skip malloc blocks
        if (is<MmapRegion>(region) && static_cast<const MmapRegion&>(region).is_malloc_block())
            return IterationDecision::Continue;

        size_t pointers_in_region = region.size() / sizeof(u32);

        for (size_t i = 0; i < pointers_in_region; ++i) {
            auto value = region.read32(i * sizeof(u32));
            auto other_address = value.value();
            if (!value.is_uninitialized() && m_memory_graph.contains(value.value())) {
                if constexpr (REACHABLE_DEBUG)
                    reportln("region/mallocation {:p} is reachable from region {:p}-{:p}", other_address, region.base(), region.end() - 1);
                m_memory_graph.find(other_address)->value.is_reachable = true;
                reachable_mallocations.append(other_address);
            }
        }
        return IterationDecision::Continue;
    });

    // Propagate reachability.
    // There are probably better ways to do that.
    Vector<FlatPtr> visited = {};
    for (size_t i = 0; i < reachable_mallocations.size(); ++i) {
        auto reachable = reachable_mallocations.at(i);
        if (visited.contains_slow(reachable))
            continue;
        visited.append(reachable);
        auto& mallocation_node = m_memory_graph.find(reachable)->value;
        if (!mallocation_node.is_reachable)
            mallocation_node.is_reachable = true;
        for (auto& edge : mallocation_node.edges_from_node) {
            reachable_mallocations.append(edge);
        }
    }
}
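
// Debug helper: prints every node in the memory graph along with its
// reachability and outgoing edges.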
void MallocTracer::dump_memory_graph()
{
    for (auto& key : m_memory_graph.keys()) {
        auto value = m_memory_graph.find(key)->value;
        dbgln("Block {:p} [{}reachable] ({} edges)", key, !value.is_reachable ? "not " : "", value.edges_from_node.size());
        for (auto& edge : value.edges_from_node) {
            dbgln(" -> {:p}", edge);
        }
    }
}
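
// Runs leak detection: builds the memory graph, then reports every live
// mallocation that is not reachable from any root as a leak. Auditing is
// temporarily disabled while we inspect the target's memory.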
void MallocTracer::dump_leak_report()
{
    TemporaryChange change(m_auditing_enabled, false);

    size_t bytes_leaked = 0;
    size_t leaks_found = 0;

    populate_memory_graph();

    if constexpr (REACHABLE_DEBUG)
        dump_memory_graph();

    for_each_mallocation([&](auto& mallocation) {
        if (mallocation.freed)
            return IterationDecision::Continue;
        auto& value = m_memory_graph.find(mallocation.address)->value;
        if (value.is_reachable)
            return IterationDecision::Continue;
        ++leaks_found;
        bytes_leaked += mallocation.size;
        reportln("\n=={}== \033[31;1mLeak\033[0m, {}-byte allocation at address {:p}", getpid(), mallocation.size, mallocation.address);
        m_emulator.dump_backtrace(mallocation.malloc_backtrace);
        return IterationDecision::Continue;
    });

    if (!leaks_found)
        reportln("\n=={}== \033[32;1mNo leaks found!\033[0m", getpid());
    else
        reportln("\n=={}== \033[31;1m{} leak(s) found: {} byte(s) leaked\033[0m", getpid(), leaks_found, bytes_leaked);
}

}