DynamicLoader.cpp

/*
 * Copyright (c) 2019-2020, Andrew Kaster <akaster@serenityos.org>
 * Copyright (c) 2020, Itamar S. <itamar8910@gmail.com>
 * Copyright (c) 2021, Andreas Kling <kling@serenityos.org>
 * Copyright (c) 2022-2023, Daniel Bertalan <dani@danielbertalan.dev>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <AK/Debug.h>
#include <AK/Optional.h>
#include <AK/QuickSort.h>
#include <AK/StringBuilder.h>
#include <LibELF/Arch/GenericDynamicRelocationType.h>
#include <LibELF/Arch/tls.h>
#include <LibELF/DynamicLinker.h>
#include <LibELF/DynamicLoader.h>
#include <LibELF/Hashes.h>
#include <LibELF/Validation.h>
#include <assert.h>
#include <bits/dlfcn_integration.h>
#include <dlfcn.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

#ifndef AK_OS_SERENITY
static void* mmap_with_name(void* addr, size_t length, int prot, int flags, int fd, off_t offset, char const*)
{
    return mmap(addr, length, prot, flags, fd, offset);
}

#    define MAP_RANDOMIZED 0
#endif

#if ARCH(AARCH64)
#    define HAS_TLSDESC_SUPPORT
extern "C" {
void* __tlsdesc_static(void*);
}
#endif
namespace ELF {

Result<NonnullRefPtr<DynamicLoader>, DlErrorMessage> DynamicLoader::try_create(int fd, ByteString filepath)
{
    VERIFY(filepath.starts_with('/'));

    struct stat stat;
    if (fstat(fd, &stat) < 0) {
        return DlErrorMessage { "DynamicLoader::try_create fstat" };
    }

    VERIFY(stat.st_size >= 0);
    auto size = static_cast<size_t>(stat.st_size);
    if (size < sizeof(Elf_Ehdr))
        return DlErrorMessage { ByteString::formatted("File {} has invalid ELF header", filepath) };

    ByteString file_mmap_name = ByteString::formatted("ELF_DYN: {}", filepath);
    auto* data = mmap_with_name(nullptr, size, PROT_READ, MAP_SHARED, fd, 0, file_mmap_name.characters());
    if (data == MAP_FAILED) {
        return DlErrorMessage { "DynamicLoader::try_create mmap" };
    }

    auto loader = adopt_ref(*new DynamicLoader(fd, move(filepath), data, size));
    if (!loader->is_valid())
        return DlErrorMessage { "ELF image validation failed" };

    return loader;
}
DynamicLoader::DynamicLoader(int fd, ByteString filepath, void* data, size_t size)
    : m_filepath(move(filepath))
    , m_file_size(size)
    , m_image_fd(fd)
    , m_file_data(data)
{
    m_elf_image = adopt_own(*new ELF::Image((u8*)m_file_data, m_file_size));
    m_valid = validate();
    if (m_valid)
        find_tls_size_and_alignment();
    else
        dbgln("Image validation failed for file {}", m_filepath);
}

DynamicLoader::~DynamicLoader()
{
    if (munmap(m_file_data, m_file_size) < 0) {
        perror("munmap");
        VERIFY_NOT_REACHED();
    }
    if (close(m_image_fd) < 0) {
        perror("close");
        VERIFY_NOT_REACHED();
    }
}
void DynamicLoader::find_tls_size_and_alignment()
{
    image().for_each_program_header([this](auto program_header) {
        if (program_header.type() == PT_TLS) {
            m_tls_size_of_current_object = program_header.size_in_memory();
            auto alignment = program_header.alignment();
            VERIFY(!alignment || is_power_of_two(alignment));
            m_tls_alignment_of_current_object = alignment > 1 ? alignment : 0; // No need to reserve extra space for single byte alignment
            return IterationDecision::Break;
        }
        return IterationDecision::Continue;
    });
}
bool DynamicLoader::validate()
{
    if (!image().is_valid())
        return false;

    auto* elf_header = (Elf_Ehdr*)m_file_data;
    if (!validate_elf_header(*elf_header, m_file_size))
        return false;
    auto result_or_error = validate_program_headers(*elf_header, m_file_size, { m_file_data, m_file_size });
    if (result_or_error.is_error() || !result_or_error.value())
        return false;
    return true;
}
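
// Loading proceeds in stages: map() reserves address space and maps the PT_LOAD segments,
// link()/load_stage_2() adjusts .text protections and applies the main relocations,
// load_stage_3() sets up the PLT trampoline, runs IFUNC resolvers and applies RELRO
// protection, and load_stage_4() runs the object's initializers.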
RefPtr<DynamicObject> DynamicLoader::map()
{
    if (m_dynamic_object) {
        // Already mapped.
        return nullptr;
    }

    if (!m_valid) {
        dbgln("DynamicLoader::map failed: image is invalid");
        return nullptr;
    }

    load_program_headers();

    VERIFY(!m_base_address.is_null());

    m_dynamic_object = DynamicObject::create(m_filepath, m_base_address, m_dynamic_section_address);
    m_dynamic_object->set_tls_offset(m_tls_offset);
    m_dynamic_object->set_tls_size(m_tls_size_of_current_object);

    return m_dynamic_object;
}

bool DynamicLoader::link(unsigned flags)
{
    return load_stage_2(flags);
}
bool DynamicLoader::load_stage_2(unsigned flags)
{
    VERIFY(flags & RTLD_GLOBAL);

    if (m_dynamic_object->has_text_relocations()) {
        dbgln("\033[33mWarning:\033[0m Dynamic object {} has text relocations", m_dynamic_object->filepath());

        for (auto& text_segment : m_text_segments) {
            VERIFY(text_segment.address().get() != 0);

#ifndef AK_OS_MACOS
            // Remap this text region as private.
            if (mremap(text_segment.address().as_ptr(), text_segment.size(), text_segment.size(), MAP_PRIVATE) == MAP_FAILED) {
                perror("mremap .text: MAP_PRIVATE");
                return false;
            }
#endif

            if (0 > mprotect(text_segment.address().as_ptr(), text_segment.size(), PROT_READ | PROT_WRITE)) {
                perror("mprotect .text: PROT_READ | PROT_WRITE"); // FIXME: dlerror?
                return false;
            }
        }
    } else {
        // .text needs to be executable while we process relocations because it might contain IFUNC resolvers.
        // We don't allow IFUNC resolvers in objects with textrels.
        for (auto& text_segment : m_text_segments) {
            if (mprotect(text_segment.address().as_ptr(), text_segment.size(), PROT_READ | PROT_EXEC) < 0) {
                perror("mprotect .text: PROT_READ | PROT_EXEC");
                return false;
            }
        }
    }

    do_main_relocations();
    return true;
}
void DynamicLoader::do_main_relocations()
{
    do_relr_relocations();

    Optional<DynamicLoader::CachedLookupResult> cached_result;
    m_dynamic_object->relocation_section().for_each_relocation([&](DynamicObject::Relocation const& relocation) {
        switch (do_direct_relocation(relocation, cached_result, ShouldCallIfuncResolver::No)) {
        case RelocationResult::Failed:
            dbgln("Loader.so: {} unresolved symbol '{}'", m_filepath, relocation.symbol().name());
            VERIFY_NOT_REACHED();
        case RelocationResult::CallIfuncResolver:
            m_direct_ifunc_relocations.append(relocation);
            break;
        case RelocationResult::Success:
            break;
        }
    });

    // If the object is position-independent, the pointer to the PLT trampoline needs to be relocated.
    auto fixup_trampoline_pointer = [&](DynamicObject::Relocation const& relocation) {
        VERIFY(static_cast<GenericDynamicRelocationType>(relocation.type()) == GenericDynamicRelocationType::JUMP_SLOT);
        if (image().is_dynamic())
            *((FlatPtr*)relocation.address().as_ptr()) += m_dynamic_object->base_address().get();
    };

    m_dynamic_object->plt_relocation_section().for_each_relocation([&](DynamicObject::Relocation const& relocation) {
        if (static_cast<GenericDynamicRelocationType>(relocation.type()) == GenericDynamicRelocationType::IRELATIVE) {
            m_direct_ifunc_relocations.append(relocation);
            return;
        }
        if (static_cast<GenericDynamicRelocationType>(relocation.type()) == GenericDynamicRelocationType::TLSDESC) {
            // GNU ld for some reason puts TLSDESC relocations into .rela.plt
            // https://sourceware.org/bugzilla/show_bug.cgi?id=28387
            auto result = do_direct_relocation(relocation, cached_result, ShouldCallIfuncResolver::No);
            VERIFY(result == RelocationResult::Success);
            return;
        }

        // FIXME: Or LD_BIND_NOW is set?
        if (m_dynamic_object->must_bind_now()) {
            switch (do_plt_relocation(relocation, ShouldCallIfuncResolver::No)) {
            case RelocationResult::Failed:
                dbgln("Loader.so: {} unresolved symbol '{}'", m_filepath, relocation.symbol().name());
                VERIFY_NOT_REACHED();
            case RelocationResult::CallIfuncResolver:
                m_plt_ifunc_relocations.append(relocation);
                // Set up lazy binding, in case an IFUNC resolver calls another IFUNC that hasn't been resolved yet.
                fixup_trampoline_pointer(relocation);
                break;
            case RelocationResult::Success:
                break;
            }
        } else {
            fixup_trampoline_pointer(relocation);
        }
    });
}
Result<NonnullRefPtr<DynamicObject>, DlErrorMessage> DynamicLoader::load_stage_3(unsigned flags)
{
    if (flags & RTLD_LAZY) {
        if (m_dynamic_object->has_plt())
            setup_plt_trampoline();
    }

    // IFUNC resolvers can only be called after the PLT has been populated,
    // as they may call arbitrary functions via the PLT.
    for (auto const& relocation : m_plt_ifunc_relocations) {
        auto result = do_plt_relocation(relocation, ShouldCallIfuncResolver::Yes);
        VERIFY(result == RelocationResult::Success);
    }

    Optional<DynamicLoader::CachedLookupResult> cached_result;
    for (auto const& relocation : m_direct_ifunc_relocations) {
        auto result = do_direct_relocation(relocation, cached_result, ShouldCallIfuncResolver::Yes);
        VERIFY(result == RelocationResult::Success);
    }

    if (m_dynamic_object->has_text_relocations()) {
        // If we don't have textrels, .text has already been made executable by this point in load_stage_2.
        for (auto& text_segment : m_text_segments) {
            if (mprotect(text_segment.address().as_ptr(), text_segment.size(), PROT_READ | PROT_EXEC) < 0) {
                return DlErrorMessage { ByteString::formatted("mprotect .text: PROT_READ | PROT_EXEC: {}", strerror(errno)) };
            }
        }
    }

    if (m_relro_segment_size) {
        if (mprotect(m_relro_segment_address.as_ptr(), m_relro_segment_size, PROT_READ) < 0) {
            return DlErrorMessage { ByteString::formatted("mprotect .relro: PROT_READ: {}", strerror(errno)) };
        }

#ifdef AK_OS_SERENITY
        if (set_mmap_name(m_relro_segment_address.as_ptr(), m_relro_segment_size, ByteString::formatted("{}: .relro", m_filepath).characters()) < 0) {
            return DlErrorMessage { ByteString::formatted("set_mmap_name .relro: {}", strerror(errno)) };
        }
#endif
    }

    m_fully_relocated = true;

    return NonnullRefPtr<DynamicObject> { *m_dynamic_object };
}
void DynamicLoader::load_stage_4()
{
    call_object_init_functions();
    m_fully_initialized = true;
}
void DynamicLoader::load_program_headers()
{
    FlatPtr ph_load_start = SIZE_MAX;
    FlatPtr ph_load_end = 0;

    // We walk the program header list once to find the requested address ranges of the program.
    // We don't fill in the list of regions yet to keep malloc memory blocks from interfering with our reservation.
    image().for_each_program_header([&](Image::ProgramHeader const& program_header) {
        if (program_header.type() != PT_LOAD)
            return;

        FlatPtr section_start = program_header.vaddr().get();
        FlatPtr section_end = section_start + program_header.size_in_memory();

        if (ph_load_start > section_start)
            ph_load_start = section_start;
        if (ph_load_end < section_end)
            ph_load_end = section_end;
    });

    void* requested_load_address = image().is_dynamic() ? nullptr : reinterpret_cast<void*>(ph_load_start);

    int reservation_mmap_flags = MAP_ANON | MAP_PRIVATE | MAP_NORESERVE;
    if (image().is_dynamic())
        reservation_mmap_flags |= MAP_RANDOMIZED;
#ifdef MAP_FIXED_NOREPLACE
    else
        reservation_mmap_flags |= MAP_FIXED_NOREPLACE;
#endif

    // First, we make a dummy reservation mapping, in order to allocate enough VM
    // to hold all regions contiguously in the address space.
    FlatPtr ph_load_base = ph_load_start & ~(FlatPtr)0xfffu;
    ph_load_end = round_up_to_power_of_two(ph_load_end, PAGE_SIZE);

    size_t total_mapping_size = ph_load_end - ph_load_base;

    // Before we make our reservation, unmap our existing mapped ELF image that we used for reading header information.
    // This leaves our pointers dangling momentarily, but it reduces the chance that we will conflict with ourselves.
    if (munmap(m_file_data, m_file_size) < 0) {
        perror("munmap old mapping");
        VERIFY_NOT_REACHED();
    }
    m_elf_image = nullptr;
    m_file_data = nullptr;

    auto* reservation = mmap(requested_load_address, total_mapping_size, PROT_NONE, reservation_mmap_flags, 0, 0);
    if (reservation == MAP_FAILED) {
        perror("mmap reservation");
        VERIFY_NOT_REACHED();
    }

    // Now that we can't accidentally block our requested space, re-map our ELF image.
    ByteString file_mmap_name = ByteString::formatted("ELF_DYN: {}", m_filepath);
    auto* data = mmap_with_name(nullptr, m_file_size, PROT_READ, MAP_SHARED, m_image_fd, 0, file_mmap_name.characters());
    if (data == MAP_FAILED) {
        perror("mmap new mapping");
        VERIFY_NOT_REACHED();
    }

    m_file_data = data;
    m_elf_image = adopt_own(*new ELF::Image((u8*)m_file_data, m_file_size));

    VERIFY(requested_load_address == nullptr || reservation == requested_load_address);

    m_base_address = VirtualAddress { reservation };

    // Most binaries have four loadable regions, three of which are mapped
    // (symbol tables/relocation information, executable instructions, read-only data)
    // and one of which is copied (modifiable data).
    // These are allocated in-line to cut down on the malloc calls.
    Vector<ProgramHeaderRegion, 3> map_regions;
    Vector<ProgramHeaderRegion, 1> copy_regions;
    Optional<ProgramHeaderRegion> relro_region;

    VirtualAddress dynamic_region_desired_vaddr;

    image().for_each_program_header([&](Image::ProgramHeader const& program_header) {
        ProgramHeaderRegion region {};
        region.set_program_header(program_header.raw_header());
        if (region.is_tls_template()) {
            // Skip, this is handled in DynamicLoader::copy_initial_tls_data_into.
        } else if (region.is_load()) {
            if (region.size_in_memory() == 0)
                return;
            if (region.is_writable()) {
                copy_regions.append(region);
            } else {
                map_regions.append(region);
            }
        } else if (region.is_dynamic()) {
            dynamic_region_desired_vaddr = region.desired_load_address();
        } else if (region.is_relro()) {
            VERIFY(!relro_region.has_value());
            relro_region = region;
        }
    });
    VERIFY(!map_regions.is_empty() || !copy_regions.is_empty());

    auto compare_load_address = [](ProgramHeaderRegion& a, ProgramHeaderRegion& b) {
        return a.desired_load_address().as_ptr() < b.desired_load_address().as_ptr();
    };

    quick_sort(map_regions, compare_load_address);
    quick_sort(copy_regions, compare_load_address);

    // Pre-allocate any malloc memory needed before unmapping the reservation.
    // We don't want any future malloc to accidentally mmap a reserved address!
    ByteString text_segment_name = ByteString::formatted("{}: .text", m_filepath);
    ByteString rodata_segment_name = ByteString::formatted("{}: .rodata", m_filepath);
    ByteString data_segment_name = ByteString::formatted("{}: .data", m_filepath);

    m_text_segments.ensure_capacity(map_regions.size());

    // Finally, we unmap the reservation.
    if (munmap(reservation, total_mapping_size) < 0) {
        perror("munmap reservation");
        VERIFY_NOT_REACHED();
    }

    // WARNING: Allocating after this point has the possibility of malloc stealing our reserved
    // virtual memory addresses. Be careful not to malloc below!

    // Process regions in order: .text, .data, .tls
    for (auto& region : map_regions) {
        FlatPtr ph_desired_base = region.desired_load_address().get();
        FlatPtr ph_base = region.desired_load_address().page_base().get();
        FlatPtr ph_end = ph_base + round_up_to_power_of_two(region.size_in_memory() + region.desired_load_address().get() - ph_base, PAGE_SIZE);

        char const* const segment_name = region.is_executable() ? text_segment_name.characters() : rodata_segment_name.characters();

        // Now we can map the text segment at the reserved address.
        auto* segment_base = (u8*)mmap_with_name(
            (u8*)reservation + ph_base - ph_load_base,
            ph_desired_base - ph_base + region.size_in_image(),
            PROT_READ,
            MAP_SHARED | MAP_FIXED,
            m_image_fd,
            VirtualAddress { region.offset() }.page_base().get(),
            segment_name);

        if (segment_base == MAP_FAILED) {
            perror("mmap non-writable");
            VERIFY_NOT_REACHED();
        }

        // NOTE: Capacity ensured above the line of no malloc above
        if (region.is_executable())
            m_text_segments.unchecked_append({ VirtualAddress { segment_base }, ph_end - ph_base });
    }

    VERIFY(requested_load_address == nullptr || requested_load_address == reservation);

    if (relro_region.has_value()) {
        m_relro_segment_size = relro_region->size_in_memory();
        m_relro_segment_address = VirtualAddress { (u8*)reservation + relro_region->desired_load_address().get() - ph_load_base };
    }

    if (image().is_dynamic())
        m_dynamic_section_address = VirtualAddress { (u8*)reservation + dynamic_region_desired_vaddr.get() - ph_load_base };
    else
        m_dynamic_section_address = dynamic_region_desired_vaddr;

    for (auto& region : copy_regions) {
        FlatPtr ph_data_base = region.desired_load_address().page_base().get();
        FlatPtr ph_data_end = ph_data_base + round_up_to_power_of_two(region.size_in_memory() + region.desired_load_address().get() - ph_data_base, PAGE_SIZE);

        auto* data_segment_address = (u8*)reservation + ph_data_base - ph_load_base;
        size_t data_segment_size = ph_data_end - ph_data_base;

        // Finally, we make an anonymous mapping for the data segment. Contents are then copied from the file.
        auto* data_segment = (u8*)mmap_with_name(
            data_segment_address,
            data_segment_size,
            PROT_READ | PROT_WRITE,
            MAP_ANONYMOUS | MAP_PRIVATE | MAP_FIXED,
            0,
            0,
            data_segment_name.characters());

        if (MAP_FAILED == data_segment) {
            perror("mmap writable");
            VERIFY_NOT_REACHED();
        }

        VirtualAddress data_segment_start;
        if (image().is_dynamic())
            data_segment_start = VirtualAddress { (u8*)reservation + region.desired_load_address().get() };
        else
            data_segment_start = region.desired_load_address();

        VERIFY(data_segment_start.as_ptr() + region.size_in_memory() <= data_segment + data_segment_size);

        memcpy(data_segment_start.as_ptr(), (u8*)m_file_data + region.offset(), region.size_in_image());
    }
}
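
// Applies a single non-PLT relocation. JUMP_SLOT (PLT) relocations are handled separately
// in do_plt_relocation(); everything else (absolute, relative, GLOB_DAT, the TLS family,
// TLSDESC and IRELATIVE) is patched here.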
DynamicLoader::RelocationResult DynamicLoader::do_direct_relocation(DynamicObject::Relocation const& relocation,
    Optional<DynamicLoader::CachedLookupResult>& cached_result,
    ShouldCallIfuncResolver should_call_ifunc_resolver)
{
    FlatPtr* patch_ptr = nullptr;
    if (is_dynamic())
        patch_ptr = (FlatPtr*)(m_dynamic_object->base_address().as_ptr() + relocation.offset());
    else
        patch_ptr = (FlatPtr*)(FlatPtr)relocation.offset();

    auto call_ifunc_resolver = [](VirtualAddress address) {
        return VirtualAddress { reinterpret_cast<DynamicObject::IfuncResolver>(address.get())() };
    };

    auto lookup_symbol = [&](DynamicObject::Symbol const& symbol) {
        // The static linker sorts relocations by the referenced symbol. Especially when vtables
        // in large inheritance hierarchies are involved, there might be tens of references to
        // the same symbol. We can avoid redundant lookups by keeping track of the previous result.
        if (!cached_result.has_value() || !cached_result.value().symbol.definitely_equals(symbol))
            cached_result = DynamicLoader::CachedLookupResult { symbol, DynamicLoader::lookup_symbol(symbol) };
        return cached_result.value().result;
    };

    struct ResolvedTLSSymbol {
        DynamicObject const& dynamic_object;
        FlatPtr value;
    };

    auto resolve_tls_symbol = [&](DynamicObject::Relocation const& relocation) -> Optional<ResolvedTLSSymbol> {
        if (relocation.symbol_index() == 0)
            return ResolvedTLSSymbol { relocation.dynamic_object(), 0 };

        auto res = lookup_symbol(relocation.symbol());
        if (!res.has_value())
            return {};
        VERIFY(relocation.symbol().type() != STT_GNU_IFUNC);
        VERIFY(res.value().dynamic_object != nullptr);
        return ResolvedTLSSymbol { *res.value().dynamic_object, res.value().value };
    };

    using enum GenericDynamicRelocationType;
    switch (static_cast<GenericDynamicRelocationType>(relocation.type())) {
    case NONE:
        // Apparently most loaders will just skip these?
        // Seems if the 'link editor' generates one something is funky with your code
        break;
    case ABSOLUTE: {
        auto symbol = relocation.symbol();
        auto res = lookup_symbol(symbol);
        VirtualAddress symbol_address;
        if (!res.has_value()) {
            if (symbol.bind() != STB_WEAK) {
                dbgln("ERROR: symbol not found: {}.", symbol.name());
                return RelocationResult::Failed;
            }
            symbol_address = VirtualAddress { (FlatPtr)0 };
        } else {
            if (res.value().type == STT_GNU_IFUNC && should_call_ifunc_resolver == ShouldCallIfuncResolver::No)
                return RelocationResult::CallIfuncResolver;
            symbol_address = res.value().address;
        }
        if (relocation.addend_used())
            *patch_ptr = symbol_address.get() + relocation.addend();
        else
            *patch_ptr += symbol_address.get();
        if (res.has_value() && res.value().type == STT_GNU_IFUNC)
            *patch_ptr = call_ifunc_resolver(VirtualAddress { *patch_ptr }).get();
        break;
    }
#if !ARCH(RISCV64)
    case GLOB_DAT: {
        auto symbol = relocation.symbol();
        auto res = lookup_symbol(symbol);
        VirtualAddress symbol_location;
        if (!res.has_value()) {
            if (symbol.bind() != STB_WEAK) {
                // Symbol not found
                return RelocationResult::Failed;
            }
            symbol_location = VirtualAddress { (FlatPtr)0 };
        } else {
            symbol_location = res.value().address;
            if (res.value().type == STT_GNU_IFUNC) {
                if (should_call_ifunc_resolver == ShouldCallIfuncResolver::No)
                    return RelocationResult::CallIfuncResolver;
                if (res.value().dynamic_object != nullptr && res.value().dynamic_object->has_text_relocations()) {
                    dbgln("\033[31mError:\033[0m Refusing to call IFUNC resolver defined in an object with text relocations.");
                    return RelocationResult::Failed;
                }
                symbol_location = call_ifunc_resolver(symbol_location);
            }
        }
        VERIFY(symbol_location != m_dynamic_object->base_address());
        *patch_ptr = symbol_location.get();
        break;
    }
#endif
    case RELATIVE: {
        if (!image().is_dynamic())
            break;
        // FIXME: According to the spec, R_386_relative ones must be done first.
        // We could explicitly do them first using m_number_of_relocations from DT_RELCOUNT
        // However, our compiler is nice enough to put them at the front of the relocations for us :)
        if (relocation.addend_used())
            *patch_ptr = m_dynamic_object->base_address().offset(relocation.addend()).get();
        else
            *patch_ptr += m_dynamic_object->base_address().get();
        break;
    }
    case TLS_TPREL: {
        auto maybe_resolution = resolve_tls_symbol(relocation);
        if (!maybe_resolution.has_value())
            break;
        auto [dynamic_object_of_symbol, symbol_value] = maybe_resolution.value();

        size_t addend = relocation.addend_used() ? relocation.addend() : *patch_ptr;
        *patch_ptr = addend + dynamic_object_of_symbol.tls_offset().value() + symbol_value + TLS_TP_STATIC_TLS_BLOCK_OFFSET;

        if constexpr (TLS_VARIANT == 1) {
            // Until offset TLS_TP_STATIC_TLS_BLOCK_OFFSET there's the thread's ThreadControlBlock, we don't want to collide with it.
            VERIFY(static_cast<ssize_t>(*patch_ptr) >= static_cast<ssize_t>(TLS_TP_STATIC_TLS_BLOCK_OFFSET));
        } else if constexpr (TLS_VARIANT == 2) {
            // At offset 0 there's the thread's ThreadControlBlock, we don't want to collide with it.
            VERIFY(static_cast<ssize_t>(*patch_ptr) < 0);
        }
        break;
    }
    case TLS_DTPMOD: {
        auto maybe_resolution = resolve_tls_symbol(relocation);
        if (!maybe_resolution.has_value())
            break;

        // We repurpose the module index to store the TLS block's TP offset. This is fine
        // because we currently only support a single static TLS block.
        *patch_ptr = maybe_resolution->dynamic_object.tls_offset().value();
        break;
    }
    case TLS_DTPREL: {
        auto maybe_resolution = resolve_tls_symbol(relocation);
        if (!maybe_resolution.has_value())
            break;

        size_t addend = relocation.addend_used() ? relocation.addend() : *patch_ptr;
        *patch_ptr = addend + maybe_resolution->value - TLS_DTV_OFFSET + TLS_TP_STATIC_TLS_BLOCK_OFFSET;
        break;
    }
#ifdef HAS_TLSDESC_SUPPORT
    case TLSDESC: {
        auto maybe_resolution = resolve_tls_symbol(relocation);
        if (!maybe_resolution.has_value())
            break;
        auto [dynamic_object_of_symbol, symbol_value] = maybe_resolution.value();

        size_t addend = relocation.addend_used() ? relocation.addend() : *patch_ptr;
        patch_ptr[0] = (FlatPtr)__tlsdesc_static;
        patch_ptr[1] = addend + dynamic_object_of_symbol.tls_offset().value() + symbol_value;
        break;
    }
#endif
    case IRELATIVE: {
        if (should_call_ifunc_resolver == ShouldCallIfuncResolver::No)
            return RelocationResult::CallIfuncResolver;
        VirtualAddress resolver;
        if (relocation.addend_used())
            resolver = m_dynamic_object->base_address().offset(relocation.addend());
        else
            resolver = m_dynamic_object->base_address().offset(*patch_ptr);

        if (m_dynamic_object->has_text_relocations()) {
            dbgln("\033[31mError:\033[0m Refusing to call IFUNC resolver defined in an object with text relocations.");
            return RelocationResult::Failed;
        }

        *patch_ptr = call_ifunc_resolver(resolver).get();
        break;
    }
    case JUMP_SLOT:
        VERIFY_NOT_REACHED(); // PLT relocations are handled by do_plt_relocation.
    default:
        // Raise the alarm! Someone needs to implement this relocation type
        dbgln("Found a new exciting relocation type {}", relocation.type());
        VERIFY_NOT_REACHED();
    }

    return RelocationResult::Success;
}
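
// Resolves a single JUMP_SLOT (PLT) relocation. This runs eagerly for bind-now objects and
// IFUNC targets, and lazily from _fixup_plt_entry() the first time a PLT stub is called.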
DynamicLoader::RelocationResult DynamicLoader::do_plt_relocation(DynamicObject::Relocation const& relocation, ShouldCallIfuncResolver should_call_ifunc_resolver)
{
    VERIFY(static_cast<GenericDynamicRelocationType>(relocation.type()) == GenericDynamicRelocationType::JUMP_SLOT);
    auto symbol = relocation.symbol();
    auto* relocation_address = (FlatPtr*)relocation.address().as_ptr();

    VirtualAddress symbol_location {};
    if (auto result = lookup_symbol(symbol); result.has_value()) {
        auto address = result.value().address;

        if (result.value().type == STT_GNU_IFUNC) {
            if (should_call_ifunc_resolver == ShouldCallIfuncResolver::No)
                return RelocationResult::CallIfuncResolver;
            symbol_location = VirtualAddress { reinterpret_cast<DynamicObject::IfuncResolver>(address.get())() };
        } else {
            symbol_location = address;
        }
    } else if (symbol.bind() != STB_WEAK) {
        return RelocationResult::Failed;
    }

    dbgln_if(DYNAMIC_LOAD_DEBUG, "DynamicLoader: Jump slot relocation: putting {} ({}) into PLT at {}", symbol.name(), symbol_location, (void*)relocation_address);
    *relocation_address = symbol_location.get();

    return RelocationResult::Success;
}
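
// DT_RELR is a compact encoding for relative relocations: no symbol lookup is needed,
// every target word simply gets the object's load base added to it.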
void DynamicLoader::do_relr_relocations()
{
    auto base_address = m_dynamic_object->base_address().get();
    m_dynamic_object->for_each_relr_relocation([base_address](FlatPtr address) {
        *(FlatPtr*)address += base_address;
    });
}
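
// Copies this object's TLS initialization image (PT_TLS) into the static TLS block.
// With TLS variant 1 (e.g. AArch64) the block lives above the thread pointer, so m_tls_offset
// indexes forward into the buffer; with variant 2 (e.g. x86-64) it lives below, so the
// (negative) offset is applied from the end of the buffer.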
void DynamicLoader::copy_initial_tls_data_into(Bytes buffer) const
{
    image().for_each_program_header([this, &buffer](ELF::Image::ProgramHeader program_header) {
        if (program_header.type() != PT_TLS)
            return IterationDecision::Continue;

        // Note: The "size in image" is only concerned with initialized data. Uninitialized data (.tbss) is
        // only included in the "size in memory" metric, and is expected to not be touched or read from, as
        // it is not present in the image and zeroed out in-memory. We will still check that the buffer has
        // space for both the initialized and the uninitialized data.
        // TODO: Is the initialized data always in the beginning of the TLS segment, or should we walk the
        // sections to figure that out?
        VERIFY(program_header.size_in_image() <= program_header.size_in_memory());
        VERIFY(program_header.size_in_memory() <= m_tls_size_of_current_object);

        if constexpr (TLS_VARIANT == 1) {
            size_t tls_start_in_buffer = m_tls_offset;
            VERIFY(tls_start_in_buffer + program_header.size_in_memory() <= buffer.size());
            memcpy(buffer.data() + tls_start_in_buffer, static_cast<u8 const*>(m_file_data) + program_header.offset(), program_header.size_in_image());
        } else if constexpr (TLS_VARIANT == 2) {
            size_t tls_start_in_buffer = buffer.size() + m_tls_offset;
            VERIFY(tls_start_in_buffer + program_header.size_in_memory() <= buffer.size());
            memcpy(buffer.data() + tls_start_in_buffer, static_cast<u8 const*>(m_file_data) + program_header.offset(), program_header.size_in_image());
        }

        return IterationDecision::Break;
    });
}
// Defined in <arch>/plt_trampoline.S
extern "C" void _plt_trampoline(void) __attribute__((visibility("hidden")));
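
// Prepare lazy binding: fill in the reserved GOT entries that the PLT stubs rely on.
// On x86-64 and AArch64, GOT[1] holds the DynamicObject* and GOT[2] the address of
// _plt_trampoline; on RISC-V the trampoline goes into GOT[0] and the object into GOT[1].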
void DynamicLoader::setup_plt_trampoline()
{
    VERIFY(m_dynamic_object);
    VERIFY(m_dynamic_object->has_plt());
    VirtualAddress got_address = m_dynamic_object->plt_got_base_address();

    auto* got_ptr = (FlatPtr*)got_address.as_ptr();

#if ARCH(AARCH64) || ARCH(X86_64)
    got_ptr[1] = (FlatPtr)m_dynamic_object.ptr();
    got_ptr[2] = (FlatPtr)&_plt_trampoline;
#elif ARCH(RISCV64)
    got_ptr[0] = (FlatPtr)&_plt_trampoline;
    got_ptr[1] = (FlatPtr)m_dynamic_object.ptr();
#else
#    error Unknown architecture
#endif
}
// Called from our ASM routine _plt_trampoline.
extern "C" FlatPtr _fixup_plt_entry(DynamicObject* object, u32 relocation_offset)
{
    auto const& relocation = object->plt_relocation_section().relocation_at_offset(relocation_offset);
    auto result = DynamicLoader::do_plt_relocation(relocation, ShouldCallIfuncResolver::Yes);
    if (result != DynamicLoader::RelocationResult::Success) {
        dbgln("Loader.so: {} unresolved symbol '{}'", object->filepath(), relocation.symbol().name());
        VERIFY_NOT_REACHED();
    }

    return *reinterpret_cast<FlatPtr*>(relocation.address().as_ptr());
}
void DynamicLoader::call_object_init_functions()
{
    typedef void (*InitFunc)();

    if (m_dynamic_object->has_init_section()) {
        auto init_function = m_dynamic_object->init_section_function();
        (init_function)();
    }

    if (m_dynamic_object->has_init_array_section()) {
        auto init_array_section = m_dynamic_object->init_array_section();

        InitFunc* init_begin = (InitFunc*)(init_array_section.address().as_ptr());
        InitFunc* init_end = init_begin + init_array_section.entry_count();
        // Increment in the for-statement so that skipping a 0/-1 entry with `continue`
        // still advances the iterator instead of looping forever.
        for (; init_begin != init_end; ++init_begin) {
            // Android sources claim that these can be -1, to be ignored.
            // 0 definitely shows up. Apparently 0/-1 are valid? Confusing.
            if (!*init_begin || ((FlatPtr)*init_begin == (FlatPtr)-1))
                continue;
            (*init_begin)();
        }
    }
}
Optional<DynamicObject::SymbolLookupResult> DynamicLoader::lookup_symbol(const ELF::DynamicObject::Symbol& symbol)
{
    if (symbol.is_undefined() || symbol.bind() == STB_WEAK)
        return DynamicLinker::lookup_global_symbol(symbol.name());

    return DynamicObject::SymbolLookupResult { symbol.value(), symbol.size(), symbol.address(), symbol.bind(), symbol.type(), &symbol.object() };
}
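
// Depth-first post-order walk over m_true_dependencies: each object's dependencies are
// appended to topological_order before the object itself, and the Visiting state keeps a
// dependency cycle from recursing forever.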
void DynamicLoader::compute_topological_order(Vector<NonnullRefPtr<DynamicLoader>>& topological_order)
{
    VERIFY(m_topological_ordering_state == TopologicalOrderingState::NotVisited);
    m_topological_ordering_state = TopologicalOrderingState::Visiting;

    Vector<NonnullRefPtr<DynamicLoader>> actual_dependencies;
    for (auto const& dependency : m_true_dependencies) {
        auto state = dependency->m_topological_ordering_state;
        if (state == TopologicalOrderingState::NotVisited)
            dependency->compute_topological_order(topological_order);
        if (state == TopologicalOrderingState::Visited)
            actual_dependencies.append(dependency);
    }
    m_true_dependencies = actual_dependencies;

    m_topological_ordering_state = TopologicalOrderingState::Visited;
    topological_order.append(*this);
}

}