run-tests.cpp 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425
  1. /*
  2. * Copyright (c) 2021, Andrew Kaster <akaster@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
#include <AK/LexicalPath.h>
#include <LibCore/ArgsParser.h>
#include <LibCore/ConfigFile.h>
#include <LibCore/DirIterator.h>
#include <LibCore/Environment.h>
#include <LibCore/System.h>
#include <LibCoredump/Backtrace.h>
#include <LibFileSystem/FileSystem.h>
#include <LibMain/Main.h>
#include <LibRegex/Regex.h>
#include <LibTest/TestRunner.h>
#include <signal.h>
#include <spawn.h>
#include <stdlib.h>
#include <sys/wait.h>
#include <unistd.h>
namespace Test {
// Storage for the singleton pointer declared in LibTest's TestRunner;
// set by the ::Test::TestRunner constructor when our runner is created.
TestRunner* TestRunner::s_the = nullptr;
}
  24. using Test::get_time_in_ms;
  25. using Test::print_modifiers;
// Outcome of running a single test executable.
struct FileResult {
    LexicalPath file_path;                       // Path of the test that was run.
    double time_taken { 0 };                     // Wall-clock duration in milliseconds.
    Test::Result result { Test::Result::Pass };  // Pass/Fail/Crashed/Skip/ExpectedFail.
    int stdout_err_fd { -1 };                    // Open fd holding captured stdout+stderr; -1 if the test was skipped. Caller closes.
    pid_t child_pid { 0 };                       // Pid the test ran as (used to locate its coredump); 0 if never spawned.
};
// Path of the test currently executing; reported by the SIGINFO handler.
ByteString g_currently_running_test;
  34. class TestRunner : public ::Test::TestRunner {
  35. public:
  36. TestRunner(ByteString test_root, Regex<PosixExtended> exclude_regex, NonnullRefPtr<Core::ConfigFile> config, Regex<PosixExtended> skip_regex, bool run_skipped_tests, bool print_progress, bool print_json, bool print_all_output, bool unlink_coredumps, bool print_times = true)
  37. : ::Test::TestRunner(move(test_root), print_times, print_progress, print_json)
  38. , m_exclude_regex(move(exclude_regex))
  39. , m_config(move(config))
  40. , m_skip_regex(move(skip_regex))
  41. , m_run_skipped_tests(run_skipped_tests)
  42. , m_print_all_output(print_all_output)
  43. , m_unlink_coredumps(unlink_coredumps)
  44. {
  45. if (!run_skipped_tests) {
  46. m_skip_directories = m_config->read_entry("Global", "SkipDirectories", "").split(' ');
  47. m_skip_files = m_config->read_entry("Global", "SkipTests", "").split(' ');
  48. }
  49. }
  50. virtual ~TestRunner() = default;
  51. protected:
  52. virtual void do_run_single_test(ByteString const& test_path, size_t current_text_index, size_t num_tests) override;
  53. virtual Vector<ByteString> get_test_paths() const override;
  54. virtual Vector<ByteString> const* get_failed_test_names() const override { return &m_failed_test_names; }
  55. virtual FileResult run_test_file(ByteString const& test_path);
  56. bool should_skip_test(LexicalPath const& test_path);
  57. Regex<PosixExtended> m_exclude_regex;
  58. NonnullRefPtr<Core::ConfigFile> m_config;
  59. Vector<ByteString> m_skip_directories;
  60. Vector<ByteString> m_skip_files;
  61. Vector<ByteString> m_failed_test_names;
  62. Regex<PosixExtended> m_skip_regex;
  63. bool m_run_skipped_tests { false };
  64. bool m_print_all_output { false };
  65. bool m_unlink_coredumps { false };
  66. };
  67. Vector<ByteString> TestRunner::get_test_paths() const
  68. {
  69. Vector<ByteString> paths;
  70. Test::iterate_directory_recursively(m_test_root, [&](ByteString const& file_path) {
  71. if (access(file_path.characters(), R_OK | X_OK) != 0)
  72. return;
  73. auto result = m_exclude_regex.match(file_path, PosixFlags::Global);
  74. if (!result.success) // must NOT match the regex to be a valid test file
  75. paths.append(file_path);
  76. });
  77. quick_sort(paths);
  78. return paths;
  79. }
  80. bool TestRunner::should_skip_test(LexicalPath const& test_path)
  81. {
  82. if (m_run_skipped_tests)
  83. return false;
  84. for (ByteString const& dir : m_skip_directories) {
  85. if (test_path.dirname().contains(dir))
  86. return true;
  87. }
  88. for (ByteString const& file : m_skip_files) {
  89. if (test_path.basename().contains(file))
  90. return true;
  91. }
  92. auto result = m_skip_regex.match(test_path.basename(), PosixFlags::Global);
  93. if (result.success)
  94. return true;
  95. return false;
  96. }
  97. void TestRunner::do_run_single_test(ByteString const& test_path, size_t current_test_index, size_t num_tests)
  98. {
  99. g_currently_running_test = test_path;
  100. auto test_relative_path = LexicalPath::relative_path(test_path, m_test_root);
  101. outln(" START {} ({}/{})", test_relative_path, current_test_index, num_tests);
  102. fflush(stdout); // we really want to see the start text in case the test hangs
  103. auto test_result = run_test_file(test_path);
  104. switch (test_result.result) {
  105. case Test::Result::Pass:
  106. ++m_counts.tests_passed;
  107. break;
  108. case Test::Result::ExpectedFail:
  109. ++m_counts.tests_expected_failed;
  110. break;
  111. case Test::Result::Skip:
  112. ++m_counts.tests_skipped;
  113. break;
  114. case Test::Result::Fail:
  115. ++m_counts.tests_failed;
  116. break;
  117. case Test::Result::Crashed:
  118. ++m_counts.tests_failed; // FIXME: tests_crashed
  119. break;
  120. }
  121. if (test_result.result != Test::Result::Skip)
  122. ++m_counts.files_total;
  123. m_total_elapsed_time_in_ms += test_result.time_taken;
  124. bool crashed_or_failed = test_result.result == Test::Result::Fail || test_result.result == Test::Result::Crashed;
  125. bool print_stdout_stderr = crashed_or_failed || m_print_all_output;
  126. if (crashed_or_failed) {
  127. m_failed_test_names.append(test_path);
  128. print_modifiers({ Test::BG_RED, Test::FG_BOLD });
  129. out("{}", test_result.result == Test::Result::Fail ? " FAIL " : "CRASHED");
  130. print_modifiers({ Test::CLEAR });
  131. if (test_result.result == Test::Result::Crashed) {
  132. auto pid_search_string = ByteString::formatted("_{}_", test_result.child_pid);
  133. Optional<ByteString> coredump_path;
  134. Core::DirIterator iterator("/tmp/coredump"sv);
  135. if (!iterator.has_error()) {
  136. while (iterator.has_next()) {
  137. auto path = iterator.next_full_path();
  138. if (!path.contains(pid_search_string))
  139. continue;
  140. coredump_path = path;
  141. auto reader = Coredump::Reader::create(path);
  142. if (!reader)
  143. break;
  144. dbgln("Last crash backtrace for {} (was pid {}):", test_path, test_result.child_pid);
  145. reader->for_each_thread_info([&](auto thread_info) {
  146. Coredump::Backtrace thread_backtrace(*reader, thread_info);
  147. auto tid = thread_info.tid; // Note: Yoinking this out of the struct because we can't pass a reference to it (as it's a misaligned field in a packed struct)
  148. dbgln("Thread {}", tid);
  149. for (auto const& entry : thread_backtrace.entries())
  150. dbgln("- {}", entry.to_byte_string(true));
  151. return IterationDecision::Continue;
  152. });
  153. break;
  154. }
  155. }
  156. if (m_unlink_coredumps && coredump_path.has_value())
  157. (void)Core::System::unlink(coredump_path.value());
  158. }
  159. } else {
  160. print_modifiers({ Test::BG_GREEN, Test::FG_BLACK, Test::FG_BOLD });
  161. out(" PASS ");
  162. print_modifiers({ Test::CLEAR });
  163. }
  164. out(" {}", test_relative_path);
  165. print_modifiers({ Test::CLEAR, Test::ITALIC, Test::FG_GRAY });
  166. if (test_result.time_taken < 1000) {
  167. outln(" ({}ms)", static_cast<int>(test_result.time_taken));
  168. } else {
  169. outln(" ({:3}s)", test_result.time_taken / 1000.0);
  170. }
  171. print_modifiers({ Test::CLEAR });
  172. if (test_result.result != Test::Result::Pass) {
  173. print_modifiers({ Test::FG_GRAY, Test::FG_BOLD });
  174. out(" Test: ");
  175. if (crashed_or_failed) {
  176. print_modifiers({ Test::CLEAR, Test::FG_RED });
  177. outln("{} ({})", test_result.file_path.basename(), test_result.result == Test::Result::Fail ? "failed" : "crashed");
  178. } else {
  179. print_modifiers({ Test::CLEAR, Test::FG_ORANGE });
  180. auto const status = test_result.result == Test::Result::Skip ? "skipped"sv : "expected fail"sv;
  181. outln("{} ({})", test_result.file_path.basename(), status);
  182. }
  183. print_modifiers({ Test::CLEAR });
  184. }
  185. // Make sure our clear modifiers goes through before we dump file output via write(2)
  186. fflush(stdout);
  187. if (print_stdout_stderr && test_result.stdout_err_fd > 0) {
  188. int ret = lseek(test_result.stdout_err_fd, 0, SEEK_SET);
  189. VERIFY(ret == 0);
  190. for (;;) {
  191. char buf[32768];
  192. ssize_t nread = read(test_result.stdout_err_fd, buf, sizeof(buf));
  193. if (nread == 0)
  194. break;
  195. if (nread < 0) {
  196. perror("read");
  197. break;
  198. }
  199. size_t already_written = 0;
  200. while (already_written < (size_t)nread) {
  201. ssize_t nwritten = write(STDOUT_FILENO, buf + already_written, nread - already_written);
  202. if (nwritten < 0) {
  203. perror("write");
  204. break;
  205. }
  206. already_written += nwritten;
  207. }
  208. }
  209. }
  210. close(test_result.stdout_err_fd);
  211. }
  212. FileResult TestRunner::run_test_file(ByteString const& test_path)
  213. {
  214. double start_time = get_time_in_ms();
  215. auto path_for_test = LexicalPath(test_path);
  216. if (should_skip_test(path_for_test)) {
  217. return FileResult { move(path_for_test), 0.0, Test::Result::Skip, -1 };
  218. }
  219. // FIXME: actual error handling, mark test as :yaksplode: if any are bad instead of VERIFY
  220. posix_spawn_file_actions_t file_actions;
  221. posix_spawn_file_actions_init(&file_actions);
  222. char child_out_err_path[] = "/tmp/run-tests.XXXXXX";
  223. int child_out_err_file = mkstemp(child_out_err_path);
  224. VERIFY(child_out_err_file >= 0);
  225. ByteString dirname = path_for_test.dirname();
  226. ByteString basename = path_for_test.basename();
  227. (void)posix_spawn_file_actions_adddup2(&file_actions, child_out_err_file, STDOUT_FILENO);
  228. (void)posix_spawn_file_actions_adddup2(&file_actions, child_out_err_file, STDERR_FILENO);
  229. (void)posix_spawn_file_actions_addchdir(&file_actions, dirname.characters());
  230. Vector<char const*, 4> argv;
  231. argv.append(basename.characters());
  232. auto extra_args = m_config->read_entry(path_for_test.basename(), "Arguments", "").split(' ');
  233. for (auto& arg : extra_args)
  234. argv.append(arg.characters());
  235. argv.append(nullptr);
  236. pid_t child_pid = -1;
  237. // FIXME: Do we really want to copy test runner's entire env?
  238. int ret = posix_spawn(&child_pid, test_path.characters(), &file_actions, nullptr, const_cast<char* const*>(argv.data()), environ);
  239. VERIFY(ret == 0);
  240. VERIFY(child_pid > 0);
  241. int wstatus;
  242. Test::Result test_result = Test::Result::Fail;
  243. for (size_t num_waits = 0; num_waits < 2; ++num_waits) {
  244. ret = waitpid(child_pid, &wstatus, 0); // intentionally not setting WCONTINUED
  245. if (ret != child_pid)
  246. break; // we'll end up with a failure
  247. if (WIFEXITED(wstatus)) {
  248. if (WEXITSTATUS(wstatus) == 0) {
  249. test_result = Test::Result::Pass;
  250. }
  251. break;
  252. } else if (WIFSIGNALED(wstatus)) {
  253. test_result = Test::Result::Crashed;
  254. break;
  255. } else if (WIFSTOPPED(wstatus)) {
  256. outln("{} was stopped unexpectedly, sending SIGCONT", test_path);
  257. kill(child_pid, SIGCONT);
  258. }
  259. }
  260. // Remove the child's stdout from /tmp. This does cause the temp file to be observable
  261. // while the test is executing, but if it hangs that might even be a bonus :)
  262. ret = unlink(child_out_err_path);
  263. VERIFY(ret == 0);
  264. return FileResult { move(path_for_test), get_time_in_ms() - start_time, test_result, child_out_err_file, child_pid };
  265. }
// Entry point: parse options, locate and validate the test root, load the
// Tests configuration, compile the exclude/skip regexes, then run all
// matching tests. Returns the number of failed tests as the exit code.
ErrorOr<int> serenity_main(Main::Arguments arguments)
{
    auto program_name = LexicalPath::basename(arguments.strings[0]);

#ifdef SIGINFO
    // On SIGINFO (Ctrl+T where supported), dump a progress snapshot.
    // NOTE(review): handler uses snprintf+write only — presumably chosen for
    // async-signal-safety; keep it free of allocating calls.
    TRY(Core::System::signal(SIGINFO, [](int) {
        static char buffer[4096];
        auto& counts = ::Test::TestRunner::the()->counts();
        int len = snprintf(buffer, sizeof(buffer), "Pass: %d, Fail: %d, Skip: %d\nCurrent test: %s\n", counts.tests_passed, counts.tests_failed, counts.tests_skipped, g_currently_running_test.characters());
        write(STDOUT_FILENO, buffer, len);
    }));
#endif

    bool print_progress =
#ifdef AK_OS_SERENITY
        true; // Use OSC 9 to print progress
#else
        false;
#endif
    bool print_json = false;
    bool print_all_output = false;
    bool run_benchmarks = false;
    bool run_skipped_tests = false;
    bool unlink_coredumps = false;
    StringView specified_test_root;
    ByteString test_glob;
    ByteString exclude_pattern;
    ByteString config_file;

    Core::ArgsParser args_parser;
    // --show-progress takes an explicit true/false so the platform default
    // above can be overridden in either direction.
    args_parser.add_option(Core::ArgsParser::Option {
        .argument_mode = Core::ArgsParser::OptionArgumentMode::Required,
        .help_string = "Show progress with OSC 9 (true, false)",
        .long_name = "show-progress",
        .short_name = 'p',
        .accept_value = [&](StringView str) {
            if ("true"sv == str)
                print_progress = true;
            else if ("false"sv == str)
                print_progress = false;
            else
                return false;
            return true;
        },
    });
    args_parser.add_option(print_json, "Show results as JSON", "json", 'j');
    args_parser.add_option(print_all_output, "Show all test output", "verbose", 'v');
    args_parser.add_option(run_benchmarks, "Run benchmarks as well", "benchmarks", 'b');
    args_parser.add_option(run_skipped_tests, "Run all matching tests, even those marked as 'skip'", "all", 'a');
    args_parser.add_option(unlink_coredumps, "Unlink coredumps after printing backtraces", "unlink-coredumps");
    args_parser.add_option(test_glob, "Only run tests matching the given glob", "filter", 'f', "glob");
    args_parser.add_option(exclude_pattern, "Regular expression to use to exclude paths from being considered tests", "exclude-pattern", 'e', "pattern");
    args_parser.add_option(config_file, "Configuration file to use", "config-file", 'c', "filename");
    args_parser.add_positional_argument(specified_test_root, "Tests root directory", "path", Core::ArgsParser::Required::No);
    args_parser.parse(arguments);

    // Turn the user's filter into a substring glob (empty filter -> "**" matches everything).
    test_glob = ByteString::formatted("*{}*", test_glob);

    if (Core::Environment::has("DISABLE_DBG_OUTPUT"sv))
        AK::set_debug_enabled(false);

    // Make UBSAN deadly for all tests we run by default.
    TRY(Core::Environment::set("UBSAN_OPTIONS"sv, "halt_on_error=1"sv, Core::Environment::Overwrite::Yes));

    // TESTS_ONLY tells test binaries to skip their benchmark sections.
    if (!run_benchmarks)
        TRY(Core::Environment::set("TESTS_ONLY"sv, "1"sv, Core::Environment::Overwrite::Yes));

    ByteString test_root;
    if (!specified_test_root.is_empty()) {
        test_root = ByteString { specified_test_root };
    } else {
        test_root = "/usr/Tests";
    }
    if (!FileSystem::is_directory(test_root)) {
        warnln("Test root is not a directory: {}", test_root);
        return 1;
    }

    // Canonicalize the root and chdir into it before any test paths are computed.
    test_root = TRY(FileSystem::real_path(test_root));

    auto void_or_error = Core::System::chdir(test_root);
    if (void_or_error.is_error()) {
        warnln("chdir failed: {}", void_or_error.error());
        return void_or_error.release_error();
    }

    // Either the user-supplied config file, or the per-user "Tests" app config.
    auto config_or_error = config_file.is_empty() ? Core::ConfigFile::open_for_app("Tests") : Core::ConfigFile::open(config_file);
    if (config_or_error.is_error()) {
        warnln("Failed to open configuration file ({}): {}", config_file.is_empty() ? "User config for Tests" : config_file.characters(), config_or_error.error());
        return config_or_error.release_error();
    }
    auto config = config_or_error.release_value();
    if (config->num_groups() == 0)
        warnln("Empty configuration file ({}) loaded!", config_file.is_empty() ? "User config for Tests" : config_file.characters());

    if (exclude_pattern.is_empty())
        exclude_pattern = config->read_entry("Global", "NotTestsPattern", "$^"); // default is match nothing (aka match end then beginning)

    Regex<PosixExtended> exclude_regex(exclude_pattern, {});
    if (exclude_regex.parser_result.error != regex::Error::NoError) {
        warnln("Exclude pattern \"{}\" is invalid", exclude_pattern);
        return 1;
    }

    // we need to preconfigure this, because we can't autoinitialize Regex types
    // in the Testrunner
    auto skip_regex_pattern = config->read_entry("Global", "SkipRegex", "$^");
    Regex<PosixExtended> skip_regex { skip_regex_pattern, {} };
    if (skip_regex.parser_result.error != regex::Error::NoError) {
        warnln("SkipRegex pattern \"{}\" is invalid", skip_regex_pattern);
        return 1;
    }

    TestRunner test_runner(test_root, move(exclude_regex), move(config), move(skip_regex), run_skipped_tests, print_progress, print_json, print_all_output, unlink_coredumps);
    test_runner.run(test_glob);

    // Exit code is the failure count, so scripts can detect any failure.
    return test_runner.counts().tests_failed;
}