run-tests.cpp 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358
  1. /*
  2. * Copyright (c) 2021, Andrew Kaster <akaster@serenityos.org>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/LexicalPath.h>
  7. #include <LibCore/ArgsParser.h>
  8. #include <LibCore/ConfigFile.h>
  9. #include <LibCore/File.h>
  10. #include <LibRegex/Regex.h>
  11. #include <LibTest/TestRunner.h>
  12. #include <signal.h>
  13. #include <spawn.h>
  14. #include <sys/wait.h>
  15. #include <unistd.h>
namespace Test {
// Out-of-line storage for the singleton declared in LibTest/TestRunner.h;
// set by the ::Test::TestRunner base constructor.
TestRunner* TestRunner::s_the = nullptr;
}

using Test::get_time_in_ms;
using Test::print_modifiers;
// Outcome of executing a single test executable.
struct FileResult {
    LexicalPath file_path;                      // Path of the test that was run.
    double time_taken { 0 };                    // Wall-clock duration in milliseconds.
    Test::Result result { Test::Result::Pass }; // Pass/Skip/Fail/Crashed verdict.
    int stdout_err_fd { -1 };                   // Temp file with the child's combined stdout+stderr;
                                                // -1 when the test was skipped. The receiver closes it.
};
// Path of the test currently executing; read by the SIGINFO handler in main().
String g_currently_running_test;
  28. class TestRunner : public ::Test::TestRunner {
  29. public:
  30. TestRunner(String test_root, Regex<PosixExtended> exclude_regex, NonnullRefPtr<Core::ConfigFile> config, Regex<PosixExtended> skip_regex, bool print_progress, bool print_json, bool print_all_output, bool print_times = true)
  31. : ::Test::TestRunner(move(test_root), print_times, print_progress, print_json)
  32. , m_exclude_regex(move(exclude_regex))
  33. , m_config(move(config))
  34. , m_skip_regex(move(skip_regex))
  35. , m_print_all_output(print_all_output)
  36. {
  37. m_skip_directories = m_config->read_entry("Global", "SkipDirectories", "").split(' ');
  38. m_skip_files = m_config->read_entry("Global", "SkipTests", "").split(' ');
  39. }
  40. virtual ~TestRunner() = default;
  41. protected:
  42. virtual void do_run_single_test(const String& test_path, size_t current_text_index, size_t num_tests) override;
  43. virtual Vector<String> get_test_paths() const override;
  44. virtual const Vector<String>* get_failed_test_names() const override { return &m_failed_test_names; }
  45. virtual FileResult run_test_file(const String& test_path);
  46. bool should_skip_test(const LexicalPath& test_path);
  47. Regex<PosixExtended> m_exclude_regex;
  48. NonnullRefPtr<Core::ConfigFile> m_config;
  49. Vector<String> m_skip_directories;
  50. Vector<String> m_skip_files;
  51. Vector<String> m_failed_test_names;
  52. Regex<PosixExtended> m_skip_regex;
  53. bool m_print_all_output { false };
  54. };
  55. Vector<String> TestRunner::get_test_paths() const
  56. {
  57. Vector<String> paths;
  58. Test::iterate_directory_recursively(m_test_root, [&](const String& file_path) {
  59. if (access(file_path.characters(), R_OK | X_OK) != 0)
  60. return;
  61. auto result = m_exclude_regex.match(file_path, PosixFlags::Global);
  62. if (!result.success) // must NOT match the regex to be a valid test file
  63. paths.append(file_path);
  64. });
  65. quick_sort(paths);
  66. return paths;
  67. }
  68. bool TestRunner::should_skip_test(const LexicalPath& test_path)
  69. {
  70. for (const String& dir : m_skip_directories) {
  71. if (test_path.dirname().contains(dir))
  72. return true;
  73. }
  74. for (const String& file : m_skip_files) {
  75. if (test_path.basename().contains(file))
  76. return true;
  77. }
  78. auto result = m_skip_regex.match(test_path.basename(), PosixFlags::Global);
  79. if (result.success)
  80. return true;
  81. return false;
  82. }
  83. void TestRunner::do_run_single_test(const String& test_path, size_t current_test_index, size_t num_tests)
  84. {
  85. g_currently_running_test = test_path;
  86. auto test_relative_path = LexicalPath::relative_path(test_path, m_test_root);
  87. outln(" START {} ({}/{})", test_relative_path, current_test_index, num_tests);
  88. fflush(stdout); // we really want to see the start text in case the test hangs
  89. auto test_result = run_test_file(test_path);
  90. switch (test_result.result) {
  91. case Test::Result::Pass:
  92. ++m_counts.tests_passed;
  93. break;
  94. case Test::Result::Skip:
  95. ++m_counts.tests_skipped;
  96. break;
  97. case Test::Result::Fail:
  98. ++m_counts.tests_failed;
  99. break;
  100. case Test::Result::Crashed:
  101. ++m_counts.tests_failed; // FIXME: tests_crashed
  102. break;
  103. }
  104. if (test_result.result != Test::Result::Skip)
  105. ++m_counts.files_total;
  106. m_total_elapsed_time_in_ms += test_result.time_taken;
  107. bool crashed_or_failed = test_result.result == Test::Result::Fail || test_result.result == Test::Result::Crashed;
  108. bool print_stdout_stderr = crashed_or_failed || m_print_all_output;
  109. if (crashed_or_failed) {
  110. m_failed_test_names.append(test_path);
  111. print_modifiers({ Test::BG_RED, Test::FG_BLACK, Test::FG_BOLD });
  112. out("{}", test_result.result == Test::Result::Fail ? " FAIL " : "CRASHED");
  113. print_modifiers({ Test::CLEAR });
  114. } else {
  115. print_modifiers({ Test::BG_GREEN, Test::FG_BLACK, Test::FG_BOLD });
  116. out(" PASS ");
  117. print_modifiers({ Test::CLEAR });
  118. }
  119. out(" {}", test_relative_path);
  120. print_modifiers({ Test::CLEAR, Test::ITALIC, Test::FG_GRAY });
  121. if (test_result.time_taken < 1000) {
  122. outln(" ({}ms)", static_cast<int>(test_result.time_taken));
  123. } else {
  124. outln(" ({:3}s)", test_result.time_taken / 1000.0);
  125. }
  126. print_modifiers({ Test::CLEAR });
  127. if (test_result.result != Test::Result::Pass) {
  128. print_modifiers({ Test::FG_GRAY, Test::FG_BOLD });
  129. out(" Test: ");
  130. if (crashed_or_failed) {
  131. print_modifiers({ Test::CLEAR, Test::FG_RED });
  132. outln("{} ({})", test_result.file_path.basename(), test_result.result == Test::Result::Fail ? "failed" : "crashed");
  133. } else {
  134. print_modifiers({ Test::CLEAR, Test::FG_ORANGE });
  135. outln("{} (skipped)", test_result.file_path.basename());
  136. }
  137. print_modifiers({ Test::CLEAR });
  138. }
  139. // Make sure our clear modifiers goes through before we dump file output via write(2)
  140. fflush(stdout);
  141. if (print_stdout_stderr && test_result.stdout_err_fd > 0) {
  142. int ret = lseek(test_result.stdout_err_fd, 0, SEEK_SET);
  143. VERIFY(ret == 0);
  144. for (;;) {
  145. char buf[32768];
  146. ssize_t nread = read(test_result.stdout_err_fd, buf, sizeof(buf));
  147. if (nread == 0)
  148. break;
  149. if (nread < 0) {
  150. perror("read");
  151. break;
  152. }
  153. size_t already_written = 0;
  154. while (already_written < (size_t)nread) {
  155. ssize_t nwritten = write(STDOUT_FILENO, buf + already_written, nread - already_written);
  156. if (nwritten < 0) {
  157. perror("write");
  158. break;
  159. }
  160. already_written += nwritten;
  161. }
  162. }
  163. }
  164. close(test_result.stdout_err_fd);
  165. }
  166. FileResult TestRunner::run_test_file(const String& test_path)
  167. {
  168. double start_time = get_time_in_ms();
  169. auto path_for_test = LexicalPath(test_path);
  170. if (should_skip_test(path_for_test)) {
  171. return FileResult { move(path_for_test), 0.0, Test::Result::Skip, -1 };
  172. }
  173. // FIXME: actual error handling, mark test as :yaksplode: if any are bad instead of VERIFY
  174. posix_spawn_file_actions_t file_actions;
  175. posix_spawn_file_actions_init(&file_actions);
  176. char child_out_err_path[] = "/tmp/run-tests.XXXXXX";
  177. int child_out_err_file = mkstemp(child_out_err_path);
  178. VERIFY(child_out_err_file >= 0);
  179. String dirname = path_for_test.dirname();
  180. String basename = path_for_test.basename();
  181. (void)posix_spawn_file_actions_adddup2(&file_actions, child_out_err_file, STDOUT_FILENO);
  182. (void)posix_spawn_file_actions_adddup2(&file_actions, child_out_err_file, STDERR_FILENO);
  183. (void)posix_spawn_file_actions_addchdir(&file_actions, dirname.characters());
  184. Vector<const char*, 4> argv;
  185. argv.append(basename.characters());
  186. auto extra_args = m_config->read_entry(path_for_test.basename(), "Arguments", "").split(' ');
  187. for (auto& arg : extra_args)
  188. argv.append(arg.characters());
  189. argv.append(nullptr);
  190. pid_t child_pid = -1;
  191. // FIXME: Do we really want to copy test runner's entire env?
  192. int ret = posix_spawn(&child_pid, test_path.characters(), &file_actions, nullptr, const_cast<char* const*>(argv.data()), environ);
  193. VERIFY(ret == 0);
  194. VERIFY(child_pid > 0);
  195. int wstatus;
  196. Test::Result test_result = Test::Result::Fail;
  197. for (size_t num_waits = 0; num_waits < 2; ++num_waits) {
  198. ret = waitpid(child_pid, &wstatus, 0); // intentionally not setting WCONTINUED
  199. if (ret != child_pid)
  200. break; // we'll end up with a failure
  201. if (WIFEXITED(wstatus)) {
  202. if (wstatus == 0) {
  203. test_result = Test::Result::Pass;
  204. }
  205. break;
  206. } else if (WIFSIGNALED(wstatus)) {
  207. test_result = Test::Result::Crashed;
  208. break;
  209. } else if (WIFSTOPPED(wstatus)) {
  210. outln("{} was stopped unexpectedly, sending SIGCONT", test_path);
  211. kill(child_pid, SIGCONT);
  212. }
  213. }
  214. // Remove the child's stdout from /tmp. This does cause the temp file to be observable
  215. // while the test is executing, but if it hangs that might even be a bonus :)
  216. ret = unlink(child_out_err_path);
  217. VERIFY(ret == 0);
  218. return FileResult { move(path_for_test), get_time_in_ms() - start_time, test_result, child_out_err_file };
  219. }
// Entry point: parse options, load configuration, construct the runner, run
// the matching tests, and return the number of failed tests as the exit code.
int main(int argc, char** argv)
{
    // NOTE(review): program_name is currently unused.
    auto program_name = LexicalPath::basename(argv[0]);

#ifdef SIGINFO
    // On platforms with SIGINFO, dump a progress summary on demand.
    // NOTE(review): snprintf is not guaranteed async-signal-safe — confirm
    // this is acceptable on the targeted platforms.
    signal(SIGINFO, [](int) {
        static char buffer[4096];
        auto& counts = ::Test::TestRunner::the()->counts();
        int len = snprintf(buffer, sizeof(buffer), "Pass: %d, Fail: %d, Skip: %d\nCurrent test: %s\n", counts.tests_passed, counts.tests_failed, counts.tests_skipped, g_currently_running_test.characters());
        write(STDOUT_FILENO, buffer, len);
    });
#endif

    // Progress reporting defaults on only under SerenityOS, where the
    // terminal understands the OSC 9 progress sequence.
    bool print_progress =
#ifdef __serenity__
        true; // Use OSC 9 to print progress
#else
        false;
#endif
    bool print_json = false;
    bool print_all_output = false;
    const char* specified_test_root = nullptr;
    String test_glob;
    String exclude_pattern;
    String config_file;

    Core::ArgsParser args_parser;
    // -p takes an explicit true/false value rather than acting as a flag.
    args_parser.add_option(Core::ArgsParser::Option {
        .requires_argument = true,
        .help_string = "Show progress with OSC 9 (true, false)",
        .long_name = "show-progress",
        .short_name = 'p',
        .accept_value = [&](auto* str) {
            if ("true"sv == str)
                print_progress = true;
            else if ("false"sv == str)
                print_progress = false;
            else
                return false;
            return true;
        },
    });
    args_parser.add_option(print_json, "Show results as JSON", "json", 'j');
    args_parser.add_option(print_all_output, "Show all test output", "verbose", 'v');
    args_parser.add_option(test_glob, "Only run tests matching the given glob", "filter", 'f', "glob");
    args_parser.add_option(exclude_pattern, "Regular expression to use to exclude paths from being considered tests", "exclude-pattern", 'e', "pattern");
    args_parser.add_option(config_file, "Configuration file to use", "config-file", 'c', "filename");
    args_parser.add_positional_argument(specified_test_root, "Tests root directory", "path", Core::ArgsParser::Required::No);
    args_parser.parse(argc, argv);

    // Wrap the user filter so it matches anywhere in the test path.
    test_glob = String::formatted("*{}*", test_glob);

    if (getenv("DISABLE_DBG_OUTPUT")) {
        AK::set_debug_enabled(false);
    }

    // Resolve the test root: positional argument or the default install path.
    String test_root;
    if (specified_test_root) {
        test_root = String { specified_test_root };
    } else {
        test_root = "/usr/Tests";
    }
    if (!Core::File::is_directory(test_root)) {
        warnln("Test root is not a directory: {}", test_root);
        return 1;
    }

    test_root = Core::File::real_path_for(test_root);

    // Run from inside the test root so relative paths in tests behave.
    if (chdir(test_root.characters()) < 0) {
        auto saved_errno = errno;
        warnln("chdir failed: {}", strerror(saved_errno));
        return 1;
    }

    auto config = config_file.is_empty() ? Core::ConfigFile::open_for_app("Tests") : Core::ConfigFile::open(config_file);
    if (config->num_groups() == 0)
        warnln("Empty configuration file ({}) loaded!", config_file.is_empty() ? "User config for Tests" : config_file.characters());

    if (exclude_pattern.is_empty())
        exclude_pattern = config->read_entry("Global", "NotTestsPattern", "$^"); // default is match nothing (aka match end then beginning)

    // Validate both regexes up front so a bad pattern fails fast with a
    // readable error instead of surfacing mid-run.
    Regex<PosixExtended> exclude_regex(exclude_pattern, {});
    if (exclude_regex.parser_result.error != Error::NoError) {
        warnln("Exclude pattern \"{}\" is invalid", exclude_pattern);
        return 1;
    }

    // we need to preconfigure this, because we can't autoinitialize Regex types
    // in the Testrunner
    auto skip_regex_pattern = config->read_entry("Global", "SkipRegex", "$^");
    Regex<PosixExtended> skip_regex { skip_regex_pattern, {} };
    if (skip_regex.parser_result.error != Error::NoError) {
        warnln("SkipRegex pattern \"{}\" is invalid", skip_regex_pattern);
        return 1;
    }

    TestRunner test_runner(test_root, move(exclude_regex), move(config), move(skip_regex), print_progress, print_json, print_all_output);
    test_runner.run(test_glob);

    // Exit code is the failure count (0 == all passed).
    return test_runner.counts().tests_failed;
}