From 78afd368cc0f6c408b7df661bf66ebbd57ac7c30 Mon Sep 17 00:00:00 2001
From: John Konecny <24961694+jfkonecn@users.noreply.github.com>
Date: Sat, 3 Aug 2024 19:51:47 -0400
Subject: [PATCH] Can now test multiple files

---
 crates/cli/src/lib.rs | 230 ++++++++++++++++++++++--------------------
 1 file changed, 121 insertions(+), 109 deletions(-)

diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs
index 982358d03a5..669de4264b5 100644
--- a/crates/cli/src/lib.rs
+++ b/crates/cli/src/lib.rs
@@ -264,6 +264,7 @@ pub fn build_app() -> Command {
                 Arg::new(ROC_FILE)
                     .help("The .roc file to test")
                     .value_parser(value_parser!(PathBuf))
+                    .num_args(0..)
                     .required(false)
                     .default_value(DEFAULT_ROC_FILENAME)
             )
@@ -498,18 +499,20 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
         Some(n) => Threading::AtMost(*n),
     };
 
-    let path = matches.get_one::<PathBuf>(ROC_FILE).unwrap();
+    // let path = matches.get_one::<PathBuf>(ROC_FILE).unwrap();
+    let paths: Vec<_> = matches.get_many::<PathBuf>(ROC_FILE).unwrap().collect();
 
-    // Spawn the root task
-    if !path.exists() {
-        let current_dir = env::current_dir().unwrap();
-        let expected_file_path = current_dir.join(path);
+    for path in paths.iter() {
+        // Spawn the root task
+        if !path.exists() {
+            let current_dir = env::current_dir().unwrap();
+            let expected_file_path = current_dir.join(path);
 
-        let current_dir_string = current_dir.display();
-        let expected_file_path_string = expected_file_path.display();
+            let current_dir_string = current_dir.display();
+            let expected_file_path_string = expected_file_path.display();
 
-        // TODO these should use roc_reporting to display nicer error messages.
-        match matches.value_source(ROC_FILE) {
+            // TODO these should use roc_reporting to display nicer error messages.
+            match matches.value_source(ROC_FILE) {
             Some(ValueSource::DefaultValue) => {
                 eprintln!(
                     "\nThe current directory ({current_dir_string}) does not contain a {DEFAULT_ROC_FILENAME} file to use as a default.\n\nYou can run `roc help` for more information on how to provide a .roc file.\n"
                 )
             }
@@ -518,126 +521,124 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
             _ => eprintln!("\nThis file was not found: {expected_file_path_string}\n\nYou can run `roc help` for more information on how to provide a .roc file.\n"),
         }
 
-        process::exit(1);
+            process::exit(1);
+        }
     }
 
-    let arena = &arena;
-    let function_kind = FunctionKind::from_env();
+    let mut all_files_total_failed_count = 0;
+    let mut all_files_total_passed_count = 0;
 
-    let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
-
-    // Step 1: compile the app and generate the .o file
-    let load_config = LoadConfig {
-        target,
-        function_kind,
-        // TODO: expose this from CLI?
-        render: roc_reporting::report::RenderTarget::ColorTerminal,
-        palette: roc_reporting::report::DEFAULT_PALETTE,
-        threading,
-        exec_mode: ExecutionMode::Test,
-    };
-    let load_result = roc_load::load_and_monomorphize(
-        arena,
-        path.to_path_buf(),
-        opt_main_path.cloned(),
-        RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
-        load_config,
-    );
+    for path in paths.iter() {
+        let arena = &arena;
+        let function_kind = FunctionKind::from_env();
 
-    let mut loaded = match load_result {
-        Ok(loaded) => loaded,
-        Err(LoadMonomorphizedError::LoadingProblem(problem)) => {
-            return handle_loading_problem(problem);
-        }
-        Err(LoadMonomorphizedError::ErrorModule(module)) => {
-            return handle_error_module(module, start_time.elapsed(), path.as_os_str(), false);
-        }
-    };
-    let problems = report_problems_monomorphized(&mut loaded);
+        let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
 
-    let mut expectations = std::mem::take(&mut loaded.expectations);
+        // Step 1: compile the app and generate the .o file
+        let load_config = LoadConfig {
+            target,
+            function_kind,
+            // TODO: expose this from CLI?
+            render: roc_reporting::report::RenderTarget::ColorTerminal,
+            palette: roc_reporting::report::DEFAULT_PALETTE,
+            threading,
+            exec_mode: ExecutionMode::Test,
+        };
+        let load_result = roc_load::load_and_monomorphize(
+            arena,
+            path.to_path_buf(),
+            opt_main_path.cloned(),
+            RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
+            load_config,
+        );
 
-    let interns = loaded.interns.clone();
-    let sources = loaded.sources.clone();
+        let mut loaded = match load_result {
+            Ok(loaded) => loaded,
+            Err(LoadMonomorphizedError::LoadingProblem(problem)) => {
+                return handle_loading_problem(problem);
+            }
+            Err(LoadMonomorphizedError::ErrorModule(module)) => {
+                return handle_error_module(module, start_time.elapsed(), path.as_os_str(), false);
+            }
+        };
+        let problems = report_problems_monomorphized(&mut loaded);
 
-    let (dyn_lib, expects_by_module, layout_interner) =
-        roc_repl_expect::run::expect_mono_module_to_dylib(
-            arena,
-            target,
-            loaded,
-            opt_level,
-            LlvmBackendMode::CliTest,
-        )
-        .unwrap();
+        let mut expectations = std::mem::take(&mut loaded.expectations);
 
-    // Print warnings before running tests.
-    {
-        debug_assert_eq!(
-            problems.errors, 0,
-            "if there were errors, we would have already exited."
-        );
-        if problems.warnings > 0 {
-            problems.print_error_warning_count(start_time.elapsed());
-            println!(".\n\nRunning tests…\n\n\x1B[36m{}\x1B[39m", "─".repeat(80));
-        }
-    }
+        let interns = loaded.interns.clone();
+        let sources = loaded.sources.clone();
 
-    // Run the tests.
-    let arena = &bumpalo::Bump::new();
-    let interns = arena.alloc(interns);
+        let (dyn_lib, expects_by_module, layout_interner) =
+            roc_repl_expect::run::expect_mono_module_to_dylib(
+                arena,
+                target,
+                loaded,
+                opt_level,
+                LlvmBackendMode::CliTest,
+            )
+            .unwrap();
 
-    let mut writer = std::io::stdout();
+        // Print warnings before running tests.
+        {
+            debug_assert_eq!(
+                problems.errors, 0,
+                "if there were errors, we would have already exited."
+            );
+            if problems.warnings > 0 {
+                problems.print_error_warning_count(start_time.elapsed());
+                println!(".\n\nRunning tests…\n\n\x1B[36m{}\x1B[39m", "─".repeat(80));
+            }
+        }
 
-    let mut total_failed_count = 0;
-    let mut total_passed_count = 0;
+        // Run the tests.
+        let arena = &bumpalo::Bump::new();
+        let interns = arena.alloc(interns);
 
-    let mut results_by_module = Vec::new();
-    let global_layout_interner = layout_interner.into_global();
+        let mut writer = std::io::stdout();
 
-    let compilation_duration = start_time.elapsed();
+        let mut total_failed_count = 0;
+        let mut total_passed_count = 0;
 
-    for (module_id, expects) in expects_by_module.into_iter() {
-        let test_start_time = Instant::now();
+        let mut results_by_module = Vec::new();
+        let global_layout_interner = layout_interner.into_global();
 
-        let (failed_count, passed_count) = roc_repl_expect::run::run_toplevel_expects(
-            &mut writer,
-            roc_reporting::report::RenderTarget::ColorTerminal,
-            arena,
-            interns,
-            &global_layout_interner,
-            &dyn_lib,
-            &mut expectations,
-            expects,
-        )
-        .unwrap();
+        let compilation_duration = start_time.elapsed();
 
-        let tests_duration = test_start_time.elapsed();
+        for (module_id, expects) in expects_by_module.into_iter() {
+            let test_start_time = Instant::now();
 
-        results_by_module.push(ModuleTestResults {
-            module_id,
-            failed_count,
-            passed_count,
-            tests_duration,
-        });
+            let (failed_count, passed_count) = roc_repl_expect::run::run_toplevel_expects(
+                &mut writer,
+                roc_reporting::report::RenderTarget::ColorTerminal,
+                arena,
+                interns,
+                &global_layout_interner,
+                &dyn_lib,
+                &mut expectations,
+                expects,
+            )
+            .unwrap();
 
-        total_failed_count += failed_count;
-        total_passed_count += passed_count;
-    }
+            let tests_duration = test_start_time.elapsed();
 
-    let total_duration = start_time.elapsed();
+            results_by_module.push(ModuleTestResults {
+                module_id,
+                failed_count,
+                passed_count,
+                tests_duration,
+            });
 
-    if total_failed_count == 0 && total_passed_count == 0 {
-        // TODO print this in a more nicely formatted way!
-        println!("No expectations were found.");
+            total_failed_count += failed_count;
+            total_passed_count += passed_count;
+        }
 
-        // If no tests ran, treat that as an error. This is perhaps
-        // briefly annoying at the very beginning of a project when
-        // you actually have zero tests, but it can save you from
-        // having a change to your CI script accidentally stop
-        // running tests altogether!
-        Ok(2)
-    } else {
-        if matches.get_flag(FLAG_VERBOSE) {
+        let total_duration = start_time.elapsed();
+        all_files_total_failed_count += total_failed_count;
+        all_files_total_passed_count += total_passed_count;
+        if total_failed_count == 0 && total_passed_count == 0 {
+            // TODO print this in a more nicely formatted way!
+            println!("No expectations were found.");
+        } else if matches.get_flag(FLAG_VERBOSE) {
             println!("Compiled in {} ms.", compilation_duration.as_millis());
             for module_test_results in results_by_module {
                 print_test_results(module_test_results, &sources);
@@ -647,8 +648,19 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
             test_summary(total_failed_count, total_passed_count, total_duration);
         println!("{test_summary_str}");
     }
+    }
+    if all_files_total_failed_count == 0 && all_files_total_passed_count == 0 {
+        // TODO print this in a more nicely formatted way!
+        println!("No expectations were found.");
 
-    Ok((total_failed_count > 0) as i32)
+        // If no tests ran, treat that as an error. This is perhaps
+        // briefly annoying at the very beginning of a project when
+        // you actually have zero tests, but it can save you from
+        // having a change to your CI script accidentally stop
+        // running tests altogether!
+        Ok(2)
+    } else {
+        Ok((all_files_total_failed_count > 0) as i32)
     }
 }
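A minimal standalone sketch of the clap behavior this patch leans on, assuming clap 4.x; the command name, the "roc_file" argument id, and the "main.roc" default below are illustrative stand-ins rather than identifiers from the roc codebase. Declaring the positional argument with num_args(0..) is what lets get_many::<PathBuf> hand back every path given on the command line (or the single default when none are given), which the new `for path in paths.iter()` loop then compiles and tests one file at a time.

use std::path::PathBuf;

use clap::{value_parser, Arg, Command};

fn main() {
    // Illustrative argument id and default; the real CLI uses the ROC_FILE and
    // DEFAULT_ROC_FILENAME constants from crates/cli/src/lib.rs.
    let matches = Command::new("multi-file-test-sketch")
        .arg(
            Arg::new("roc_file")
                .help("The .roc file(s) to test")
                .value_parser(value_parser!(PathBuf))
                // Accept zero or more paths, mirroring the `.num_args(0..)` added in the patch.
                .num_args(0..)
                .required(false)
                .default_value("main.roc"),
        )
        .get_matches_from(["multi-file-test-sketch", "foo.roc", "bar.roc"]);

    // With num_args(0..), get_many yields every path that was passed;
    // when none are passed, it yields the single default value instead.
    let paths: Vec<&PathBuf> = matches.get_many::<PathBuf>("roc_file").unwrap().collect();

    for path in paths.iter() {
        // The patch runs the whole load-and-test pipeline once per path at this point.
        println!("would run `roc test` against {}", path.display());
    }
}

With a change along these lines, `roc test foo.roc bar.roc` would run the expectations in both files and report a failing exit code if any expectation in either file fails.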