1use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
2use crate::{
3 MultiContractRunner, MultiContractRunnerBuilder,
4 decode::decode_console_logs,
5 gas_report::GasReport,
6 multi_runner::{is_test_contract, matches_artifact},
7 result::{SuiteResult, TestOutcome, TestStatus},
8 traces::{
9 CallTraceDecoderBuilder, InternalTraceMode, TraceKind,
10 debug::{ContractSources, DebugTraceIdentifier},
11 decode_trace_arena, folded_stack_trace,
12 identifier::SignaturesIdentifier,
13 },
14};
15use alloy_primitives::U256;
16use chrono::Utc;
17use clap::{Parser, ValueHint};
18use eyre::{Context, OptionExt, Result, bail};
19use foundry_cli::{
20 opts::{BuildOpts, GlobalArgs},
21 utils::{self, LoadConfig},
22};
23use foundry_common::{
24 EmptyTestFilter, TestFunctionExt, compile::ProjectCompiler, evm::EvmArgs, fs, shell,
25};
26use foundry_compilers::{
27 ProjectCompileOutput, artifacts::output_selection::OutputSelection,
28 compilers::multi::MultiCompiler,
29};
30use foundry_config::{
31 Config, figment,
32 figment::{
33 Metadata, Profile, Provider,
34 value::{Dict, Map},
35 },
36 filter::GlobMatcher,
37};
38use foundry_debugger::Debugger;
39use foundry_evm::traces::identifier::TraceIdentifiers;
40use regex::Regex;
41use std::{
42 collections::{BTreeMap, BTreeSet},
43 fmt::Write,
44 path::PathBuf,
45 sync::{Arc, mpsc::channel},
46 time::{Duration, Instant},
47};
48use yansi::Paint;
49
50mod filter;
51mod summary;
52use crate::{result::TestKind, traces::render_trace_arena_inner};
53pub use filter::FilterArgs;
54use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
55use summary::{TestSummaryReport, format_invariant_metrics_table};
56
57foundry_config::merge_impl_figment_convert!(TestArgs, build, evm);
59
// CLI arguments for `forge test`.
//
// NOTE(review): clap derives `--help` text from `///` doc comments, so plain
// `//` comments are used here to leave the generated help output unchanged.
#[derive(Clone, Debug, Parser)]
#[command(next_help_heading = "Test options")]
pub struct TestArgs {
    // Global options shared across forge subcommands.
    #[command(flatten)]
    pub global: GlobalArgs,

    // Positional glob limiting which test files are considered.
    #[arg(value_hint = ValueHint::FilePath)]
    pub path: Option<GlobMatcher>,

    // Open the single matched test in the interactive debugger (or dump it,
    // see `--dump`); mutually exclusive with the flame/decode/rerun modes.
    #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"])]
    debug: bool,

    // Render the single matched test's execution trace as a flamegraph SVG.
    #[arg(long)]
    flamegraph: bool,

    // Like --flamegraph, but time-ordered rather than merged by stack.
    #[arg(long, conflicts_with = "flamegraph")]
    flamechart: bool,

    // Decode internal (non-call) traces.
    #[arg(long)]
    decode_internal: bool,

    // Write the debugger session to PATH instead of opening the TUI.
    #[arg(
        long,
        requires = "debug",
        value_hint = ValueHint::FilePath,
        value_name = "PATH"
    )]
    dump: Option<PathBuf>,

    // Produce a gas report (forces isolation mode, see `execute_tests`).
    #[arg(long, env = "FORGE_GAS_REPORT")]
    gas_report: bool,

    // Check recorded gas snapshots against disk; `None` defers to config.
    #[arg(long, env = "FORGE_SNAPSHOT_CHECK")]
    gas_snapshot_check: Option<bool>,

    // Write gas snapshots to disk; `None` defers to config.
    #[arg(long, env = "FORGE_SNAPSHOT_EMIT")]
    gas_snapshot_emit: Option<bool>,

    // Treat the outcome as successful even if tests failed.
    #[arg(long, env = "FORGE_ALLOW_FAILURE")]
    allow_failure: bool,

    // Only show traces for failing tests.
    #[arg(long, short, env = "FORGE_SUPPRESS_SUCCESSFUL_TRACES", help_heading = "Display options")]
    suppress_successful_traces: bool,

    // Emit a JUnit XML report instead of the normal output.
    #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")]
    pub junit: bool,

    // Stop consuming suite results after the first test failure.
    #[arg(long)]
    pub fail_fast: bool,

    // Etherscan key used to identify external addresses in traces.
    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
    etherscan_api_key: Option<String>,

    // List matching tests instead of running them.
    #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")]
    list: bool,

    // Fixed seed for the fuzzer (overrides config).
    #[arg(long)]
    pub fuzz_seed: Option<U256>,

    // Number of fuzz runs (overrides config).
    #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")]
    pub fuzz_runs: Option<u64>,

    // Fuzz campaign timeout (overrides config).
    #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")]
    pub fuzz_timeout: Option<u64>,

    // File used to persist fuzz failure cases (overrides config).
    #[arg(long)]
    pub fuzz_input_file: Option<String>,

    // Show per-suite progress while running.
    #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")]
    pub show_progress: bool,

    // Re-run only the tests that failed in the previous run
    // (see `last_run_failures` / `persist_run_failures`).
    #[arg(long)]
    pub rerun: bool,

    // Print a summary report at the end.
    #[arg(long, help_heading = "Display options")]
    pub summary: bool,

    // Expand the summary with per-test detail.
    #[arg(long, help_heading = "Display options", requires = "summary")]
    pub detailed: bool,

    // Disable address labels in decoded traces.
    #[arg(long, help_heading = "Display options")]
    pub disable_labels: bool,

    // Test filtering flags (--match-test / --match-contract / --match-path).
    #[command(flatten)]
    filter: FilterArgs,

    // EVM execution environment flags.
    #[command(flatten)]
    evm: EvmArgs,

    // Build / compilation flags.
    #[command(flatten)]
    pub build: BuildOpts,

    // Watch-mode flags.
    #[command(flatten)]
    pub watch: WatchArgs,
}
197
198impl TestArgs {
    /// Entry point for `forge test`; delegates to [`Self::execute_tests`].
    pub async fn run(self) -> Result<TestOutcome> {
        trace!(target: "forge::test", "executing test command");
        self.execute_tests().await
    }
203
204 #[instrument(target = "forge::test", skip_all)]
213 pub fn get_sources_to_compile(
214 &self,
215 config: &Config,
216 test_filter: &ProjectPathsAwareFilter,
217 ) -> Result<Option<BTreeSet<PathBuf>>> {
218 if test_filter.is_empty() {
220 return Ok(None);
221 }
222
223 let mut project = config.create_project(true, true)?;
224 project.update_output_selection(|selection| {
225 *selection = OutputSelection::common_output_selection(["abi".to_string()]);
226 });
227 let output = project.compile()?;
228 if output.has_compiler_errors() {
229 sh_println!("{output}")?;
230 eyre::bail!("Compilation failed");
231 }
232
233 let sources = output
234 .artifact_ids()
235 .filter_map(|(id, artifact)| artifact.abi.as_ref().map(|abi| (id, abi)))
236 .filter(|(id, abi)| {
237 !is_test_contract(abi.functions()) || matches_artifact(test_filter, id, abi)
238 })
239 .map(|(id, _)| id.source)
240 .collect::<BTreeSet<_>>();
241 Ok(Some(sources))
242 }
243
    /// Loads config, compiles the (filtered) project, runs the matched tests,
    /// and applies the `--flamegraph`/`--flamechart` and `--debug`
    /// post-processing modes on the single resulting test.
    pub async fn execute_tests(mut self) -> Result<TestOutcome> {
        let (mut config, mut evm_opts) = self.load_config_and_evm_opts()?;

        // Gas reporting requires isolation so gas is attributable per call;
        // otherwise disable report sampling entirely to save work.
        if self.gas_report {
            evm_opts.isolate = true;
        } else {
            config.fuzz.gas_report_samples = 0;
            config.invariant.gas_report_samples = 0;
        }

        // Installing missing dependencies may change remappings; reload the
        // config when auto-detection is on so they are picked up.
        if install::install_missing_dependencies(&mut config) && config.auto_detect_remappings {
            config = self.load_config()?;
        }

        let project = config.project()?;

        let filter = self.filter(&config)?;
        trace!(target: "forge::test", ?filter, "using filter");

        // Compile only the sources the filter needs (empty set = everything).
        let compiler = ProjectCompiler::new()
            .dynamic_test_linking(config.dynamic_test_linking)
            .quiet(shell::is_json() || self.junit)
            .files(self.get_sources_to_compile(&config, &filter)?.unwrap_or_default());
        let output = compiler.compile(&project)?;

        let project_root = &project.paths.root;

        let should_debug = self.debug;
        let should_draw = self.flamegraph || self.flamechart;

        // Gas reports and flame output need traces (verbosity >= 3); keep the
        // user's original verbosity around for display decisions.
        let verbosity = evm_opts.verbosity;
        if (self.gas_report && evm_opts.verbosity < 3) || self.flamegraph || self.flamechart {
            evm_opts.verbosity = 3;
        }

        let env = evm_opts.evm_env().await?;

        // Flame output needs internal traces to attribute gas to functions.
        if should_draw && !self.decode_internal {
            self.decode_internal = true;
        }

        let decode_internal = if self.decode_internal {
            InternalTraceMode::Simple
        } else {
            InternalTraceMode::None
        };

        let config = Arc::new(config);
        let runner = MultiContractRunnerBuilder::new(config.clone())
            .set_debug(should_debug)
            .set_decode_internal(decode_internal)
            .initial_balance(evm_opts.initial_balance)
            .evm_spec(config.evm_spec_id())
            .sender(evm_opts.sender)
            .with_fork(evm_opts.get_fork(&config, env.clone()))
            .enable_isolation(evm_opts.isolate)
            .fail_fast(self.fail_fast)
            .odyssey(evm_opts.odyssey)
            .build::<MultiCompiler>(project_root, &output, env, evm_opts)?;

        let libraries = runner.libraries.clone();
        let mut outcome = self.run_tests(runner, config, verbosity, &filter, &output).await?;

        if should_draw {
            // Exactly one test ran (enforced in `run_tests`); render its
            // execution trace as an SVG under `cache/`.
            let (suite_name, test_name, mut test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let (_, arena) = test_result
                .traces
                .iter_mut()
                .find(|(kind, _)| *kind == TraceKind::Execution)
                .unwrap();

            let decoder = outcome.last_run_decoder.as_ref().unwrap();
            decode_trace_arena(arena, decoder).await;
            let mut fst = folded_stack_trace::build(arena);

            let label = if self.flamegraph { "flamegraph" } else { "flamechart" };
            let contract = suite_name.split(':').next_back().unwrap();
            let test_name = test_name.trim_end_matches("()");
            let file_name = format!("cache/{label}_{contract}_{test_name}.svg");
            let file = std::fs::File::create(&file_name).wrap_err("failed to create file")?;
            let file = std::io::BufWriter::new(file);

            let mut options = inferno::flamegraph::Options::default();
            options.title = format!("{label} {contract}::{test_name}");
            options.count_name = "gas".to_string();
            if self.flamechart {
                // Flamecharts are time-ordered; inferno expects the folded
                // stacks reversed in chart mode.
                options.flame_chart = true;
                fst.reverse();
            }

            inferno::flamegraph::from_lines(&mut options, fst.iter().map(String::as_str), file)
                .wrap_err("failed to write svg")?;
            sh_println!("Saved to {file_name}")?;

            // Best effort: opening the SVG is a convenience, not a failure.
            if let Err(e) = opener::open(&file_name) {
                sh_err!("Failed to open {file_name}; please open it manually: {e}")?;
            }
        }

        if should_debug {
            // Exactly one test ran; replay it in the interactive debugger, or
            // dump the session to a file when `--dump` was given.
            let (_, _, test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let sources =
                ContractSources::from_project_output(&output, project.root(), Some(&libraries))?;

            let mut builder = Debugger::builder()
                .traces(
                    test_result.traces.iter().filter(|(t, _)| t.is_execution()).cloned().collect(),
                )
                .sources(sources)
                .breakpoints(test_result.breakpoints.clone());

            if let Some(decoder) = &outcome.last_run_decoder {
                builder = builder.decoder(decoder);
            }

            let mut debugger = builder.build();
            if let Some(dump_path) = self.dump {
                debugger.dump_to_file(&dump_path)?;
            } else {
                debugger.try_run_tui()?;
            }
        }

        Ok(outcome)
    }
397
398 pub async fn run_tests(
400 &self,
401 mut runner: MultiContractRunner,
402 config: Arc<Config>,
403 verbosity: u8,
404 filter: &ProjectPathsAwareFilter,
405 output: &ProjectCompileOutput,
406 ) -> eyre::Result<TestOutcome> {
407 if self.list {
408 return list(runner, filter);
409 }
410
411 trace!(target: "forge::test", "running all tests");
412
413 let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json();
415
416 let num_filtered = runner.matching_test_functions(filter).count();
417
418 if num_filtered == 0 {
419 let mut total_tests = num_filtered;
420 if !filter.is_empty() {
421 total_tests = runner.matching_test_functions(&EmptyTestFilter::default()).count();
422 }
423 if total_tests == 0 {
424 sh_println!(
425 "No tests found in project! Forge looks for functions that start with `test`"
426 )?;
427 } else {
428 let mut msg = format!("no tests match the provided pattern:\n{filter}");
429 if let Some(test_pattern) = &filter.args().test_pattern {
431 let test_name = test_pattern.as_str();
432 let candidates = runner.all_test_functions(filter).map(|f| &f.name);
434 if let Some(suggestion) = utils::did_you_mean(test_name, candidates).pop() {
435 write!(msg, "\nDid you mean `{suggestion}`?")?;
436 }
437 }
438 sh_warn!("{msg}")?;
439 }
440 return Ok(TestOutcome::empty(false));
441 }
442
443 if num_filtered != 1 && (self.debug || self.flamegraph || self.flamechart) {
444 let action = if self.flamegraph {
445 "generate a flamegraph"
446 } else if self.flamechart {
447 "generate a flamechart"
448 } else {
449 "run the debugger"
450 };
451 let filter = if filter.is_empty() {
452 String::new()
453 } else {
454 format!("\n\nFilter used:\n{filter}")
455 };
456 eyre::bail!(
457 "{num_filtered} tests matched your criteria, but exactly 1 test must match in order to {action}.\n\n\
458 Use --match-contract and --match-path to further limit the search.{filter}",
459 );
460 }
461
462 if num_filtered == 1 && self.decode_internal {
464 runner.decode_internal = InternalTraceMode::Full;
465 }
466
467 if !self.gas_report && !self.summary && shell::is_json() {
469 let mut results = runner.test_collect(filter)?;
470 results.values_mut().for_each(|suite_result| {
471 for test_result in suite_result.test_results.values_mut() {
472 if verbosity >= 2 {
473 test_result.decoded_logs = decode_console_logs(&test_result.logs);
475 } else {
476 test_result.logs = vec![];
478 }
479 }
480 });
481 sh_println!("{}", serde_json::to_string(&results)?)?;
482 return Ok(TestOutcome::new(results, self.allow_failure));
483 }
484
485 if self.junit {
486 let results = runner.test_collect(filter)?;
487 sh_println!("{}", junit_xml_report(&results, verbosity).to_string()?)?;
488 return Ok(TestOutcome::new(results, self.allow_failure));
489 }
490
491 let remote_chain_id = runner.evm_opts.get_remote_chain_id().await;
492 let known_contracts = runner.known_contracts.clone();
493
494 let libraries = runner.libraries.clone();
495
496 let (tx, rx) = channel::<(String, SuiteResult)>();
498 let timer = Instant::now();
499 let show_progress = config.show_progress;
500 let handle = tokio::task::spawn_blocking({
501 let filter = filter.clone();
502 move || runner.test(&filter, tx, show_progress)
503 });
504
505 let mut identifier = TraceIdentifiers::new().with_local(&known_contracts);
507
508 if !self.gas_report {
511 identifier = identifier.with_etherscan(&config, remote_chain_id)?;
512 }
513
514 let mut builder = CallTraceDecoderBuilder::new()
516 .with_known_contracts(&known_contracts)
517 .with_label_disabled(self.disable_labels)
518 .with_verbosity(verbosity);
519 if !self.gas_report {
521 builder =
522 builder.with_signature_identifier(SignaturesIdentifier::from_config(&config)?);
523 }
524
525 if self.decode_internal {
526 let sources =
527 ContractSources::from_project_output(output, &config.root, Some(&libraries))?;
528 builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources));
529 }
530 let mut decoder = builder.build();
531
532 let mut gas_report = self.gas_report.then(|| {
533 GasReport::new(
534 config.gas_reports.clone(),
535 config.gas_reports_ignore.clone(),
536 config.gas_reports_include_tests,
537 )
538 });
539
540 let mut gas_snapshots = BTreeMap::<String, BTreeMap<String, String>>::new();
541
542 let mut outcome = TestOutcome::empty(self.allow_failure);
543
544 let mut any_test_failed = false;
545 for (contract_name, suite_result) in rx {
546 let tests = &suite_result.test_results;
547
548 decoder.clear_addresses();
550
551 let identify_addresses = verbosity >= 3
553 || self.gas_report
554 || self.debug
555 || self.flamegraph
556 || self.flamechart;
557
558 if !silent {
560 sh_println!()?;
561 for warning in &suite_result.warnings {
562 sh_warn!("{warning}")?;
563 }
564 if !tests.is_empty() {
565 let len = tests.len();
566 let tests = if len > 1 { "tests" } else { "test" };
567 sh_println!("Ran {len} {tests} for {contract_name}")?;
568 }
569 }
570
571 for (name, result) in tests {
573 let show_traces =
574 !self.suppress_successful_traces || result.status == TestStatus::Failure;
575 if !silent {
576 sh_println!("{}", result.short_result(name))?;
577
578 if let TestKind::Invariant { metrics, .. } = &result.kind
580 && !metrics.is_empty()
581 {
582 let _ = sh_println!("\n{}\n", format_invariant_metrics_table(metrics));
583 }
584
585 if verbosity >= 2 && show_traces {
587 let console_logs = decode_console_logs(&result.logs);
589 if !console_logs.is_empty() {
590 sh_println!("Logs:")?;
591 for log in console_logs {
592 sh_println!(" {log}")?;
593 }
594 sh_println!()?;
595 }
596 }
597 }
598
599 any_test_failed |= result.status == TestStatus::Failure;
602
603 decoder.clear_addresses();
605 decoder.labels.extend(result.labels.iter().map(|(k, v)| (*k, v.clone())));
606
607 let mut decoded_traces = Vec::with_capacity(result.traces.len());
609 for (kind, arena) in &mut result.traces.clone() {
610 if identify_addresses {
611 decoder.identify(arena, &mut identifier);
612 }
613
614 let should_include = match kind {
620 TraceKind::Execution => {
621 (verbosity == 3 && result.status.is_failure()) || verbosity >= 4
622 }
623 TraceKind::Setup => {
624 (verbosity == 4 && result.status.is_failure()) || verbosity >= 5
625 }
626 TraceKind::Deployment => false,
627 };
628
629 if should_include {
630 decode_trace_arena(arena, &decoder).await;
631 decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4));
632 }
633 }
634
635 if !silent && show_traces && !decoded_traces.is_empty() {
636 sh_println!("Traces:")?;
637 for trace in &decoded_traces {
638 sh_println!("{trace}")?;
639 }
640 }
641
642 if let Some(gas_report) = &mut gas_report {
643 gas_report.analyze(result.traces.iter().map(|(_, a)| &a.arena), &decoder).await;
644
645 for trace in &result.gas_report_traces {
646 decoder.clear_addresses();
647
648 for (kind, arena) in &result.traces {
651 if !matches!(kind, TraceKind::Execution) {
652 decoder.identify(arena, &mut identifier);
653 }
654 }
655
656 for arena in trace {
657 decoder.identify(arena, &mut identifier);
658 gas_report.analyze([arena], &decoder).await;
659 }
660 }
661 }
662
663 for (group, new_snapshots) in &result.gas_snapshots {
665 gas_snapshots.entry(group.clone()).or_default().extend(new_snapshots.clone());
666 }
667 }
668
669 if !gas_snapshots.is_empty() {
671 if self.gas_snapshot_check.unwrap_or(config.gas_snapshot_check) {
683 let differences_found = gas_snapshots.clone().into_iter().fold(
684 false,
685 |mut found, (group, snapshots)| {
686 if !&config.snapshots.join(format!("{group}.json")).exists() {
688 return false;
689 }
690
691 let previous_snapshots: BTreeMap<String, String> =
692 fs::read_json_file(&config.snapshots.join(format!("{group}.json")))
693 .expect("Failed to read snapshots from disk");
694
695 let diff: BTreeMap<_, _> = snapshots
696 .iter()
697 .filter_map(|(k, v)| {
698 previous_snapshots.get(k).and_then(|previous_snapshot| {
699 if previous_snapshot != v {
700 Some((
701 k.clone(),
702 (previous_snapshot.clone(), v.clone()),
703 ))
704 } else {
705 None
706 }
707 })
708 })
709 .collect();
710
711 if !diff.is_empty() {
712 let _ = sh_eprintln!(
713 "{}",
714 format!("\n[{group}] Failed to match snapshots:").red().bold()
715 );
716
717 for (key, (previous_snapshot, snapshot)) in &diff {
718 let _ = sh_eprintln!(
719 "{}",
720 format!("- [{key}] {previous_snapshot} → {snapshot}").red()
721 );
722 }
723
724 found = true;
725 }
726
727 found
728 },
729 );
730
731 if differences_found {
732 sh_eprintln!()?;
733 eyre::bail!("Snapshots differ from previous run");
734 }
735 }
736
737 if self.gas_snapshot_emit.unwrap_or(config.gas_snapshot_emit) {
747 fs::create_dir_all(&config.snapshots)?;
749
750 gas_snapshots.clone().into_iter().for_each(|(group, snapshots)| {
752 fs::write_pretty_json_file(
753 &config.snapshots.join(format!("{group}.json")),
754 &snapshots,
755 )
756 .expect("Failed to write gas snapshots to disk");
757 });
758 }
759 }
760
761 if !silent {
763 sh_println!("{}", suite_result.summary())?;
764 }
765
766 outcome.results.insert(contract_name, suite_result);
768
769 if self.fail_fast && any_test_failed {
771 break;
772 }
773 }
774 outcome.last_run_decoder = Some(decoder);
775 let duration = timer.elapsed();
776
777 trace!(target: "forge::test", len=outcome.results.len(), %any_test_failed, "done with results");
778
779 if let Some(gas_report) = gas_report {
780 let finalized = gas_report.finalize();
781 sh_println!("{}", &finalized)?;
782 outcome.gas_report = Some(finalized);
783 }
784
785 if !self.summary && !shell::is_json() {
786 sh_println!("{}", outcome.summary(duration))?;
787 }
788
789 if self.summary && !outcome.results.is_empty() {
790 let summary_report = TestSummaryReport::new(self.detailed, outcome.clone());
791 sh_println!("{}", &summary_report)?;
792 }
793
794 if let Err(e) = handle.await {
796 match e.try_into_panic() {
797 Ok(payload) => std::panic::resume_unwind(payload),
798 Err(e) => return Err(e.into()),
799 }
800 }
801
802 persist_run_failures(&config, &outcome);
804
805 Ok(outcome)
806 }
807
808 pub fn filter(&self, config: &Config) -> Result<ProjectPathsAwareFilter> {
811 let mut filter = self.filter.clone();
812 if self.rerun {
813 filter.test_pattern = last_run_failures(config);
814 }
815 if filter.path_pattern.is_some() {
816 if self.path.is_some() {
817 bail!("Can not supply both --match-path and |path|");
818 }
819 } else {
820 filter.path_pattern = self.path.clone();
821 }
822 Ok(filter.merge_with_config(config))
823 }
824
825 pub fn is_watch(&self) -> bool {
827 self.watch.watch.is_some()
828 }
829
    /// Builds the watchexec configuration; when no explicit watch paths were
    /// given, defaults to the project's `src` and `test` directories.
    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
        self.watch.watchexec_config(|| {
            let config = self.load_config()?;
            Ok([config.src, config.test])
        })
    }
837}
838
839impl Provider for TestArgs {
840 fn metadata(&self) -> Metadata {
841 Metadata::named("Core Build Args Provider")
842 }
843
844 fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
845 let mut dict = Dict::default();
846
847 let mut fuzz_dict = Dict::default();
848 if let Some(fuzz_seed) = self.fuzz_seed {
849 fuzz_dict.insert("seed".to_string(), fuzz_seed.to_string().into());
850 }
851 if let Some(fuzz_runs) = self.fuzz_runs {
852 fuzz_dict.insert("runs".to_string(), fuzz_runs.into());
853 }
854 if let Some(fuzz_timeout) = self.fuzz_timeout {
855 fuzz_dict.insert("timeout".to_string(), fuzz_timeout.into());
856 }
857 if let Some(fuzz_input_file) = self.fuzz_input_file.clone() {
858 fuzz_dict.insert("failure_persist_file".to_string(), fuzz_input_file.into());
859 }
860 dict.insert("fuzz".to_string(), fuzz_dict.into());
861
862 if let Some(etherscan_api_key) =
863 self.etherscan_api_key.as_ref().filter(|s| !s.trim().is_empty())
864 {
865 dict.insert("etherscan_api_key".to_string(), etherscan_api_key.to_string().into());
866 }
867
868 if self.show_progress {
869 dict.insert("show_progress".to_string(), true.into());
870 }
871
872 Ok(Map::from([(Config::selected_profile(), dict)]))
873 }
874}
875
876fn list(runner: MultiContractRunner, filter: &ProjectPathsAwareFilter) -> Result<TestOutcome> {
878 let results = runner.list(filter);
879
880 if shell::is_json() {
881 sh_println!("{}", serde_json::to_string(&results)?)?;
882 } else {
883 for (file, contracts) in &results {
884 sh_println!("{file}")?;
885 for (contract, tests) in contracts {
886 sh_println!(" {contract}")?;
887 sh_println!(" {}\n", tests.join("\n "))?;
888 }
889 }
890 }
891 Ok(TestOutcome::empty(false))
892}
893
894fn last_run_failures(config: &Config) -> Option<regex::Regex> {
896 match fs::read_to_string(&config.test_failures_file) {
897 Ok(filter) => Regex::new(&filter)
898 .inspect_err(|e| {
899 _ = sh_warn!(
900 "failed to parse test filter from {:?}: {e}",
901 config.test_failures_file
902 )
903 })
904 .ok(),
905 Err(_) => None,
906 }
907}
908
909fn persist_run_failures(config: &Config, outcome: &TestOutcome) {
911 if outcome.failed() > 0 && fs::create_file(&config.test_failures_file).is_ok() {
912 let mut filter = String::new();
913 let mut failures = outcome.failures().peekable();
914 while let Some((test_name, _)) = failures.next() {
915 if test_name.is_any_test()
916 && let Some(test_match) = test_name.split("(").next()
917 {
918 filter.push_str(test_match);
919 if failures.peek().is_some() {
920 filter.push('|');
921 }
922 }
923 }
924 let _ = fs::write(&config.test_failures_file, filter);
925 }
926}
927
928fn junit_xml_report(results: &BTreeMap<String, SuiteResult>, verbosity: u8) -> Report {
930 let mut total_duration = Duration::default();
931 let mut junit_report = Report::new("Test run");
932 junit_report.set_timestamp(Utc::now());
933 for (suite_name, suite_result) in results {
934 let mut test_suite = TestSuite::new(suite_name);
935 total_duration += suite_result.duration;
936 test_suite.set_time(suite_result.duration);
937 test_suite.set_system_out(suite_result.summary());
938 for (test_name, test_result) in &suite_result.test_results {
939 let mut test_status = match test_result.status {
940 TestStatus::Success => TestCaseStatus::success(),
941 TestStatus::Failure => TestCaseStatus::non_success(NonSuccessKind::Failure),
942 TestStatus::Skipped => TestCaseStatus::skipped(),
943 };
944 if let Some(reason) = &test_result.reason {
945 test_status.set_message(reason);
946 }
947
948 let mut test_case = TestCase::new(test_name, test_status);
949 test_case.set_time(test_result.duration);
950
951 let mut sys_out = String::new();
952 let result_report = test_result.kind.report();
953 write!(sys_out, "{test_result} {test_name} {result_report}").unwrap();
954 if verbosity >= 2 && !test_result.logs.is_empty() {
955 write!(sys_out, "\\nLogs:\\n").unwrap();
956 let console_logs = decode_console_logs(&test_result.logs);
957 for log in console_logs {
958 write!(sys_out, " {log}\\n").unwrap();
959 }
960 }
961
962 test_case.set_system_out(sys_out);
963 test_suite.add_test_case(test_case);
964 }
965 junit_report.add_test_suite(test_suite);
966 }
967 junit_report.set_time(total_duration);
968 junit_report
969}
970
#[cfg(test)]
mod tests {
    use super::*;
    use foundry_config::Chain;

    // `-w` (watch) combined with another short flag should still register.
    #[test]
    fn watch_parse() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "-vw"]);
        assert!(args.watch.watch.is_some());
    }

    // `--fuzz-seed` accepts a hex-prefixed value.
    #[test]
    fn fuzz_seed() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    // The fuzz seed must survive alongside unrelated flags.
    #[test]
    fn fuzz_seed_exists() {
        let args: TestArgs =
            TestArgs::parse_from(["foundry-cli", "-vvv", "--gas-report", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    // `--chain-id` must propagate into both the loaded config and EVM opts.
    #[test]
    fn extract_chain() {
        let test = |arg: &str, expected: Chain| {
            let args = TestArgs::parse_from(["foundry-cli", arg]);
            assert_eq!(args.evm.env.chain, Some(expected));
            let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
            assert_eq!(config.chain, Some(expected));
            assert_eq!(evm_opts.env.chain_id, Some(expected.id()));
        };
        test("--chain-id=1", Chain::mainnet());
        test("--chain-id=42", Chain::from_id(42));
    }
}
1008}