// forge/cmd/test/mod.rs

1use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
2use crate::{
3    MultiContractRunner, MultiContractRunnerBuilder,
4    decode::decode_console_logs,
5    gas_report::GasReport,
6    multi_runner::matches_artifact,
7    result::{SuiteResult, TestOutcome, TestStatus},
8    traces::{
9        CallTraceDecoderBuilder, InternalTraceMode, TraceKind,
10        debug::{ContractSources, DebugTraceIdentifier},
11        decode_trace_arena, folded_stack_trace,
12        identifier::SignaturesIdentifier,
13    },
14};
15use alloy_primitives::U256;
16use chrono::Utc;
17use clap::{Parser, ValueHint};
18use eyre::{Context, OptionExt, Result, bail};
19use foundry_cli::{
20    opts::{BuildOpts, EvmArgs, GlobalArgs},
21    utils::{self, LoadConfig},
22};
23use foundry_common::{EmptyTestFilter, TestFunctionExt, compile::ProjectCompiler, fs, shell};
24use foundry_compilers::{
25    ProjectCompileOutput,
26    artifacts::output_selection::OutputSelection,
27    compilers::{
28        Language,
29        multi::{MultiCompiler, MultiCompilerLanguage},
30    },
31    utils::source_files_iter,
32};
33use foundry_config::{
34    Config, figment,
35    figment::{
36        Metadata, Profile, Provider,
37        value::{Dict, Map},
38    },
39    filter::GlobMatcher,
40};
41use foundry_debugger::Debugger;
42use foundry_evm::{
43    opts::EvmOpts,
44    traces::{backtrace::BacktraceBuilder, identifier::TraceIdentifiers, prune_trace_depth},
45};
46use regex::Regex;
47use std::{
48    collections::{BTreeMap, BTreeSet},
49    fmt::Write,
50    path::{Path, PathBuf},
51    sync::{Arc, mpsc::channel},
52    time::{Duration, Instant},
53};
54use yansi::Paint;
55
56mod filter;
57mod summary;
58use crate::{result::TestKind, traces::render_trace_arena_inner};
59pub use filter::FilterArgs;
60use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
61use summary::{TestSummaryReport, format_invariant_metrics_table};
62
// Loads the project's figment and merges the build CLI arguments into it.
foundry_config::merge_impl_figment_convert!(TestArgs, build, evm);
65
/// CLI arguments for `forge test`.
#[derive(Clone, Debug, Parser)]
#[command(next_help_heading = "Test options")]
pub struct TestArgs {
    // Include global options for users of this struct.
    #[command(flatten)]
    pub global: GlobalArgs,

    /// The contract file you want to test, it's a shortcut for --match-path.
    #[arg(value_hint = ValueHint::FilePath)]
    pub path: Option<GlobMatcher>,

    /// Run a single test in the debugger.
    ///
    /// The matching test will be opened in the debugger regardless of the outcome of the test.
    ///
    /// If the matching test is a fuzz test, then it will open the debugger on the first failure
    /// case. If the fuzz test does not fail, it will open the debugger on the last fuzz case.
    #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"])]
    debug: bool,

    /// Generate a flamegraph for a single test. Implies `--decode-internal`.
    ///
    /// A flame graph is used to visualize which functions or operations within the smart contract
    /// are consuming the most gas overall in a sorted manner.
    #[arg(long)]
    flamegraph: bool,

    /// Generate a flamechart for a single test. Implies `--decode-internal`.
    ///
    /// A flame chart shows the gas usage over time, illustrating when each function is
    /// called (execution order) and how much gas it consumes at each point in the timeline.
    #[arg(long, conflicts_with = "flamegraph")]
    flamechart: bool,

    /// Identify internal functions in traces.
    ///
    /// This will trace internal functions and decode stack parameters.
    ///
    /// Parameters stored in memory (such as bytes or arrays) are currently decoded only when a
    /// single function is matched, similarly to `--debug`, for performance reasons.
    #[arg(long)]
    decode_internal: bool,

    /// Dumps all debugger steps to file.
    #[arg(
        long,
        requires = "debug",
        value_hint = ValueHint::FilePath,
        value_name = "PATH"
    )]
    dump: Option<PathBuf>,

    /// Print a gas report.
    #[arg(long, env = "FORGE_GAS_REPORT")]
    gas_report: bool,

    /// Check gas snapshots against previous runs.
    #[arg(long, env = "FORGE_SNAPSHOT_CHECK")]
    gas_snapshot_check: Option<bool>,

    /// Enable/disable recording of gas snapshot results.
    #[arg(long, env = "FORGE_SNAPSHOT_EMIT")]
    gas_snapshot_emit: Option<bool>,

    /// Exit with code 0 even if a test fails.
    #[arg(long, env = "FORGE_ALLOW_FAILURE")]
    allow_failure: bool,

    /// Suppress successful test traces and show only traces for failures.
    #[arg(long, short, env = "FORGE_SUPPRESS_SUCCESSFUL_TRACES", help_heading = "Display options")]
    suppress_successful_traces: bool,

    /// Defines the depth of a trace
    #[arg(long)]
    trace_depth: Option<usize>,

    /// Output test results as JUnit XML report.
    #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")]
    pub junit: bool,

    /// Stop running tests after the first failure.
    #[arg(long)]
    pub fail_fast: bool,

    /// The Etherscan (or equivalent) API key.
    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
    etherscan_api_key: Option<String>,

    /// List tests instead of running them.
    #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")]
    list: bool,

    /// Set seed used to generate randomness during your fuzz runs.
    #[arg(long)]
    pub fuzz_seed: Option<U256>,

    /// The number of runs to perform for each fuzz test.
    #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")]
    pub fuzz_runs: Option<u64>,

    /// Timeout for each fuzz run in seconds.
    #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")]
    pub fuzz_timeout: Option<u64>,

    /// File to rerun fuzz failures from.
    #[arg(long)]
    pub fuzz_input_file: Option<String>,

    /// Show test execution progress.
    #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")]
    pub show_progress: bool,

    /// Re-run recorded test failures from last run.
    /// If no failure recorded then regular test run is performed.
    #[arg(long)]
    pub rerun: bool,

    /// Print test summary table.
    #[arg(long, help_heading = "Display options")]
    pub summary: bool,

    /// Print detailed test summary table.
    #[arg(long, help_heading = "Display options", requires = "summary")]
    pub detailed: bool,

    /// Disables the labels in the traces.
    #[arg(long, help_heading = "Display options")]
    pub disable_labels: bool,

    // Test-function/contract/path filtering options (e.g. --match-test, --match-contract).
    #[command(flatten)]
    filter: FilterArgs,

    // EVM execution options (fork URL, verbosity, sender, etc.).
    #[command(flatten)]
    evm: EvmArgs,

    // Build/compilation options shared with `forge build`.
    #[command(flatten)]
    pub build: BuildOpts,

    // `forge test --watch` options.
    #[command(flatten)]
    pub watch: WatchArgs,
}
207
208impl TestArgs {
    /// Entry point for `forge test`: compiles the project and runs the matching tests.
    ///
    /// Thin wrapper around [`Self::compile_and_run`]; see it for details.
    pub async fn run(mut self) -> Result<TestOutcome> {
        trace!(target: "forge::test", "executing test command");
        self.compile_and_run().await
    }
213
214    /// Returns a list of files that need to be compiled in order to run all the tests that match
215    /// the given filter.
216    ///
217    /// This means that it will return all sources that are not test contracts or that match the
218    /// filter. We want to compile all non-test sources always because tests might depend on them
219    /// dynamically through cheatcodes.
220    #[instrument(target = "forge::test", skip_all)]
221    pub fn get_sources_to_compile(
222        &self,
223        config: &Config,
224        test_filter: &ProjectPathsAwareFilter,
225    ) -> Result<BTreeSet<PathBuf>> {
226        // An empty filter doesn't filter out anything.
227        // We can still optimize slightly by excluding scripts.
228        if test_filter.is_empty() {
229            return Ok(source_files_iter(&config.src, MultiCompilerLanguage::FILE_EXTENSIONS)
230                .chain(source_files_iter(&config.test, MultiCompilerLanguage::FILE_EXTENSIONS))
231                .collect());
232        }
233
234        let mut project = config.create_project(true, true)?;
235        project.update_output_selection(|selection| {
236            *selection = OutputSelection::common_output_selection(["abi".to_string()]);
237        });
238        let output = project.compile()?;
239        if output.has_compiler_errors() {
240            sh_println!("{output}")?;
241            eyre::bail!("Compilation failed");
242        }
243
244        Ok(output
245            .artifact_ids()
246            .filter_map(|(id, artifact)| artifact.abi.as_ref().map(|abi| (id, abi)))
247            .filter(|(id, abi)| {
248                id.source.starts_with(&config.src) || matches_artifact(test_filter, id, abi)
249            })
250            .map(|(id, _)| id.source)
251            .collect())
252    }
253
254    /// Executes all the tests in the project.
255    ///
256    /// This will trigger the build process first. On success all test contracts that match the
257    /// configured filter will be executed
258    ///
259    /// Returns the test results for all matching tests.
260    pub async fn compile_and_run(&mut self) -> Result<TestOutcome> {
261        // Merge all configs.
262        let (mut config, evm_opts) = self.load_config_and_evm_opts()?;
263
264        // Install missing dependencies.
265        if install::install_missing_dependencies(&mut config).await && config.auto_detect_remappings
266        {
267            // need to re-configure here to also catch additional remappings
268            config = self.load_config()?;
269        }
270
271        // Set up the project.
272        let project = config.project()?;
273
274        let filter = self.filter(&config)?;
275        trace!(target: "forge::test", ?filter, "using filter");
276
277        let compiler = ProjectCompiler::new()
278            .dynamic_test_linking(config.dynamic_test_linking)
279            .quiet(shell::is_json() || self.junit)
280            .files(self.get_sources_to_compile(&config, &filter)?);
281        let output = compiler.compile(&project)?;
282
283        self.run_tests(&project.paths.root, config, evm_opts, &output, &filter, false).await
284    }
285
    /// Executes all the tests in the project.
    ///
    /// See [`Self::compile_and_run`] for more details.
    ///
    /// After the runs complete, optionally renders a flamegraph/flamechart for the single
    /// matched test, or launches the debugger on it.
    pub async fn run_tests(
        &mut self,
        project_root: &Path,
        mut config: Config,
        mut evm_opts: EvmOpts,
        output: &ProjectCompileOutput,
        filter: &ProjectPathsAwareFilter,
        coverage: bool,
    ) -> Result<TestOutcome> {
        // Explicitly enable isolation for gas reports for more correct gas accounting.
        if self.gas_report {
            evm_opts.isolate = true;
        } else {
            // Do not collect gas report traces if gas report is not enabled.
            config.fuzz.gas_report_samples = 0;
            config.invariant.gas_report_samples = 0;
        }

        // Create test options from general project settings and compiler output.
        let should_debug = self.debug;
        let should_draw = self.flamegraph || self.flamechart;

        // Determine print verbosity and executor verbosity.
        // `verbosity` keeps the user-requested level for printing; the executor's verbosity is
        // raised to at least 3 so traces get recorded when a gas report or a flame drawing
        // needs them.
        let verbosity = evm_opts.verbosity;
        if (self.gas_report && evm_opts.verbosity < 3) || self.flamegraph || self.flamechart {
            evm_opts.verbosity = 3;
        }

        let env = evm_opts.evm_env().await?;

        // Enable internal tracing for more informative flamegraph.
        if should_draw && !self.decode_internal {
            self.decode_internal = true;
        }

        // Choose the internal function tracing mode, if --decode-internal is provided.
        let decode_internal = if self.decode_internal {
            // If more than one function matched, we enable simple tracing.
            // If only one function matched, we enable full tracing. This is done in `run_tests`.
            InternalTraceMode::Simple
        } else {
            InternalTraceMode::None
        };

        // Prepare the test builder.
        let config = Arc::new(config);
        let runner = MultiContractRunnerBuilder::new(config.clone())
            .set_debug(should_debug)
            .set_decode_internal(decode_internal)
            .initial_balance(evm_opts.initial_balance)
            .evm_spec(config.evm_spec_id())
            .sender(evm_opts.sender)
            .with_fork(evm_opts.get_fork(&config, env.clone()))
            .enable_isolation(evm_opts.isolate)
            .networks(evm_opts.networks)
            .fail_fast(self.fail_fast)
            .set_coverage(coverage)
            .build::<MultiCompiler>(output, env, evm_opts)?;

        // Keep the linked libraries; needed below for mapping traces back to sources.
        let libraries = runner.libraries.clone();
        let mut outcome = self.run_tests_inner(runner, config, verbosity, filter, output).await?;

        if should_draw {
            // Exactly one test matched (enforced in `run_tests_inner`), so take its result.
            let (suite_name, test_name, mut test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            // Only the execution trace (not setup/deployment) is drawn.
            let (_, arena) = test_result
                .traces
                .iter_mut()
                .find(|(kind, _)| *kind == TraceKind::Execution)
                .unwrap();

            // Decode traces.
            let decoder = outcome.last_run_decoder.as_ref().unwrap();
            decode_trace_arena(arena, decoder).await;
            let mut fst = folded_stack_trace::build(arena);

            let label = if self.flamegraph { "flamegraph" } else { "flamechart" };
            // Take the segment after the last `:` of the suite name as the contract name.
            let contract = suite_name.split(':').next_back().unwrap();
            let test_name = test_name.trim_end_matches("()");
            let file_name = format!("cache/{label}_{contract}_{test_name}.svg");
            let file = std::fs::File::create(&file_name).wrap_err("failed to create file")?;
            let file = std::io::BufWriter::new(file);

            let mut options = inferno::flamegraph::Options::default();
            options.title = format!("{label} {contract}::{test_name}");
            options.count_name = "gas".to_string();
            if self.flamechart {
                // Flamecharts are time-ordered; the collected lines are reversed for rendering.
                options.flame_chart = true;
                fst.reverse();
            }

            // Generate SVG.
            inferno::flamegraph::from_lines(&mut options, fst.iter().map(String::as_str), file)
                .wrap_err("failed to write svg")?;
            sh_println!("Saved to {file_name}")?;

            // Open SVG in default program.
            if let Err(e) = opener::open(&file_name) {
                sh_err!("Failed to open {file_name}; please open it manually: {e}")?;
            }
        }

        if should_debug {
            // Get first non-empty suite result. We will have only one such entry.
            let (_, _, test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let sources =
                ContractSources::from_project_output(output, project_root, Some(&libraries))?;

            // Run the debugger.
            let mut builder = Debugger::builder()
                .traces(
                    test_result.traces.iter().filter(|(t, _)| t.is_execution()).cloned().collect(),
                )
                .sources(sources)
                .breakpoints(test_result.breakpoints.clone());

            if let Some(decoder) = &outcome.last_run_decoder {
                builder = builder.decoder(decoder);
            }

            let mut debugger = builder.build();
            // Either dump all debugger steps to a file (--dump) or open the interactive TUI.
            if let Some(dump_path) = &self.dump {
                debugger.dump_to_file(dump_path)?;
            } else {
                debugger.try_run_tui()?;
            }
        }

        Ok(outcome)
    }
422
423    /// Run all tests that matches the filter predicate from a test runner
424    async fn run_tests_inner(
425        &self,
426        mut runner: MultiContractRunner,
427        config: Arc<Config>,
428        verbosity: u8,
429        filter: &ProjectPathsAwareFilter,
430        output: &ProjectCompileOutput,
431    ) -> eyre::Result<TestOutcome> {
432        if self.list {
433            return list(runner, filter);
434        }
435
436        trace!(target: "forge::test", "running all tests");
437
438        // If we need to render to a serialized format, we should not print anything else to stdout.
439        let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json();
440
441        let num_filtered = runner.matching_test_functions(filter).count();
442
443        if num_filtered == 0 {
444            let mut total_tests = num_filtered;
445            if !filter.is_empty() {
446                total_tests = runner.matching_test_functions(&EmptyTestFilter::default()).count();
447            }
448            if total_tests == 0 {
449                sh_println!(
450                    "No tests found in project! Forge looks for functions that start with `test`"
451                )?;
452            } else {
453                let mut msg = format!("no tests match the provided pattern:\n{filter}");
454                // Try to suggest a test when there's no match.
455                if let Some(test_pattern) = &filter.args().test_pattern {
456                    let test_name = test_pattern.as_str();
457                    // Filter contracts but not test functions.
458                    let candidates = runner.all_test_functions(filter).map(|f| &f.name);
459                    if let Some(suggestion) = utils::did_you_mean(test_name, candidates).pop() {
460                        write!(msg, "\nDid you mean `{suggestion}`?")?;
461                    }
462                }
463                sh_warn!("{msg}")?;
464            }
465            return Ok(TestOutcome::empty(Some(runner), false));
466        }
467
468        if num_filtered != 1 && (self.debug || self.flamegraph || self.flamechart) {
469            let action = if self.flamegraph {
470                "generate a flamegraph"
471            } else if self.flamechart {
472                "generate a flamechart"
473            } else {
474                "run the debugger"
475            };
476            let filter = if filter.is_empty() {
477                String::new()
478            } else {
479                format!("\n\nFilter used:\n{filter}")
480            };
481            eyre::bail!(
482                "{num_filtered} tests matched your criteria, but exactly 1 test must match in order to {action}.\n\n\
483                 Use --match-contract and --match-path to further limit the search.{filter}",
484            );
485        }
486
487        // If exactly one test matched, we enable full tracing.
488        if num_filtered == 1 && self.decode_internal {
489            runner.decode_internal = InternalTraceMode::Full;
490        }
491
492        // Run tests in a non-streaming fashion and collect results for serialization.
493        if !self.gas_report && !self.summary && shell::is_json() {
494            let mut results = runner.test_collect(filter)?;
495            results.values_mut().for_each(|suite_result| {
496                for test_result in suite_result.test_results.values_mut() {
497                    if verbosity >= 2 {
498                        // Decode logs at level 2 and above.
499                        test_result.decoded_logs = decode_console_logs(&test_result.logs);
500                    } else {
501                        // Empty logs for non verbose runs.
502                        test_result.logs = vec![];
503                    }
504                }
505            });
506            sh_println!("{}", serde_json::to_string(&results)?)?;
507            return Ok(TestOutcome::new(Some(runner), results, self.allow_failure));
508        }
509
510        if self.junit {
511            let results = runner.test_collect(filter)?;
512            sh_println!("{}", junit_xml_report(&results, verbosity).to_string()?)?;
513            return Ok(TestOutcome::new(Some(runner), results, self.allow_failure));
514        }
515
516        let remote_chain =
517            if runner.fork.is_some() { runner.env.tx.chain_id.map(Into::into) } else { None };
518        let known_contracts = runner.known_contracts.clone();
519
520        let libraries = runner.libraries.clone();
521
522        // Run tests in a streaming fashion.
523        let (tx, rx) = channel::<(String, SuiteResult)>();
524        let timer = Instant::now();
525        let show_progress = config.show_progress;
526        let handle = tokio::task::spawn_blocking({
527            let filter = filter.clone();
528            move || runner.test(&filter, tx, show_progress).map(|()| runner)
529        });
530
531        // Set up trace identifiers.
532        let mut identifier = TraceIdentifiers::new().with_local(&known_contracts);
533
534        // Avoid using external identifiers for gas report as we decode more traces and this will be
535        // expensive.
536        if !self.gas_report {
537            identifier = identifier.with_external(&config, remote_chain)?;
538        }
539
540        // Build the trace decoder.
541        let mut builder = CallTraceDecoderBuilder::new()
542            .with_known_contracts(&known_contracts)
543            .with_label_disabled(self.disable_labels)
544            .with_verbosity(verbosity);
545        // Signatures are of no value for gas reports.
546        if !self.gas_report {
547            builder =
548                builder.with_signature_identifier(SignaturesIdentifier::from_config(&config)?);
549        }
550
551        if self.decode_internal {
552            let sources =
553                ContractSources::from_project_output(output, &config.root, Some(&libraries))?;
554            builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources));
555        }
556        let mut decoder = builder.build();
557
558        let mut gas_report = self.gas_report.then(|| {
559            GasReport::new(
560                config.gas_reports.clone(),
561                config.gas_reports_ignore.clone(),
562                config.gas_reports_include_tests,
563            )
564        });
565
566        let mut gas_snapshots = BTreeMap::<String, BTreeMap<String, String>>::new();
567
568        let mut outcome = TestOutcome::empty(None, self.allow_failure);
569
570        let mut any_test_failed = false;
571        let mut backtrace_builder = None;
572        for (contract_name, mut suite_result) in rx {
573            let tests = &mut suite_result.test_results;
574            let has_tests = !tests.is_empty();
575
576            // Clear the addresses and labels from previous test.
577            decoder.clear_addresses();
578
579            // We identify addresses if we're going to print *any* trace or gas report.
580            let identify_addresses = verbosity >= 3
581                || self.gas_report
582                || self.debug
583                || self.flamegraph
584                || self.flamechart;
585
586            // Print suite header.
587            if !silent {
588                sh_println!()?;
589                for warning in &suite_result.warnings {
590                    sh_warn!("{warning}")?;
591                }
592                if has_tests {
593                    let len = tests.len();
594                    let tests = if len > 1 { "tests" } else { "test" };
595                    sh_println!("Ran {len} {tests} for {contract_name}")?;
596                }
597            }
598
599            // Process individual test results, printing logs and traces when necessary.
600            for (name, result) in tests {
601                let show_traces =
602                    !self.suppress_successful_traces || result.status == TestStatus::Failure;
603                if !silent {
604                    sh_println!("{}", result.short_result(name))?;
605
606                    // Display invariant metrics if invariant kind.
607                    if let TestKind::Invariant { metrics, .. } = &result.kind
608                        && !metrics.is_empty()
609                    {
610                        let _ = sh_println!("\n{}\n", format_invariant_metrics_table(metrics));
611                    }
612
613                    // We only display logs at level 2 and above
614                    if verbosity >= 2 && show_traces {
615                        // We only decode logs from Hardhat and DS-style console events
616                        let console_logs = decode_console_logs(&result.logs);
617                        if !console_logs.is_empty() {
618                            sh_println!("Logs:")?;
619                            for log in console_logs {
620                                sh_println!("  {log}")?;
621                            }
622                            sh_println!()?;
623                        }
624                    }
625                }
626
627                // We shouldn't break out of the outer loop directly here so that we finish
628                // processing the remaining tests and print the suite summary.
629                any_test_failed |= result.status == TestStatus::Failure;
630
631                // Clear the addresses and labels from previous runs.
632                decoder.clear_addresses();
633                decoder.labels.extend(result.labels.iter().map(|(k, v)| (*k, v.clone())));
634
635                // Identify addresses and decode traces.
636                let mut decoded_traces = Vec::with_capacity(result.traces.len());
637                for (kind, arena) in &mut result.traces {
638                    if identify_addresses {
639                        decoder.identify(arena, &mut identifier);
640                    }
641
642                    // verbosity:
643                    // - 0..3: nothing
644                    // - 3: only display traces for failed tests
645                    // - 4: also display the setup trace for failed tests
646                    // - 5..: display all traces for all tests, including storage changes
647                    let should_include = match kind {
648                        TraceKind::Execution => {
649                            (verbosity == 3 && result.status.is_failure()) || verbosity >= 4
650                        }
651                        TraceKind::Setup => {
652                            (verbosity == 4 && result.status.is_failure()) || verbosity >= 5
653                        }
654                        TraceKind::Deployment => false,
655                    };
656
657                    if should_include {
658                        decode_trace_arena(arena, &decoder).await;
659
660                        if let Some(trace_depth) = self.trace_depth {
661                            prune_trace_depth(arena, trace_depth);
662                        }
663
664                        decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4));
665                    }
666                }
667
668                if !silent && show_traces && !decoded_traces.is_empty() {
669                    sh_println!("Traces:")?;
670                    for trace in &decoded_traces {
671                        sh_println!("{trace}")?;
672                    }
673                }
674
675                // Extract and display backtrace for failed tests when verbosity >= 3
676                if !silent
677                    && result.status.is_failure()
678                    && verbosity >= 3
679                    && !result.traces.is_empty()
680                    && let Some((_, arena)) =
681                        result.traces.iter().find(|(kind, _)| matches!(kind, TraceKind::Execution))
682                {
683                    // Lazily initialize the backtrace builder on first failure
684                    let builder = backtrace_builder.get_or_insert_with(|| {
685                        BacktraceBuilder::new(
686                            output,
687                            config.root.clone(),
688                            config.parsed_libraries().ok(),
689                            config.via_ir,
690                        )
691                    });
692
693                    let backtrace = builder.from_traces(arena);
694
695                    if !backtrace.is_empty() {
696                        sh_println!("{}", backtrace)?;
697                    }
698                }
699
700                if let Some(gas_report) = &mut gas_report {
701                    gas_report.analyze(result.traces.iter().map(|(_, a)| &a.arena), &decoder).await;
702
703                    for trace in &result.gas_report_traces {
704                        decoder.clear_addresses();
705
706                        // Re-execute setup and deployment traces to collect identities created in
707                        // setUp and constructor.
708                        for (kind, arena) in &result.traces {
709                            if !matches!(kind, TraceKind::Execution) {
710                                decoder.identify(arena, &mut identifier);
711                            }
712                        }
713
714                        for arena in trace {
715                            decoder.identify(arena, &mut identifier);
716                            gas_report.analyze([arena], &decoder).await;
717                        }
718                    }
719                }
720                // Clear memory.
721                result.gas_report_traces = Default::default();
722
723                // Collect and merge gas snapshots.
724                for (group, new_snapshots) in &result.gas_snapshots {
725                    gas_snapshots.entry(group.clone()).or_default().extend(new_snapshots.clone());
726                }
727            }
728
729            // Write gas snapshots to disk if any were collected.
730            if !gas_snapshots.is_empty() {
731                // By default `gas_snapshot_check` is set to `false` in the config.
732                //
733                // The user can either:
734                // - Set `FORGE_SNAPSHOT_CHECK=true` in the environment.
735                // - Pass `--gas-snapshot-check=true` as a CLI argument.
736                // - Set `gas_snapshot_check = true` in the config.
737                //
738                // If the user passes `--gas-snapshot-check=<bool>` then it will override the config
739                // and the environment variable, disabling the check if `false` is passed.
740                //
741                // Exiting early with code 1 if differences are found.
742                if self.gas_snapshot_check.unwrap_or(config.gas_snapshot_check) {
743                    let differences_found = gas_snapshots.clone().into_iter().fold(
744                        false,
745                        |mut found, (group, snapshots)| {
746                            // If the snapshot file doesn't exist, we can't compare so we skip.
747                            if !&config.snapshots.join(format!("{group}.json")).exists() {
748                                return false;
749                            }
750
751                            let previous_snapshots: BTreeMap<String, String> =
752                                fs::read_json_file(&config.snapshots.join(format!("{group}.json")))
753                                    .expect("Failed to read snapshots from disk");
754
755                            let diff: BTreeMap<_, _> = snapshots
756                                .iter()
757                                .filter_map(|(k, v)| {
758                                    previous_snapshots.get(k).and_then(|previous_snapshot| {
759                                        if previous_snapshot != v {
760                                            Some((
761                                                k.clone(),
762                                                (previous_snapshot.clone(), v.clone()),
763                                            ))
764                                        } else {
765                                            None
766                                        }
767                                    })
768                                })
769                                .collect();
770
771                            if !diff.is_empty() {
772                                let _ = sh_eprintln!(
773                                    "{}",
774                                    format!("\n[{group}] Failed to match snapshots:").red().bold()
775                                );
776
777                                for (key, (previous_snapshot, snapshot)) in &diff {
778                                    let _ = sh_eprintln!(
779                                        "{}",
780                                        format!("- [{key}] {previous_snapshot} → {snapshot}").red()
781                                    );
782                                }
783
784                                found = true;
785                            }
786
787                            found
788                        },
789                    );
790
791                    if differences_found {
792                        sh_eprintln!()?;
793                        eyre::bail!("Snapshots differ from previous run");
794                    }
795                }
796
797                // By default `gas_snapshot_emit` is set to `true` in the config.
798                //
799                // The user can either:
800                // - Set `FORGE_SNAPSHOT_EMIT=false` in the environment.
801                // - Pass `--gas-snapshot-emit=false` as a CLI argument.
802                // - Set `gas_snapshot_emit = false` in the config.
803                //
804                // If the user passes `--gas-snapshot-emit=<bool>` then it will override the config
805                // and the environment variable, enabling the check if `true` is passed.
806                if self.gas_snapshot_emit.unwrap_or(config.gas_snapshot_emit) {
807                    // Create `snapshots` directory if it doesn't exist.
808                    fs::create_dir_all(&config.snapshots)?;
809
810                    // Write gas snapshots to disk per group.
811                    gas_snapshots.clone().into_iter().for_each(|(group, snapshots)| {
812                        fs::write_pretty_json_file(
813                            &config.snapshots.join(format!("{group}.json")),
814                            &snapshots,
815                        )
816                        .expect("Failed to write gas snapshots to disk");
817                    });
818                }
819            }
820
821            // Print suite summary.
822            if !silent && has_tests {
823                sh_println!("{}", suite_result.summary())?;
824            }
825
826            // Add the suite result to the outcome.
827            outcome.results.insert(contract_name, suite_result);
828
829            // Stop processing the remaining suites if any test failed and `fail_fast` is set.
830            if self.fail_fast && any_test_failed {
831                break;
832            }
833        }
834        outcome.last_run_decoder = Some(decoder);
835        let duration = timer.elapsed();
836
837        trace!(target: "forge::test", len=outcome.results.len(), %any_test_failed, "done with results");
838
839        if let Some(gas_report) = gas_report {
840            let finalized = gas_report.finalize();
841            sh_println!("{}", &finalized)?;
842            outcome.gas_report = Some(finalized);
843        }
844
845        if !self.summary && !shell::is_json() {
846            sh_println!("{}", outcome.summary(duration))?;
847        }
848
849        if self.summary && !outcome.results.is_empty() {
850            let summary_report = TestSummaryReport::new(self.detailed, outcome.clone());
851            sh_println!("{}", &summary_report)?;
852        }
853
854        // Reattach the task.
855        match handle.await {
856            Ok(result) => outcome.runner = Some(result?),
857            Err(e) => match e.try_into_panic() {
858                Ok(payload) => std::panic::resume_unwind(payload),
859                Err(e) => return Err(e.into()),
860            },
861        }
862
863        // Persist test run failures to enable replaying.
864        persist_run_failures(&config, &outcome);
865
866        Ok(outcome)
867    }
868
869    /// Returns the flattened [`FilterArgs`] arguments merged with [`Config`].
870    /// Loads and applies filter from file if only last test run failures performed.
871    pub fn filter(&self, config: &Config) -> Result<ProjectPathsAwareFilter> {
872        let mut filter = self.filter.clone();
873        if self.rerun {
874            filter.test_pattern = last_run_failures(config);
875        }
876        if filter.path_pattern.is_some() {
877            if self.path.is_some() {
878                bail!("Can not supply both --match-path and |path|");
879            }
880        } else {
881            filter.path_pattern = self.path.clone();
882        }
883        Ok(filter.merge_with_config(config))
884    }
885
886    /// Returns whether `BuildArgs` was configured with `--watch`
887    pub fn is_watch(&self) -> bool {
888        self.watch.watch.is_some()
889    }
890
891    /// Returns the [`watchexec::Config`] necessary to bootstrap a new watch loop.
892    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
893        self.watch.watchexec_config(|| {
894            let config = self.load_config()?;
895            Ok([config.src, config.test])
896        })
897    }
898}
899
900impl Provider for TestArgs {
901    fn metadata(&self) -> Metadata {
902        Metadata::named("Core Build Args Provider")
903    }
904
905    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
906        let mut dict = Dict::default();
907
908        let mut fuzz_dict = Dict::default();
909        if let Some(fuzz_seed) = self.fuzz_seed {
910            fuzz_dict.insert("seed".to_string(), fuzz_seed.to_string().into());
911        }
912        if let Some(fuzz_runs) = self.fuzz_runs {
913            fuzz_dict.insert("runs".to_string(), fuzz_runs.into());
914        }
915        if let Some(fuzz_timeout) = self.fuzz_timeout {
916            fuzz_dict.insert("timeout".to_string(), fuzz_timeout.into());
917        }
918        if let Some(fuzz_input_file) = self.fuzz_input_file.clone() {
919            fuzz_dict.insert("failure_persist_file".to_string(), fuzz_input_file.into());
920        }
921        dict.insert("fuzz".to_string(), fuzz_dict.into());
922
923        if let Some(etherscan_api_key) =
924            self.etherscan_api_key.as_ref().filter(|s| !s.trim().is_empty())
925        {
926            dict.insert("etherscan_api_key".to_string(), etherscan_api_key.to_string().into());
927        }
928
929        if self.show_progress {
930            dict.insert("show_progress".to_string(), true.into());
931        }
932
933        Ok(Map::from([(Config::selected_profile(), dict)]))
934    }
935}
936
937/// Lists all matching tests
938fn list(runner: MultiContractRunner, filter: &ProjectPathsAwareFilter) -> Result<TestOutcome> {
939    let results = runner.list(filter);
940
941    if shell::is_json() {
942        sh_println!("{}", serde_json::to_string(&results)?)?;
943    } else {
944        for (file, contracts) in &results {
945            sh_println!("{file}")?;
946            for (contract, tests) in contracts {
947                sh_println!("  {contract}")?;
948                sh_println!("    {}\n", tests.join("\n    "))?;
949            }
950        }
951    }
952    Ok(TestOutcome::empty(Some(runner), false))
953}
954
955/// Load persisted filter (with last test run failures) from file.
956fn last_run_failures(config: &Config) -> Option<regex::Regex> {
957    match fs::read_to_string(&config.test_failures_file) {
958        Ok(filter) => Regex::new(&filter)
959            .inspect_err(|e| {
960                _ = sh_warn!(
961                    "failed to parse test filter from {:?}: {e}",
962                    config.test_failures_file
963                )
964            })
965            .ok(),
966        Err(_) => None,
967    }
968}
969
970/// Persist filter with last test run failures (only if there's any failure).
971fn persist_run_failures(config: &Config, outcome: &TestOutcome) {
972    if outcome.failed() > 0 && fs::create_file(&config.test_failures_file).is_ok() {
973        let mut filter = String::new();
974        let mut failures = outcome.failures().peekable();
975        while let Some((test_name, _)) = failures.next() {
976            if test_name.is_any_test()
977                && let Some(test_match) = test_name.split("(").next()
978            {
979                filter.push_str(test_match);
980                if failures.peek().is_some() {
981                    filter.push('|');
982                }
983            }
984        }
985        let _ = fs::write(&config.test_failures_file, filter);
986    }
987}
988
989/// Generate test report in JUnit XML report format.
990fn junit_xml_report(results: &BTreeMap<String, SuiteResult>, verbosity: u8) -> Report {
991    let mut total_duration = Duration::default();
992    let mut junit_report = Report::new("Test run");
993    junit_report.set_timestamp(Utc::now());
994    for (suite_name, suite_result) in results {
995        let mut test_suite = TestSuite::new(suite_name);
996        total_duration += suite_result.duration;
997        test_suite.set_time(suite_result.duration);
998        test_suite.set_system_out(suite_result.summary());
999        for (test_name, test_result) in &suite_result.test_results {
1000            let mut test_status = match test_result.status {
1001                TestStatus::Success => TestCaseStatus::success(),
1002                TestStatus::Failure => TestCaseStatus::non_success(NonSuccessKind::Failure),
1003                TestStatus::Skipped => TestCaseStatus::skipped(),
1004            };
1005            if let Some(reason) = &test_result.reason {
1006                test_status.set_message(reason);
1007            }
1008
1009            let mut test_case = TestCase::new(test_name, test_status);
1010            test_case.set_time(test_result.duration);
1011
1012            let mut sys_out = String::new();
1013            let result_report = test_result.kind.report();
1014            write!(sys_out, "{test_result} {test_name} {result_report}").unwrap();
1015            if verbosity >= 2 && !test_result.logs.is_empty() {
1016                write!(sys_out, "\\nLogs:\\n").unwrap();
1017                let console_logs = decode_console_logs(&test_result.logs);
1018                for log in console_logs {
1019                    write!(sys_out, "  {log}\\n").unwrap();
1020                }
1021            }
1022
1023            test_case.set_system_out(sys_out);
1024            test_suite.add_test_case(test_case);
1025        }
1026        junit_report.add_test_suite(test_suite);
1027    }
1028    junit_report.set_time(total_duration);
1029    junit_report
1030}
1031
#[cfg(test)]
mod tests {
    use super::*;
    use foundry_config::Chain;

    // `-w` combined with a verbosity flag still enables watch mode.
    #[test]
    fn watch_parse() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "-vw"]);
        assert!(args.watch.watch.is_some());
    }

    // `--fuzz-seed` accepts a hex value.
    #[test]
    fn fuzz_seed() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    // `--trace-depth` parses into `trace_depth`.
    #[test]
    fn depth_trace() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "--trace-depth", "2"]);
        assert!(args.trace_depth.is_some());
    }

    // <https://github.com/foundry-rs/foundry/issues/5913>
    // `--fuzz-seed` survives alongside verbosity and `--gas-report`.
    #[test]
    fn fuzz_seed_exists() {
        let args: TestArgs =
            TestArgs::parse_from(["foundry-cli", "-vvv", "--gas-report", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    // `--chain-id` propagates into both the merged config and the EVM opts.
    #[test]
    fn extract_chain() {
        let check = |arg: &str, expected: Chain| {
            let args = TestArgs::parse_from(["foundry-cli", arg]);
            assert_eq!(args.evm.env.chain, Some(expected));
            let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
            assert_eq!(config.chain, Some(expected));
            assert_eq!(evm_opts.env.chain_id, Some(expected.id()));
        };
        check("--chain-id=1", Chain::mainnet());
        check("--chain-id=42", Chain::from_id(42));
    }
}