forge/cmd/test/
mod.rs

1use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
2use crate::{
3    MultiContractRunner, MultiContractRunnerBuilder,
4    decode::decode_console_logs,
5    gas_report::GasReport,
6    multi_runner::{is_test_contract, matches_artifact},
7    result::{SuiteResult, TestOutcome, TestStatus},
8    traces::{
9        CallTraceDecoderBuilder, InternalTraceMode, TraceKind,
10        debug::{ContractSources, DebugTraceIdentifier},
11        decode_trace_arena, folded_stack_trace,
12        identifier::SignaturesIdentifier,
13    },
14};
15use alloy_primitives::U256;
16use chrono::Utc;
17use clap::{Parser, ValueHint};
18use eyre::{Context, OptionExt, Result, bail};
19use foundry_cli::{
20    opts::{BuildOpts, GlobalArgs},
21    utils::{self, LoadConfig},
22};
23use foundry_common::{
24    EmptyTestFilter, TestFunctionExt, compile::ProjectCompiler, evm::EvmArgs, fs, shell,
25};
26use foundry_compilers::{
27    ProjectCompileOutput, artifacts::output_selection::OutputSelection,
28    compilers::multi::MultiCompiler,
29};
30use foundry_config::{
31    Config, figment,
32    figment::{
33        Metadata, Profile, Provider,
34        value::{Dict, Map},
35    },
36    filter::GlobMatcher,
37};
38use foundry_debugger::Debugger;
39use foundry_evm::traces::identifier::TraceIdentifiers;
40use regex::Regex;
41use std::{
42    collections::{BTreeMap, BTreeSet},
43    fmt::Write,
44    path::PathBuf,
45    sync::{Arc, mpsc::channel},
46    time::{Duration, Instant},
47};
48use yansi::Paint;
49
50mod filter;
51mod summary;
52use crate::{result::TestKind, traces::render_trace_arena_inner};
53pub use filter::FilterArgs;
54use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
55use summary::{TestSummaryReport, format_invariant_metrics_table};
56
57// Loads project's figment and merges the build cli arguments into it
58foundry_config::merge_impl_figment_convert!(TestArgs, build, evm);
59
60/// CLI arguments for `forge test`.
61#[derive(Clone, Debug, Parser)]
62#[command(next_help_heading = "Test options")]
63pub struct TestArgs {
64    // Include global options for users of this struct.
65    #[command(flatten)]
66    pub global: GlobalArgs,
67
68    /// The contract file you want to test, it's a shortcut for --match-path.
69    #[arg(value_hint = ValueHint::FilePath)]
70    pub path: Option<GlobMatcher>,
71
72    /// Run a single test in the debugger.
73    ///
74    /// The matching test will be opened in the debugger regardless of the outcome of the test.
75    ///
76    /// If the matching test is a fuzz test, then it will open the debugger on the first failure
77    /// case. If the fuzz test does not fail, it will open the debugger on the last fuzz case.
78    #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"])]
79    debug: bool,
80
81    /// Generate a flamegraph for a single test. Implies `--decode-internal`.
82    ///
83    /// A flame graph is used to visualize which functions or operations within the smart contract
84    /// are consuming the most gas overall in a sorted manner.
85    #[arg(long)]
86    flamegraph: bool,
87
88    /// Generate a flamechart for a single test. Implies `--decode-internal`.
89    ///
90    /// A flame chart shows the gas usage over time, illustrating when each function is
91    /// called (execution order) and how much gas it consumes at each point in the timeline.
92    #[arg(long, conflicts_with = "flamegraph")]
93    flamechart: bool,
94
95    /// Identify internal functions in traces.
96    ///
97    /// This will trace internal functions and decode stack parameters.
98    ///
99    /// Parameters stored in memory (such as bytes or arrays) are currently decoded only when a
100    /// single function is matched, similarly to `--debug`, for performance reasons.
101    #[arg(long)]
102    decode_internal: bool,
103
104    /// Dumps all debugger steps to file.
105    #[arg(
106        long,
107        requires = "debug",
108        value_hint = ValueHint::FilePath,
109        value_name = "PATH"
110    )]
111    dump: Option<PathBuf>,
112
113    /// Print a gas report.
114    #[arg(long, env = "FORGE_GAS_REPORT")]
115    gas_report: bool,
116
117    /// Check gas snapshots against previous runs.
118    #[arg(long, env = "FORGE_SNAPSHOT_CHECK")]
119    gas_snapshot_check: Option<bool>,
120
121    /// Enable/disable recording of gas snapshot results.
122    #[arg(long, env = "FORGE_SNAPSHOT_EMIT")]
123    gas_snapshot_emit: Option<bool>,
124
125    /// Exit with code 0 even if a test fails.
126    #[arg(long, env = "FORGE_ALLOW_FAILURE")]
127    allow_failure: bool,
128
129    /// Suppress successful test traces and show only traces for failures.
130    #[arg(long, short, env = "FORGE_SUPPRESS_SUCCESSFUL_TRACES", help_heading = "Display options")]
131    suppress_successful_traces: bool,
132
133    /// Output test results as JUnit XML report.
134    #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")]
135    pub junit: bool,
136
137    /// Stop running tests after the first failure.
138    #[arg(long)]
139    pub fail_fast: bool,
140
141    /// The Etherscan (or equivalent) API key.
142    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
143    etherscan_api_key: Option<String>,
144
145    /// List tests instead of running them.
146    #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")]
147    list: bool,
148
149    /// Set seed used to generate randomness during your fuzz runs.
150    #[arg(long)]
151    pub fuzz_seed: Option<U256>,
152
153    #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")]
154    pub fuzz_runs: Option<u64>,
155
156    /// Timeout for each fuzz run in seconds.
157    #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")]
158    pub fuzz_timeout: Option<u64>,
159
160    /// File to rerun fuzz failures from.
161    #[arg(long)]
162    pub fuzz_input_file: Option<String>,
163
164    /// Show test execution progress.
165    #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")]
166    pub show_progress: bool,
167
168    /// Re-run recorded test failures from last run.
169    /// If no failure recorded then regular test run is performed.
170    #[arg(long)]
171    pub rerun: bool,
172
173    /// Print test summary table.
174    #[arg(long, help_heading = "Display options")]
175    pub summary: bool,
176
177    /// Print detailed test summary table.
178    #[arg(long, help_heading = "Display options", requires = "summary")]
179    pub detailed: bool,
180
181    /// Disables the labels in the traces.
182    #[arg(long, help_heading = "Display options")]
183    pub disable_labels: bool,
184
185    #[command(flatten)]
186    filter: FilterArgs,
187
188    #[command(flatten)]
189    evm: EvmArgs,
190
191    #[command(flatten)]
192    pub build: BuildOpts,
193
194    #[command(flatten)]
195    pub watch: WatchArgs,
196}
197
198impl TestArgs {
199    pub async fn run(self) -> Result<TestOutcome> {
200        trace!(target: "forge::test", "executing test command");
201        self.execute_tests().await
202    }
203
204    /// Returns a list of files that need to be compiled in order to run all the tests that match
205    /// the given filter.
206    ///
207    /// This means that it will return all sources that are not test contracts or that match the
208    /// filter. We want to compile all non-test sources always because tests might depend on them
209    /// dynamically through cheatcodes.
210    ///
211    /// Returns `None` if all sources should be compiled.
212    #[instrument(target = "forge::test", skip_all)]
213    pub fn get_sources_to_compile(
214        &self,
215        config: &Config,
216        test_filter: &ProjectPathsAwareFilter,
217    ) -> Result<Option<BTreeSet<PathBuf>>> {
218        // An empty filter doesn't filter out anything.
219        if test_filter.is_empty() {
220            return Ok(None);
221        }
222
223        let mut project = config.create_project(true, true)?;
224        project.update_output_selection(|selection| {
225            *selection = OutputSelection::common_output_selection(["abi".to_string()]);
226        });
227        let output = project.compile()?;
228        if output.has_compiler_errors() {
229            sh_println!("{output}")?;
230            eyre::bail!("Compilation failed");
231        }
232
233        let sources = output
234            .artifact_ids()
235            .filter_map(|(id, artifact)| artifact.abi.as_ref().map(|abi| (id, abi)))
236            .filter(|(id, abi)| {
237                !is_test_contract(abi.functions()) || matches_artifact(test_filter, id, abi)
238            })
239            .map(|(id, _)| id.source)
240            .collect::<BTreeSet<_>>();
241        Ok(Some(sources))
242    }
243
244    /// Executes all the tests in the project.
245    ///
246    /// This will trigger the build process first. On success all test contracts that match the
247    /// configured filter will be executed
248    ///
249    /// Returns the test results for all matching tests.
250    pub async fn execute_tests(mut self) -> Result<TestOutcome> {
251        // Merge all configs.
252        let (mut config, mut evm_opts) = self.load_config_and_evm_opts()?;
253
254        // Explicitly enable isolation for gas reports for more correct gas accounting.
255        if self.gas_report {
256            evm_opts.isolate = true;
257        } else {
258            // Do not collect gas report traces if gas report is not enabled.
259            config.fuzz.gas_report_samples = 0;
260            config.invariant.gas_report_samples = 0;
261        }
262
263        // Install missing dependencies.
264        if install::install_missing_dependencies(&mut config) && config.auto_detect_remappings {
265            // need to re-configure here to also catch additional remappings
266            config = self.load_config()?;
267        }
268
269        // Set up the project.
270        let project = config.project()?;
271
272        let filter = self.filter(&config)?;
273        trace!(target: "forge::test", ?filter, "using filter");
274
275        let compiler = ProjectCompiler::new()
276            .dynamic_test_linking(config.dynamic_test_linking)
277            .quiet(shell::is_json() || self.junit)
278            .files(self.get_sources_to_compile(&config, &filter)?.unwrap_or_default());
279        let output = compiler.compile(&project)?;
280
281        // Create test options from general project settings and compiler output.
282        let project_root = &project.paths.root;
283
284        let should_debug = self.debug;
285        let should_draw = self.flamegraph || self.flamechart;
286
287        // Determine print verbosity and executor verbosity.
288        let verbosity = evm_opts.verbosity;
289        if (self.gas_report && evm_opts.verbosity < 3) || self.flamegraph || self.flamechart {
290            evm_opts.verbosity = 3;
291        }
292
293        let env = evm_opts.evm_env().await?;
294
295        // Enable internal tracing for more informative flamegraph.
296        if should_draw && !self.decode_internal {
297            self.decode_internal = true;
298        }
299
300        // Choose the internal function tracing mode, if --decode-internal is provided.
301        let decode_internal = if self.decode_internal {
302            // If more than one function matched, we enable simple tracing.
303            // If only one function matched, we enable full tracing. This is done in `run_tests`.
304            InternalTraceMode::Simple
305        } else {
306            InternalTraceMode::None
307        };
308
309        // Prepare the test builder.
310        let config = Arc::new(config);
311        let runner = MultiContractRunnerBuilder::new(config.clone())
312            .set_debug(should_debug)
313            .set_decode_internal(decode_internal)
314            .initial_balance(evm_opts.initial_balance)
315            .evm_spec(config.evm_spec_id())
316            .sender(evm_opts.sender)
317            .with_fork(evm_opts.get_fork(&config, env.clone()))
318            .enable_isolation(evm_opts.isolate)
319            .fail_fast(self.fail_fast)
320            .odyssey(evm_opts.odyssey)
321            .build::<MultiCompiler>(project_root, &output, env, evm_opts)?;
322
323        let libraries = runner.libraries.clone();
324        let mut outcome = self.run_tests(runner, config, verbosity, &filter, &output).await?;
325
326        if should_draw {
327            let (suite_name, test_name, mut test_result) =
328                outcome.remove_first().ok_or_eyre("no tests were executed")?;
329
330            let (_, arena) = test_result
331                .traces
332                .iter_mut()
333                .find(|(kind, _)| *kind == TraceKind::Execution)
334                .unwrap();
335
336            // Decode traces.
337            let decoder = outcome.last_run_decoder.as_ref().unwrap();
338            decode_trace_arena(arena, decoder).await;
339            let mut fst = folded_stack_trace::build(arena);
340
341            let label = if self.flamegraph { "flamegraph" } else { "flamechart" };
342            let contract = suite_name.split(':').next_back().unwrap();
343            let test_name = test_name.trim_end_matches("()");
344            let file_name = format!("cache/{label}_{contract}_{test_name}.svg");
345            let file = std::fs::File::create(&file_name).wrap_err("failed to create file")?;
346            let file = std::io::BufWriter::new(file);
347
348            let mut options = inferno::flamegraph::Options::default();
349            options.title = format!("{label} {contract}::{test_name}");
350            options.count_name = "gas".to_string();
351            if self.flamechart {
352                options.flame_chart = true;
353                fst.reverse();
354            }
355
356            // Generate SVG.
357            inferno::flamegraph::from_lines(&mut options, fst.iter().map(String::as_str), file)
358                .wrap_err("failed to write svg")?;
359            sh_println!("Saved to {file_name}")?;
360
361            // Open SVG in default program.
362            if let Err(e) = opener::open(&file_name) {
363                sh_err!("Failed to open {file_name}; please open it manually: {e}")?;
364            }
365        }
366
367        if should_debug {
368            // Get first non-empty suite result. We will have only one such entry.
369            let (_, _, test_result) =
370                outcome.remove_first().ok_or_eyre("no tests were executed")?;
371
372            let sources =
373                ContractSources::from_project_output(&output, project.root(), Some(&libraries))?;
374
375            // Run the debugger.
376            let mut builder = Debugger::builder()
377                .traces(
378                    test_result.traces.iter().filter(|(t, _)| t.is_execution()).cloned().collect(),
379                )
380                .sources(sources)
381                .breakpoints(test_result.breakpoints.clone());
382
383            if let Some(decoder) = &outcome.last_run_decoder {
384                builder = builder.decoder(decoder);
385            }
386
387            let mut debugger = builder.build();
388            if let Some(dump_path) = self.dump {
389                debugger.dump_to_file(&dump_path)?;
390            } else {
391                debugger.try_run_tui()?;
392            }
393        }
394
395        Ok(outcome)
396    }
397
398    /// Run all tests that matches the filter predicate from a test runner
399    pub async fn run_tests(
400        &self,
401        mut runner: MultiContractRunner,
402        config: Arc<Config>,
403        verbosity: u8,
404        filter: &ProjectPathsAwareFilter,
405        output: &ProjectCompileOutput,
406    ) -> eyre::Result<TestOutcome> {
407        if self.list {
408            return list(runner, filter);
409        }
410
411        trace!(target: "forge::test", "running all tests");
412
413        // If we need to render to a serialized format, we should not print anything else to stdout.
414        let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json();
415
416        let num_filtered = runner.matching_test_functions(filter).count();
417
418        if num_filtered == 0 {
419            let mut total_tests = num_filtered;
420            if !filter.is_empty() {
421                total_tests = runner.matching_test_functions(&EmptyTestFilter::default()).count();
422            }
423            if total_tests == 0 {
424                sh_println!(
425                    "No tests found in project! Forge looks for functions that start with `test`"
426                )?;
427            } else {
428                let mut msg = format!("no tests match the provided pattern:\n{filter}");
429                // Try to suggest a test when there's no match.
430                if let Some(test_pattern) = &filter.args().test_pattern {
431                    let test_name = test_pattern.as_str();
432                    // Filter contracts but not test functions.
433                    let candidates = runner.all_test_functions(filter).map(|f| &f.name);
434                    if let Some(suggestion) = utils::did_you_mean(test_name, candidates).pop() {
435                        write!(msg, "\nDid you mean `{suggestion}`?")?;
436                    }
437                }
438                sh_warn!("{msg}")?;
439            }
440            return Ok(TestOutcome::empty(false));
441        }
442
443        if num_filtered != 1 && (self.debug || self.flamegraph || self.flamechart) {
444            let action = if self.flamegraph {
445                "generate a flamegraph"
446            } else if self.flamechart {
447                "generate a flamechart"
448            } else {
449                "run the debugger"
450            };
451            let filter = if filter.is_empty() {
452                String::new()
453            } else {
454                format!("\n\nFilter used:\n{filter}")
455            };
456            eyre::bail!(
457                "{num_filtered} tests matched your criteria, but exactly 1 test must match in order to {action}.\n\n\
458                 Use --match-contract and --match-path to further limit the search.{filter}",
459            );
460        }
461
462        // If exactly one test matched, we enable full tracing.
463        if num_filtered == 1 && self.decode_internal {
464            runner.decode_internal = InternalTraceMode::Full;
465        }
466
467        // Run tests in a non-streaming fashion and collect results for serialization.
468        if !self.gas_report && !self.summary && shell::is_json() {
469            let mut results = runner.test_collect(filter)?;
470            results.values_mut().for_each(|suite_result| {
471                for test_result in suite_result.test_results.values_mut() {
472                    if verbosity >= 2 {
473                        // Decode logs at level 2 and above.
474                        test_result.decoded_logs = decode_console_logs(&test_result.logs);
475                    } else {
476                        // Empty logs for non verbose runs.
477                        test_result.logs = vec![];
478                    }
479                }
480            });
481            sh_println!("{}", serde_json::to_string(&results)?)?;
482            return Ok(TestOutcome::new(results, self.allow_failure));
483        }
484
485        if self.junit {
486            let results = runner.test_collect(filter)?;
487            sh_println!("{}", junit_xml_report(&results, verbosity).to_string()?)?;
488            return Ok(TestOutcome::new(results, self.allow_failure));
489        }
490
491        let remote_chain_id = runner.evm_opts.get_remote_chain_id().await;
492        let known_contracts = runner.known_contracts.clone();
493
494        let libraries = runner.libraries.clone();
495
496        // Run tests in a streaming fashion.
497        let (tx, rx) = channel::<(String, SuiteResult)>();
498        let timer = Instant::now();
499        let show_progress = config.show_progress;
500        let handle = tokio::task::spawn_blocking({
501            let filter = filter.clone();
502            move || runner.test(&filter, tx, show_progress)
503        });
504
505        // Set up trace identifiers.
506        let mut identifier = TraceIdentifiers::new().with_local(&known_contracts);
507
508        // Avoid using etherscan for gas report as we decode more traces and this will be
509        // expensive.
510        if !self.gas_report {
511            identifier = identifier.with_etherscan(&config, remote_chain_id)?;
512        }
513
514        // Build the trace decoder.
515        let mut builder = CallTraceDecoderBuilder::new()
516            .with_known_contracts(&known_contracts)
517            .with_label_disabled(self.disable_labels)
518            .with_verbosity(verbosity);
519        // Signatures are of no value for gas reports.
520        if !self.gas_report {
521            builder =
522                builder.with_signature_identifier(SignaturesIdentifier::from_config(&config)?);
523        }
524
525        if self.decode_internal {
526            let sources =
527                ContractSources::from_project_output(output, &config.root, Some(&libraries))?;
528            builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources));
529        }
530        let mut decoder = builder.build();
531
532        let mut gas_report = self.gas_report.then(|| {
533            GasReport::new(
534                config.gas_reports.clone(),
535                config.gas_reports_ignore.clone(),
536                config.gas_reports_include_tests,
537            )
538        });
539
540        let mut gas_snapshots = BTreeMap::<String, BTreeMap<String, String>>::new();
541
542        let mut outcome = TestOutcome::empty(self.allow_failure);
543
544        let mut any_test_failed = false;
545        for (contract_name, suite_result) in rx {
546            let tests = &suite_result.test_results;
547
548            // Clear the addresses and labels from previous test.
549            decoder.clear_addresses();
550
551            // We identify addresses if we're going to print *any* trace or gas report.
552            let identify_addresses = verbosity >= 3
553                || self.gas_report
554                || self.debug
555                || self.flamegraph
556                || self.flamechart;
557
558            // Print suite header.
559            if !silent {
560                sh_println!()?;
561                for warning in &suite_result.warnings {
562                    sh_warn!("{warning}")?;
563                }
564                if !tests.is_empty() {
565                    let len = tests.len();
566                    let tests = if len > 1 { "tests" } else { "test" };
567                    sh_println!("Ran {len} {tests} for {contract_name}")?;
568                }
569            }
570
571            // Process individual test results, printing logs and traces when necessary.
572            for (name, result) in tests {
573                let show_traces =
574                    !self.suppress_successful_traces || result.status == TestStatus::Failure;
575                if !silent {
576                    sh_println!("{}", result.short_result(name))?;
577
578                    // Display invariant metrics if invariant kind.
579                    if let TestKind::Invariant { metrics, .. } = &result.kind
580                        && !metrics.is_empty()
581                    {
582                        let _ = sh_println!("\n{}\n", format_invariant_metrics_table(metrics));
583                    }
584
585                    // We only display logs at level 2 and above
586                    if verbosity >= 2 && show_traces {
587                        // We only decode logs from Hardhat and DS-style console events
588                        let console_logs = decode_console_logs(&result.logs);
589                        if !console_logs.is_empty() {
590                            sh_println!("Logs:")?;
591                            for log in console_logs {
592                                sh_println!("  {log}")?;
593                            }
594                            sh_println!()?;
595                        }
596                    }
597                }
598
599                // We shouldn't break out of the outer loop directly here so that we finish
600                // processing the remaining tests and print the suite summary.
601                any_test_failed |= result.status == TestStatus::Failure;
602
603                // Clear the addresses and labels from previous runs.
604                decoder.clear_addresses();
605                decoder.labels.extend(result.labels.iter().map(|(k, v)| (*k, v.clone())));
606
607                // Identify addresses and decode traces.
608                let mut decoded_traces = Vec::with_capacity(result.traces.len());
609                for (kind, arena) in &mut result.traces.clone() {
610                    if identify_addresses {
611                        decoder.identify(arena, &mut identifier);
612                    }
613
614                    // verbosity:
615                    // - 0..3: nothing
616                    // - 3: only display traces for failed tests
617                    // - 4: also display the setup trace for failed tests
618                    // - 5..: display all traces for all tests, including storage changes
619                    let should_include = match kind {
620                        TraceKind::Execution => {
621                            (verbosity == 3 && result.status.is_failure()) || verbosity >= 4
622                        }
623                        TraceKind::Setup => {
624                            (verbosity == 4 && result.status.is_failure()) || verbosity >= 5
625                        }
626                        TraceKind::Deployment => false,
627                    };
628
629                    if should_include {
630                        decode_trace_arena(arena, &decoder).await;
631                        decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4));
632                    }
633                }
634
635                if !silent && show_traces && !decoded_traces.is_empty() {
636                    sh_println!("Traces:")?;
637                    for trace in &decoded_traces {
638                        sh_println!("{trace}")?;
639                    }
640                }
641
642                if let Some(gas_report) = &mut gas_report {
643                    gas_report.analyze(result.traces.iter().map(|(_, a)| &a.arena), &decoder).await;
644
645                    for trace in &result.gas_report_traces {
646                        decoder.clear_addresses();
647
648                        // Re-execute setup and deployment traces to collect identities created in
649                        // setUp and constructor.
650                        for (kind, arena) in &result.traces {
651                            if !matches!(kind, TraceKind::Execution) {
652                                decoder.identify(arena, &mut identifier);
653                            }
654                        }
655
656                        for arena in trace {
657                            decoder.identify(arena, &mut identifier);
658                            gas_report.analyze([arena], &decoder).await;
659                        }
660                    }
661                }
662
663                // Collect and merge gas snapshots.
664                for (group, new_snapshots) in &result.gas_snapshots {
665                    gas_snapshots.entry(group.clone()).or_default().extend(new_snapshots.clone());
666                }
667            }
668
669            // Write gas snapshots to disk if any were collected.
670            if !gas_snapshots.is_empty() {
671                // By default `gas_snapshot_check` is set to `false` in the config.
672                //
673                // The user can either:
674                // - Set `FORGE_SNAPSHOT_CHECK=true` in the environment.
675                // - Pass `--gas-snapshot-check=true` as a CLI argument.
676                // - Set `gas_snapshot_check = true` in the config.
677                //
678                // If the user passes `--gas-snapshot-check=<bool>` then it will override the config
679                // and the environment variable, disabling the check if `false` is passed.
680                //
681                // Exiting early with code 1 if differences are found.
682                if self.gas_snapshot_check.unwrap_or(config.gas_snapshot_check) {
683                    let differences_found = gas_snapshots.clone().into_iter().fold(
684                        false,
685                        |mut found, (group, snapshots)| {
686                            // If the snapshot file doesn't exist, we can't compare so we skip.
687                            if !&config.snapshots.join(format!("{group}.json")).exists() {
688                                return false;
689                            }
690
691                            let previous_snapshots: BTreeMap<String, String> =
692                                fs::read_json_file(&config.snapshots.join(format!("{group}.json")))
693                                    .expect("Failed to read snapshots from disk");
694
695                            let diff: BTreeMap<_, _> = snapshots
696                                .iter()
697                                .filter_map(|(k, v)| {
698                                    previous_snapshots.get(k).and_then(|previous_snapshot| {
699                                        if previous_snapshot != v {
700                                            Some((
701                                                k.clone(),
702                                                (previous_snapshot.clone(), v.clone()),
703                                            ))
704                                        } else {
705                                            None
706                                        }
707                                    })
708                                })
709                                .collect();
710
711                            if !diff.is_empty() {
712                                let _ = sh_eprintln!(
713                                    "{}",
714                                    format!("\n[{group}] Failed to match snapshots:").red().bold()
715                                );
716
717                                for (key, (previous_snapshot, snapshot)) in &diff {
718                                    let _ = sh_eprintln!(
719                                        "{}",
720                                        format!("- [{key}] {previous_snapshot} → {snapshot}").red()
721                                    );
722                                }
723
724                                found = true;
725                            }
726
727                            found
728                        },
729                    );
730
731                    if differences_found {
732                        sh_eprintln!()?;
733                        eyre::bail!("Snapshots differ from previous run");
734                    }
735                }
736
737                // By default `gas_snapshot_emit` is set to `true` in the config.
738                //
739                // The user can either:
740                // - Set `FORGE_SNAPSHOT_EMIT=false` in the environment.
741                // - Pass `--gas-snapshot-emit=false` as a CLI argument.
742                // - Set `gas_snapshot_emit = false` in the config.
743                //
744                // If the user passes `--gas-snapshot-emit=<bool>` then it will override the config
745                // and the environment variable, enabling the check if `true` is passed.
746                if self.gas_snapshot_emit.unwrap_or(config.gas_snapshot_emit) {
747                    // Create `snapshots` directory if it doesn't exist.
748                    fs::create_dir_all(&config.snapshots)?;
749
750                    // Write gas snapshots to disk per group.
751                    gas_snapshots.clone().into_iter().for_each(|(group, snapshots)| {
752                        fs::write_pretty_json_file(
753                            &config.snapshots.join(format!("{group}.json")),
754                            &snapshots,
755                        )
756                        .expect("Failed to write gas snapshots to disk");
757                    });
758                }
759            }
760
761            // Print suite summary.
762            if !silent {
763                sh_println!("{}", suite_result.summary())?;
764            }
765
766            // Add the suite result to the outcome.
767            outcome.results.insert(contract_name, suite_result);
768
769            // Stop processing the remaining suites if any test failed and `fail_fast` is set.
770            if self.fail_fast && any_test_failed {
771                break;
772            }
773        }
774        outcome.last_run_decoder = Some(decoder);
775        let duration = timer.elapsed();
776
777        trace!(target: "forge::test", len=outcome.results.len(), %any_test_failed, "done with results");
778
779        if let Some(gas_report) = gas_report {
780            let finalized = gas_report.finalize();
781            sh_println!("{}", &finalized)?;
782            outcome.gas_report = Some(finalized);
783        }
784
785        if !self.summary && !shell::is_json() {
786            sh_println!("{}", outcome.summary(duration))?;
787        }
788
789        if self.summary && !outcome.results.is_empty() {
790            let summary_report = TestSummaryReport::new(self.detailed, outcome.clone());
791            sh_println!("{}", &summary_report)?;
792        }
793
794        // Reattach the task.
795        if let Err(e) = handle.await {
796            match e.try_into_panic() {
797                Ok(payload) => std::panic::resume_unwind(payload),
798                Err(e) => return Err(e.into()),
799            }
800        }
801
802        // Persist test run failures to enable replaying.
803        persist_run_failures(&config, &outcome);
804
805        Ok(outcome)
806    }
807
808    /// Returns the flattened [`FilterArgs`] arguments merged with [`Config`].
809    /// Loads and applies filter from file if only last test run failures performed.
810    pub fn filter(&self, config: &Config) -> Result<ProjectPathsAwareFilter> {
811        let mut filter = self.filter.clone();
812        if self.rerun {
813            filter.test_pattern = last_run_failures(config);
814        }
815        if filter.path_pattern.is_some() {
816            if self.path.is_some() {
817                bail!("Can not supply both --match-path and |path|");
818            }
819        } else {
820            filter.path_pattern = self.path.clone();
821        }
822        Ok(filter.merge_with_config(config))
823    }
824
825    /// Returns whether `BuildArgs` was configured with `--watch`
826    pub fn is_watch(&self) -> bool {
827        self.watch.watch.is_some()
828    }
829
830    /// Returns the [`watchexec::Config`] necessary to bootstrap a new watch loop.
831    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
832        self.watch.watchexec_config(|| {
833            let config = self.load_config()?;
834            Ok([config.src, config.test])
835        })
836    }
837}
838
839impl Provider for TestArgs {
840    fn metadata(&self) -> Metadata {
841        Metadata::named("Core Build Args Provider")
842    }
843
844    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
845        let mut dict = Dict::default();
846
847        let mut fuzz_dict = Dict::default();
848        if let Some(fuzz_seed) = self.fuzz_seed {
849            fuzz_dict.insert("seed".to_string(), fuzz_seed.to_string().into());
850        }
851        if let Some(fuzz_runs) = self.fuzz_runs {
852            fuzz_dict.insert("runs".to_string(), fuzz_runs.into());
853        }
854        if let Some(fuzz_timeout) = self.fuzz_timeout {
855            fuzz_dict.insert("timeout".to_string(), fuzz_timeout.into());
856        }
857        if let Some(fuzz_input_file) = self.fuzz_input_file.clone() {
858            fuzz_dict.insert("failure_persist_file".to_string(), fuzz_input_file.into());
859        }
860        dict.insert("fuzz".to_string(), fuzz_dict.into());
861
862        if let Some(etherscan_api_key) =
863            self.etherscan_api_key.as_ref().filter(|s| !s.trim().is_empty())
864        {
865            dict.insert("etherscan_api_key".to_string(), etherscan_api_key.to_string().into());
866        }
867
868        if self.show_progress {
869            dict.insert("show_progress".to_string(), true.into());
870        }
871
872        Ok(Map::from([(Config::selected_profile(), dict)]))
873    }
874}
875
876/// Lists all matching tests
877fn list(runner: MultiContractRunner, filter: &ProjectPathsAwareFilter) -> Result<TestOutcome> {
878    let results = runner.list(filter);
879
880    if shell::is_json() {
881        sh_println!("{}", serde_json::to_string(&results)?)?;
882    } else {
883        for (file, contracts) in &results {
884            sh_println!("{file}")?;
885            for (contract, tests) in contracts {
886                sh_println!("  {contract}")?;
887                sh_println!("    {}\n", tests.join("\n    "))?;
888            }
889        }
890    }
891    Ok(TestOutcome::empty(false))
892}
893
894/// Load persisted filter (with last test run failures) from file.
895fn last_run_failures(config: &Config) -> Option<regex::Regex> {
896    match fs::read_to_string(&config.test_failures_file) {
897        Ok(filter) => Regex::new(&filter)
898            .inspect_err(|e| {
899                _ = sh_warn!(
900                    "failed to parse test filter from {:?}: {e}",
901                    config.test_failures_file
902                )
903            })
904            .ok(),
905        Err(_) => None,
906    }
907}
908
909/// Persist filter with last test run failures (only if there's any failure).
910fn persist_run_failures(config: &Config, outcome: &TestOutcome) {
911    if outcome.failed() > 0 && fs::create_file(&config.test_failures_file).is_ok() {
912        let mut filter = String::new();
913        let mut failures = outcome.failures().peekable();
914        while let Some((test_name, _)) = failures.next() {
915            if test_name.is_any_test()
916                && let Some(test_match) = test_name.split("(").next()
917            {
918                filter.push_str(test_match);
919                if failures.peek().is_some() {
920                    filter.push('|');
921                }
922            }
923        }
924        let _ = fs::write(&config.test_failures_file, filter);
925    }
926}
927
928/// Generate test report in JUnit XML report format.
929fn junit_xml_report(results: &BTreeMap<String, SuiteResult>, verbosity: u8) -> Report {
930    let mut total_duration = Duration::default();
931    let mut junit_report = Report::new("Test run");
932    junit_report.set_timestamp(Utc::now());
933    for (suite_name, suite_result) in results {
934        let mut test_suite = TestSuite::new(suite_name);
935        total_duration += suite_result.duration;
936        test_suite.set_time(suite_result.duration);
937        test_suite.set_system_out(suite_result.summary());
938        for (test_name, test_result) in &suite_result.test_results {
939            let mut test_status = match test_result.status {
940                TestStatus::Success => TestCaseStatus::success(),
941                TestStatus::Failure => TestCaseStatus::non_success(NonSuccessKind::Failure),
942                TestStatus::Skipped => TestCaseStatus::skipped(),
943            };
944            if let Some(reason) = &test_result.reason {
945                test_status.set_message(reason);
946            }
947
948            let mut test_case = TestCase::new(test_name, test_status);
949            test_case.set_time(test_result.duration);
950
951            let mut sys_out = String::new();
952            let result_report = test_result.kind.report();
953            write!(sys_out, "{test_result} {test_name} {result_report}").unwrap();
954            if verbosity >= 2 && !test_result.logs.is_empty() {
955                write!(sys_out, "\\nLogs:\\n").unwrap();
956                let console_logs = decode_console_logs(&test_result.logs);
957                for log in console_logs {
958                    write!(sys_out, "  {log}\\n").unwrap();
959                }
960            }
961
962            test_case.set_system_out(sys_out);
963            test_suite.add_test_case(test_case);
964        }
965        junit_report.add_test_suite(test_suite);
966    }
967    junit_report.set_time(total_duration);
968    junit_report
969}
970
971#[cfg(test)]
972mod tests {
973    use super::*;
974    use foundry_config::Chain;
975
976    #[test]
977    fn watch_parse() {
978        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "-vw"]);
979        assert!(args.watch.watch.is_some());
980    }
981
982    #[test]
983    fn fuzz_seed() {
984        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "--fuzz-seed", "0x10"]);
985        assert!(args.fuzz_seed.is_some());
986    }
987
988    // <https://github.com/foundry-rs/foundry/issues/5913>
989    #[test]
990    fn fuzz_seed_exists() {
991        let args: TestArgs =
992            TestArgs::parse_from(["foundry-cli", "-vvv", "--gas-report", "--fuzz-seed", "0x10"]);
993        assert!(args.fuzz_seed.is_some());
994    }
995
996    #[test]
997    fn extract_chain() {
998        let test = |arg: &str, expected: Chain| {
999            let args = TestArgs::parse_from(["foundry-cli", arg]);
1000            assert_eq!(args.evm.env.chain, Some(expected));
1001            let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
1002            assert_eq!(config.chain, Some(expected));
1003            assert_eq!(evm_opts.env.chain_id, Some(expected.id()));
1004        };
1005        test("--chain-id=1", Chain::mainnet());
1006        test("--chain-id=42", Chain::from_id(42));
1007    }
1008}