forge/cmd/test/
mod.rs

use super::{install, test::filter::ProjectPathsAwareFilter, watch::WatchArgs};
use crate::{
    MultiContractRunner, MultiContractRunnerBuilder,
    decode::decode_console_logs,
    gas_report::GasReport,
    multi_runner::matches_artifact,
    result::{SuiteResult, TestOutcome, TestStatus},
    traces::{
        CallTraceDecoderBuilder, InternalTraceMode, TraceKind,
        debug::{ContractSources, DebugTraceIdentifier},
        decode_trace_arena, folded_stack_trace,
        identifier::SignaturesIdentifier,
    },
};
use alloy_primitives::U256;
use chrono::Utc;
use clap::{Parser, ValueHint};
use eyre::{Context, OptionExt, Result, bail};
use foundry_cli::{
    opts::{BuildOpts, EvmArgs, GlobalArgs},
    utils::{self, LoadConfig},
};
use foundry_common::{EmptyTestFilter, TestFunctionExt, compile::ProjectCompiler, fs, shell};
use foundry_compilers::{
    Language, ProjectCompileOutput, artifacts::output_selection::OutputSelection,
    compilers::multi::MultiCompiler, multi::MultiCompilerLanguage, utils::source_files_iter,
};
use foundry_config::{
    Config, figment,
    figment::{
        Metadata, Profile, Provider,
        value::{Dict, Map},
    },
    filter::GlobMatcher,
};
use foundry_debugger::Debugger;
use foundry_evm::traces::identifier::TraceIdentifiers;
use regex::Regex;
use std::{
    collections::{BTreeMap, BTreeSet},
    fmt::Write,
    path::PathBuf,
    sync::{Arc, mpsc::channel},
    time::{Duration, Instant},
};
use yansi::Paint;

mod filter;
mod summary;
use crate::{result::TestKind, traces::render_trace_arena_inner};
pub use filter::FilterArgs;
use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite};
use summary::{TestSummaryReport, format_invariant_metrics_table};

// Loads the project's figment and merges the build CLI arguments into it.
foundry_config::merge_impl_figment_convert!(TestArgs, build, evm);

/// CLI arguments for `forge test`.
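///
/// Example invocation (the contract name is a placeholder):
/// `forge test --match-contract <ContractName> --gas-report -vvv`.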
#[derive(Clone, Debug, Parser)]
#[command(next_help_heading = "Test options")]
pub struct TestArgs {
    // Include global options for users of this struct.
    #[command(flatten)]
    pub global: GlobalArgs,

    /// The contract file you want to test. This is a shortcut for `--match-path`.
    #[arg(value_hint = ValueHint::FilePath)]
    pub path: Option<GlobMatcher>,

    /// Run a single test in the debugger.
    ///
    /// The matching test will be opened in the debugger regardless of the outcome of the test.
    ///
    /// If the matching test is a fuzz test, then it will open the debugger on the first failure
    /// case. If the fuzz test does not fail, it will open the debugger on the last fuzz case.
    #[arg(long, conflicts_with_all = ["flamegraph", "flamechart", "decode_internal", "rerun"])]
    debug: bool,

    /// Generate a flamegraph for a single test. Implies `--decode-internal`.
    ///
    /// A flame graph visualizes, in sorted order, which functions or operations within the
    /// smart contract consume the most gas overall.
    #[arg(long)]
    flamegraph: bool,

    /// Generate a flamechart for a single test. Implies `--decode-internal`.
    ///
    /// A flame chart shows the gas usage over time, illustrating when each function is
    /// called (execution order) and how much gas it consumes at each point in the timeline.
    #[arg(long, conflicts_with = "flamegraph")]
    flamechart: bool,

    /// Identify internal functions in traces.
    ///
    /// This will trace internal functions and decode stack parameters.
    ///
    /// Parameters stored in memory (such as bytes or arrays) are currently decoded only when a
    /// single function is matched, similarly to `--debug`, for performance reasons.
    #[arg(long)]
    decode_internal: bool,

    /// Dumps all debugger steps to file.
    #[arg(
        long,
        requires = "debug",
        value_hint = ValueHint::FilePath,
        value_name = "PATH"
    )]
    dump: Option<PathBuf>,

    /// Print a gas report.
    #[arg(long, env = "FORGE_GAS_REPORT")]
    gas_report: bool,

    /// Check gas snapshots against previous runs.
    #[arg(long, env = "FORGE_SNAPSHOT_CHECK")]
    gas_snapshot_check: Option<bool>,

    /// Enable/disable recording of gas snapshot results.
    #[arg(long, env = "FORGE_SNAPSHOT_EMIT")]
    gas_snapshot_emit: Option<bool>,

    /// Exit with code 0 even if a test fails.
    #[arg(long, env = "FORGE_ALLOW_FAILURE")]
    allow_failure: bool,

    /// Suppress successful test traces and show only traces for failures.
    #[arg(long, short, env = "FORGE_SUPPRESS_SUCCESSFUL_TRACES", help_heading = "Display options")]
    suppress_successful_traces: bool,

    /// Output test results as JUnit XML report.
    #[arg(long, conflicts_with_all = ["quiet", "json", "gas_report", "summary", "list", "show_progress"], help_heading = "Display options")]
    pub junit: bool,

    /// Stop running tests after the first failure.
    #[arg(long)]
    pub fail_fast: bool,

    /// The Etherscan (or equivalent) API key.
    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
    etherscan_api_key: Option<String>,

    /// List tests instead of running them.
    #[arg(long, short, conflicts_with_all = ["show_progress", "decode_internal", "summary"], help_heading = "Display options")]
    list: bool,

    /// Set seed used to generate randomness during your fuzz runs.
    #[arg(long)]
    pub fuzz_seed: Option<U256>,

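    /// The number of fuzz runs.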
    #[arg(long, env = "FOUNDRY_FUZZ_RUNS", value_name = "RUNS")]
    pub fuzz_runs: Option<u64>,

    /// Timeout for each fuzz run in seconds.
    #[arg(long, env = "FOUNDRY_FUZZ_TIMEOUT", value_name = "TIMEOUT")]
    pub fuzz_timeout: Option<u64>,

    /// File to rerun fuzz failures from.
    #[arg(long)]
    pub fuzz_input_file: Option<String>,

    /// Show test execution progress.
    #[arg(long, conflicts_with_all = ["quiet", "json"], help_heading = "Display options")]
    pub show_progress: bool,

    /// Re-run recorded test failures from the last run.
    /// If no failures were recorded, a regular test run is performed.
    #[arg(long)]
    pub rerun: bool,

    /// Print test summary table.
    #[arg(long, help_heading = "Display options")]
    pub summary: bool,

    /// Print detailed test summary table.
    #[arg(long, help_heading = "Display options", requires = "summary")]
    pub detailed: bool,

    /// Disables the labels in the traces.
    #[arg(long, help_heading = "Display options")]
    pub disable_labels: bool,

    #[command(flatten)]
    filter: FilterArgs,

    #[command(flatten)]
    evm: EvmArgs,

    #[command(flatten)]
    pub build: BuildOpts,

    #[command(flatten)]
    pub watch: WatchArgs,
}

impl TestArgs {
    pub async fn run(self) -> Result<TestOutcome> {
        trace!(target: "forge::test", "executing test command");
        self.execute_tests().await
    }

    /// Returns a list of files that need to be compiled in order to run all the tests that match
    /// the given filter.
    ///
    /// This means it returns all sources that are not test contracts, plus those that match the
    /// filter. We always compile all non-test sources because tests might depend on them
    /// dynamically through cheatcodes.
    #[instrument(target = "forge::test", skip_all)]
    pub fn get_sources_to_compile(
        &self,
        config: &Config,
        test_filter: &ProjectPathsAwareFilter,
    ) -> Result<BTreeSet<PathBuf>> {
        // An empty filter doesn't filter out anything.
        // We can still optimize slightly by excluding scripts.
        if test_filter.is_empty() {
            return Ok(source_files_iter(&config.src, MultiCompilerLanguage::FILE_EXTENSIONS)
                .chain(source_files_iter(&config.test, MultiCompilerLanguage::FILE_EXTENSIONS))
                .collect());
        }

        let mut project = config.create_project(true, true)?;
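        // Restrict the compiler output to ABIs only; this keeps the pre-pass cheap, and the
        // ABIs are all that is needed to match artifacts against the test filter below.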
        project.update_output_selection(|selection| {
            *selection = OutputSelection::common_output_selection(["abi".to_string()]);
        });
        let output = project.compile()?;
        if output.has_compiler_errors() {
            sh_println!("{output}")?;
            eyre::bail!("Compilation failed");
        }

        Ok(output
            .artifact_ids()
            .filter_map(|(id, artifact)| artifact.abi.as_ref().map(|abi| (id, abi)))
            .filter(|(id, abi)| {
                id.source.starts_with(&config.src) || matches_artifact(test_filter, id, abi)
            })
            .map(|(id, _)| id.source)
            .collect())
    }

    /// Executes all the tests in the project.
    ///
    /// This will trigger the build process first. On success, all test contracts that match the
    /// configured filter will be executed.
    ///
    /// Returns the test results for all matching tests.
    pub async fn execute_tests(mut self) -> Result<TestOutcome> {
        // Merge all configs.
        let (mut config, mut evm_opts) = self.load_config_and_evm_opts()?;

        // Explicitly enable isolation for gas reports for more correct gas accounting.
        if self.gas_report {
            evm_opts.isolate = true;
        } else {
            // Do not collect gas report traces if gas report is not enabled.
            config.fuzz.gas_report_samples = 0;
            config.invariant.gas_report_samples = 0;
        }

        // Install missing dependencies.
        if install::install_missing_dependencies(&mut config) && config.auto_detect_remappings {
            // need to re-configure here to also catch additional remappings
            config = self.load_config()?;
        }

        // Set up the project.
        let project = config.project()?;

        let filter = self.filter(&config)?;
        trace!(target: "forge::test", ?filter, "using filter");

        let compiler = ProjectCompiler::new()
            .dynamic_test_linking(config.dynamic_test_linking)
            .quiet(shell::is_json() || self.junit)
            .files(self.get_sources_to_compile(&config, &filter)?);
        let output = compiler.compile(&project)?;

        // Create test options from general project settings and compiler output.
        let project_root = &project.paths.root;

        let should_debug = self.debug;
        let should_draw = self.flamegraph || self.flamechart;

        // Determine print verbosity and executor verbosity.
        let verbosity = evm_opts.verbosity;
        if (self.gas_report && evm_opts.verbosity < 3) || self.flamegraph || self.flamechart {
            evm_opts.verbosity = 3;
        }

        let env = evm_opts.evm_env().await?;

        // Enable internal tracing for more informative flamegraph.
        if should_draw && !self.decode_internal {
            self.decode_internal = true;
        }

        // Choose the internal function tracing mode, if --decode-internal is provided.
        let decode_internal = if self.decode_internal {
            // If more than one function matched, we enable simple tracing.
            // If only one function matched, we enable full tracing. This is done in `run_tests`.
            InternalTraceMode::Simple
        } else {
            InternalTraceMode::None
        };

        // Prepare the test builder.
        let config = Arc::new(config);
        let runner = MultiContractRunnerBuilder::new(config.clone())
            .set_debug(should_debug)
            .set_decode_internal(decode_internal)
            .initial_balance(evm_opts.initial_balance)
            .evm_spec(config.evm_spec_id())
            .sender(evm_opts.sender)
            .with_fork(evm_opts.get_fork(&config, env.clone()))
            .enable_isolation(evm_opts.isolate)
            .networks(evm_opts.networks)
            .fail_fast(self.fail_fast)
            .build::<MultiCompiler>(project_root, &output, env, evm_opts)?;

        let libraries = runner.libraries.clone();
        let mut outcome = self.run_tests(runner, config, verbosity, &filter, &output).await?;

        if should_draw {
            let (suite_name, test_name, mut test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let (_, arena) = test_result
                .traces
                .iter_mut()
                .find(|(kind, _)| *kind == TraceKind::Execution)
                .unwrap();

            // Decode traces.
            let decoder = outcome.last_run_decoder.as_ref().unwrap();
            decode_trace_arena(arena, decoder).await;
            let mut fst = folded_stack_trace::build(arena);

            let label = if self.flamegraph { "flamegraph" } else { "flamechart" };
            let contract = suite_name.split(':').next_back().unwrap();
            let test_name = test_name.trim_end_matches("()");
            let file_name = format!("cache/{label}_{contract}_{test_name}.svg");
            let file = std::fs::File::create(&file_name).wrap_err("failed to create file")?;
            let file = std::io::BufWriter::new(file);

            let mut options = inferno::flamegraph::Options::default();
            options.title = format!("{label} {contract}::{test_name}");
            options.count_name = "gas".to_string();
            if self.flamechart {
                options.flame_chart = true;
                fst.reverse();
            }

            // Generate SVG.
            inferno::flamegraph::from_lines(&mut options, fst.iter().map(String::as_str), file)
                .wrap_err("failed to write svg")?;
            sh_println!("Saved to {file_name}")?;

            // Open SVG in default program.
            if let Err(e) = opener::open(&file_name) {
                sh_err!("Failed to open {file_name}; please open it manually: {e}")?;
            }
        }

        if should_debug {
            // Get first non-empty suite result. We will have only one such entry.
            let (_, _, test_result) =
                outcome.remove_first().ok_or_eyre("no tests were executed")?;

            let sources =
                ContractSources::from_project_output(&output, project.root(), Some(&libraries))?;

            // Run the debugger.
            let mut builder = Debugger::builder()
                .traces(
                    test_result.traces.iter().filter(|(t, _)| t.is_execution()).cloned().collect(),
                )
                .sources(sources)
                .breakpoints(test_result.breakpoints.clone());

            if let Some(decoder) = &outcome.last_run_decoder {
                builder = builder.decoder(decoder);
            }

            let mut debugger = builder.build();
            if let Some(dump_path) = self.dump {
                debugger.dump_to_file(&dump_path)?;
            } else {
                debugger.try_run_tui()?;
            }
        }

        Ok(outcome)
    }

    /// Runs all tests that match the filter predicate using the given test runner.
    pub async fn run_tests(
        &self,
        mut runner: MultiContractRunner,
        config: Arc<Config>,
        verbosity: u8,
        filter: &ProjectPathsAwareFilter,
        output: &ProjectCompileOutput,
    ) -> eyre::Result<TestOutcome> {
        if self.list {
            return list(runner, filter);
        }

        trace!(target: "forge::test", "running all tests");

        // If we need to render to a serialized format, we should not print anything else to stdout.
        let silent = self.gas_report && shell::is_json() || self.summary && shell::is_json();

        let num_filtered = runner.matching_test_functions(filter).count();

        if num_filtered == 0 {
            let mut total_tests = num_filtered;
            if !filter.is_empty() {
                total_tests = runner.matching_test_functions(&EmptyTestFilter::default()).count();
            }
            if total_tests == 0 {
                sh_println!(
                    "No tests found in project! Forge looks for functions that start with `test`"
                )?;
            } else {
                let mut msg = format!("no tests match the provided pattern:\n{filter}");
                // Try to suggest a test when there's no match.
                if let Some(test_pattern) = &filter.args().test_pattern {
                    let test_name = test_pattern.as_str();
                    // Filter contracts but not test functions.
                    let candidates = runner.all_test_functions(filter).map(|f| &f.name);
                    if let Some(suggestion) = utils::did_you_mean(test_name, candidates).pop() {
                        write!(msg, "\nDid you mean `{suggestion}`?")?;
                    }
                }
                sh_warn!("{msg}")?;
            }
            return Ok(TestOutcome::empty(false));
        }

        if num_filtered != 1 && (self.debug || self.flamegraph || self.flamechart) {
            let action = if self.flamegraph {
                "generate a flamegraph"
            } else if self.flamechart {
                "generate a flamechart"
            } else {
                "run the debugger"
            };
            let filter = if filter.is_empty() {
                String::new()
            } else {
                format!("\n\nFilter used:\n{filter}")
            };
            eyre::bail!(
                "{num_filtered} tests matched your criteria, but exactly 1 test must match in order to {action}.\n\n\
                 Use --match-contract and --match-path to further limit the search.{filter}",
            );
        }

        // If exactly one test matched, we enable full tracing.
        if num_filtered == 1 && self.decode_internal {
            runner.decode_internal = InternalTraceMode::Full;
        }

        // Run tests in a non-streaming fashion and collect results for serialization.
        if !self.gas_report && !self.summary && shell::is_json() {
            let mut results = runner.test_collect(filter)?;
            results.values_mut().for_each(|suite_result| {
                for test_result in suite_result.test_results.values_mut() {
                    if verbosity >= 2 {
                        // Decode logs at level 2 and above.
                        test_result.decoded_logs = decode_console_logs(&test_result.logs);
                    } else {
                        // Empty the logs for non-verbose runs.
                        test_result.logs = vec![];
                    }
                }
            });
            sh_println!("{}", serde_json::to_string(&results)?)?;
            return Ok(TestOutcome::new(results, self.allow_failure));
        }

        if self.junit {
            let results = runner.test_collect(filter)?;
            sh_println!("{}", junit_xml_report(&results, verbosity).to_string()?)?;
            return Ok(TestOutcome::new(results, self.allow_failure));
        }

        let remote_chain_id = runner.evm_opts.get_remote_chain_id().await;
        let known_contracts = runner.known_contracts.clone();

        let libraries = runner.libraries.clone();

        // Run tests in a streaming fashion.
        let (tx, rx) = channel::<(String, SuiteResult)>();
        let timer = Instant::now();
        let show_progress = config.show_progress;
        let handle = tokio::task::spawn_blocking({
            let filter = filter.clone();
            move || runner.test(&filter, tx, show_progress)
        });
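        // The runner sends each `SuiteResult` over `tx` as soon as the suite completes; the
        // loop over `rx` below renders results while the remaining suites are still running.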

        // Set up trace identifiers.
        let mut identifier = TraceIdentifiers::new().with_local(&known_contracts);

        // Avoid using Etherscan for gas reports, as we decode more traces and this would be
        // expensive.
        if !self.gas_report {
            identifier = identifier.with_etherscan(&config, remote_chain_id)?;
        }

        // Build the trace decoder.
        let mut builder = CallTraceDecoderBuilder::new()
            .with_known_contracts(&known_contracts)
            .with_label_disabled(self.disable_labels)
            .with_verbosity(verbosity);
        // Signatures are of no value for gas reports.
        if !self.gas_report {
            builder =
                builder.with_signature_identifier(SignaturesIdentifier::from_config(&config)?);
        }

        if self.decode_internal {
            let sources =
                ContractSources::from_project_output(output, &config.root, Some(&libraries))?;
            builder = builder.with_debug_identifier(DebugTraceIdentifier::new(sources));
        }
        let mut decoder = builder.build();

        let mut gas_report = self.gas_report.then(|| {
            GasReport::new(
                config.gas_reports.clone(),
                config.gas_reports_ignore.clone(),
                config.gas_reports_include_tests,
            )
        });

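        // Gas snapshots collected across all suites, keyed by group and then by snapshot name.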
        let mut gas_snapshots = BTreeMap::<String, BTreeMap<String, String>>::new();

        let mut outcome = TestOutcome::empty(self.allow_failure);

        let mut any_test_failed = false;
        for (contract_name, suite_result) in rx {
            let tests = &suite_result.test_results;

            // Clear the addresses and labels from previous test.
            decoder.clear_addresses();

            // We identify addresses if we're going to print *any* trace or gas report.
            let identify_addresses = verbosity >= 3
                || self.gas_report
                || self.debug
                || self.flamegraph
                || self.flamechart;

            // Print suite header.
            if !silent {
                sh_println!()?;
                for warning in &suite_result.warnings {
                    sh_warn!("{warning}")?;
                }
                if !tests.is_empty() {
                    let len = tests.len();
                    let tests = if len > 1 { "tests" } else { "test" };
                    sh_println!("Ran {len} {tests} for {contract_name}")?;
                }
            }

            // Process individual test results, printing logs and traces when necessary.
            for (name, result) in tests {
                let show_traces =
                    !self.suppress_successful_traces || result.status == TestStatus::Failure;
                if !silent {
                    sh_println!("{}", result.short_result(name))?;

                    // Display invariant metrics if invariant kind.
                    if let TestKind::Invariant { metrics, .. } = &result.kind
                        && !metrics.is_empty()
                    {
                        let _ = sh_println!("\n{}\n", format_invariant_metrics_table(metrics));
                    }

                    // We only display logs at level 2 and above
                    if verbosity >= 2 && show_traces {
                        // We only decode logs from Hardhat and DS-style console events
                        let console_logs = decode_console_logs(&result.logs);
                        if !console_logs.is_empty() {
                            sh_println!("Logs:")?;
                            for log in console_logs {
                                sh_println!("  {log}")?;
                            }
                            sh_println!()?;
                        }
                    }
                }

                // We shouldn't break out of the outer loop directly here so that we finish
                // processing the remaining tests and print the suite summary.
                any_test_failed |= result.status == TestStatus::Failure;

                // Clear the addresses and labels from previous runs.
                decoder.clear_addresses();
                decoder.labels.extend(result.labels.iter().map(|(k, v)| (*k, v.clone())));

                // Identify addresses and decode traces.
                let mut decoded_traces = Vec::with_capacity(result.traces.len());
                for (kind, arena) in &mut result.traces.clone() {
                    if identify_addresses {
                        decoder.identify(arena, &mut identifier);
                    }

                    // verbosity:
                    // - 0..3: nothing
                    // - 3: only display traces for failed tests
                    // - 4: also display the setup trace for failed tests
                    // - 5..: display all traces for all tests, including storage changes
                    let should_include = match kind {
                        TraceKind::Execution => {
                            (verbosity == 3 && result.status.is_failure()) || verbosity >= 4
                        }
                        TraceKind::Setup => {
                            (verbosity == 4 && result.status.is_failure()) || verbosity >= 5
                        }
                        TraceKind::Deployment => false,
                    };

                    if should_include {
                        decode_trace_arena(arena, &decoder).await;
                        decoded_traces.push(render_trace_arena_inner(arena, false, verbosity > 4));
                    }
                }

                if !silent && show_traces && !decoded_traces.is_empty() {
                    sh_println!("Traces:")?;
                    for trace in &decoded_traces {
                        sh_println!("{trace}")?;
                    }
                }

                if let Some(gas_report) = &mut gas_report {
                    gas_report.analyze(result.traces.iter().map(|(_, a)| &a.arena), &decoder).await;

                    for trace in &result.gas_report_traces {
                        decoder.clear_addresses();

                        // Re-process the setup and deployment traces to collect identities
                        // created in setUp and the constructor.
                        for (kind, arena) in &result.traces {
                            if !matches!(kind, TraceKind::Execution) {
                                decoder.identify(arena, &mut identifier);
                            }
                        }

                        for arena in trace {
                            decoder.identify(arena, &mut identifier);
                            gas_report.analyze([arena], &decoder).await;
                        }
                    }
                }

                // Collect and merge gas snapshots.
                for (group, new_snapshots) in &result.gas_snapshots {
                    gas_snapshots.entry(group.clone()).or_default().extend(new_snapshots.clone());
                }
            }

            // Write gas snapshots to disk if any were collected.
            if !gas_snapshots.is_empty() {
                // By default `gas_snapshot_check` is set to `false` in the config.
                //
                // The user can either:
                // - Set `FORGE_SNAPSHOT_CHECK=true` in the environment.
                // - Pass `--gas-snapshot-check=true` as a CLI argument.
                // - Set `gas_snapshot_check = true` in the config.
                //
                // If the user passes `--gas-snapshot-check=<bool>` then it will override the config
                // and the environment variable, disabling the check if `false` is passed.
                //
                // Exiting early with code 1 if differences are found.
                if self.gas_snapshot_check.unwrap_or(config.gas_snapshot_check) {
                    let differences_found = gas_snapshots.clone().into_iter().fold(
                        false,
                        |mut found, (group, snapshots)| {
                            // If the snapshot file doesn't exist, we can't compare, so skip this
                            // group while keeping any differences already found.
                            if !&config.snapshots.join(format!("{group}.json")).exists() {
                                return found;
                            }

                            let previous_snapshots: BTreeMap<String, String> =
                                fs::read_json_file(&config.snapshots.join(format!("{group}.json")))
                                    .expect("Failed to read snapshots from disk");

                            let diff: BTreeMap<_, _> = snapshots
                                .iter()
                                .filter_map(|(k, v)| {
                                    previous_snapshots.get(k).and_then(|previous_snapshot| {
                                        if previous_snapshot != v {
                                            Some((
                                                k.clone(),
                                                (previous_snapshot.clone(), v.clone()),
                                            ))
                                        } else {
                                            None
                                        }
                                    })
                                })
                                .collect();

                            if !diff.is_empty() {
                                let _ = sh_eprintln!(
                                    "{}",
                                    format!("\n[{group}] Failed to match snapshots:").red().bold()
                                );

                                for (key, (previous_snapshot, snapshot)) in &diff {
                                    let _ = sh_eprintln!(
                                        "{}",
                                        format!("- [{key}] {previous_snapshot} → {snapshot}").red()
                                    );
                                }

                                found = true;
                            }

                            found
                        },
                    );

                    if differences_found {
                        sh_eprintln!()?;
                        eyre::bail!("Snapshots differ from previous run");
                    }
                }

                // By default `gas_snapshot_emit` is set to `true` in the config.
                //
                // The user can either:
                // - Set `FORGE_SNAPSHOT_EMIT=false` in the environment.
                // - Pass `--gas-snapshot-emit=false` as a CLI argument.
                // - Set `gas_snapshot_emit = false` in the config.
                //
                // If the user passes `--gas-snapshot-emit=<bool>` then it will override the config
                // and the environment variable, enabling the check if `true` is passed.
                if self.gas_snapshot_emit.unwrap_or(config.gas_snapshot_emit) {
                    // Create `snapshots` directory if it doesn't exist.
                    fs::create_dir_all(&config.snapshots)?;

                    // Write gas snapshots to disk per group.
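                    // Each group becomes `<snapshots_dir>/<group>.json`: a flat JSON object
                    // mapping snapshot name to its recorded value.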
                    gas_snapshots.clone().into_iter().for_each(|(group, snapshots)| {
                        fs::write_pretty_json_file(
                            &config.snapshots.join(format!("{group}.json")),
                            &snapshots,
                        )
                        .expect("Failed to write gas snapshots to disk");
                    });
                }
            }

            // Print suite summary.
            if !silent {
                sh_println!("{}", suite_result.summary())?;
            }

            // Add the suite result to the outcome.
            outcome.results.insert(contract_name, suite_result);

            // Stop processing the remaining suites if any test failed and `fail_fast` is set.
            if self.fail_fast && any_test_failed {
                break;
            }
        }
        outcome.last_run_decoder = Some(decoder);
        let duration = timer.elapsed();

        trace!(target: "forge::test", len=outcome.results.len(), %any_test_failed, "done with results");

        if let Some(gas_report) = gas_report {
            let finalized = gas_report.finalize();
            sh_println!("{}", &finalized)?;
            outcome.gas_report = Some(finalized);
        }

        if !self.summary && !shell::is_json() {
            sh_println!("{}", outcome.summary(duration))?;
        }

        if self.summary && !outcome.results.is_empty() {
            let summary_report = TestSummaryReport::new(self.detailed, outcome.clone());
            sh_println!("{}", &summary_report)?;
        }

        // Reattach the task.
        if let Err(e) = handle.await {
            match e.try_into_panic() {
                Ok(payload) => std::panic::resume_unwind(payload),
                Err(e) => return Err(e.into()),
            }
        }

        // Persist test run failures to enable replaying.
        persist_run_failures(&config, &outcome);

        Ok(outcome)
    }

    /// Returns the flattened [`FilterArgs`] arguments merged with [`Config`].
    /// When `--rerun` is set, loads and applies the filter persisted from the last test run's
    /// failures.
    pub fn filter(&self, config: &Config) -> Result<ProjectPathsAwareFilter> {
        let mut filter = self.filter.clone();
        if self.rerun {
            filter.test_pattern = last_run_failures(config);
        }
        if filter.path_pattern.is_some() {
            if self.path.is_some() {
                bail!("Cannot supply both --match-path and |path|");
            }
        } else {
            filter.path_pattern = self.path.clone();
        }
        Ok(filter.merge_with_config(config))
    }

    /// Returns whether `TestArgs` was configured with `--watch`.
    pub fn is_watch(&self) -> bool {
        self.watch.watch.is_some()
    }

    /// Returns the [`watchexec::Config`] necessary to bootstrap a new watch loop.
    pub(crate) fn watchexec_config(&self) -> Result<watchexec::Config> {
        self.watch.watchexec_config(|| {
            let config = self.load_config()?;
            Ok([config.src, config.test])
        })
    }
}

impl Provider for TestArgs {
    fn metadata(&self) -> Metadata {
        Metadata::named("Core Build Args Provider")
    }

    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
        let mut dict = Dict::default();

        let mut fuzz_dict = Dict::default();
        if let Some(fuzz_seed) = self.fuzz_seed {
            fuzz_dict.insert("seed".to_string(), fuzz_seed.to_string().into());
        }
        if let Some(fuzz_runs) = self.fuzz_runs {
            fuzz_dict.insert("runs".to_string(), fuzz_runs.into());
        }
        if let Some(fuzz_timeout) = self.fuzz_timeout {
            fuzz_dict.insert("timeout".to_string(), fuzz_timeout.into());
        }
        if let Some(fuzz_input_file) = self.fuzz_input_file.clone() {
            fuzz_dict.insert("failure_persist_file".to_string(), fuzz_input_file.into());
        }
        dict.insert("fuzz".to_string(), fuzz_dict.into());

        if let Some(etherscan_api_key) =
            self.etherscan_api_key.as_ref().filter(|s| !s.trim().is_empty())
        {
            dict.insert("etherscan_api_key".to_string(), etherscan_api_key.to_string().into());
        }

        if self.show_progress {
            dict.insert("show_progress".to_string(), true.into());
        }

        Ok(Map::from([(Config::selected_profile(), dict)]))
    }
}

/// Lists all matching tests
fn list(runner: MultiContractRunner, filter: &ProjectPathsAwareFilter) -> Result<TestOutcome> {
    let results = runner.list(filter);

    if shell::is_json() {
        sh_println!("{}", serde_json::to_string(&results)?)?;
    } else {
        for (file, contracts) in &results {
            sh_println!("{file}")?;
            for (contract, tests) in contracts {
                sh_println!("  {contract}")?;
                sh_println!("    {}\n", tests.join("\n    "))?;
            }
        }
    }
    Ok(TestOutcome::empty(false))
}

/// Load persisted filter (with last test run failures) from file.
fn last_run_failures(config: &Config) -> Option<regex::Regex> {
    match fs::read_to_string(&config.test_failures_file) {
        Ok(filter) => Regex::new(&filter)
            .inspect_err(|e| {
                _ = sh_warn!(
                    "failed to parse test filter from {:?}: {e}",
                    config.test_failures_file
                )
            })
            .ok(),
        Err(_) => None,
    }
}

/// Persist filter with last test run failures (only if there's any failure).
fn persist_run_failures(config: &Config, outcome: &TestOutcome) {
    if outcome.failed() > 0 && fs::create_file(&config.test_failures_file).is_ok() {
        let mut filter = String::new();
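        // Build a `|`-separated alternation of failed test names (signatures stripped), e.g.
        // `test_a|test_b`; `last_run_failures` parses it back into a regex when `--rerun` is used.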
        let mut failures = outcome.failures().peekable();
        while let Some((test_name, _)) = failures.next() {
            if test_name.is_any_test()
                && let Some(test_match) = test_name.split("(").next()
            {
                filter.push_str(test_match);
                if failures.peek().is_some() {
                    filter.push('|');
                }
            }
        }
        let _ = fs::write(&config.test_failures_file, filter);
    }
}

/// Generates a test report in JUnit XML format.
fn junit_xml_report(results: &BTreeMap<String, SuiteResult>, verbosity: u8) -> Report {
    let mut total_duration = Duration::default();
    let mut junit_report = Report::new("Test run");
    junit_report.set_timestamp(Utc::now());
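    // One JUnit test suite is emitted per contract suite, and one test case per test function.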
    for (suite_name, suite_result) in results {
        let mut test_suite = TestSuite::new(suite_name);
        total_duration += suite_result.duration;
        test_suite.set_time(suite_result.duration);
        test_suite.set_system_out(suite_result.summary());
        for (test_name, test_result) in &suite_result.test_results {
            let mut test_status = match test_result.status {
                TestStatus::Success => TestCaseStatus::success(),
                TestStatus::Failure => TestCaseStatus::non_success(NonSuccessKind::Failure),
                TestStatus::Skipped => TestCaseStatus::skipped(),
            };
            if let Some(reason) = &test_result.reason {
                test_status.set_message(reason);
            }

            let mut test_case = TestCase::new(test_name, test_status);
            test_case.set_time(test_result.duration);

            let mut sys_out = String::new();
            let result_report = test_result.kind.report();
            write!(sys_out, "{test_result} {test_name} {result_report}").unwrap();
            if verbosity >= 2 && !test_result.logs.is_empty() {
                write!(sys_out, "\\nLogs:\\n").unwrap();
                let console_logs = decode_console_logs(&test_result.logs);
                for log in console_logs {
                    write!(sys_out, "  {log}\\n").unwrap();
                }
            }

            test_case.set_system_out(sys_out);
            test_suite.add_test_case(test_case);
        }
        junit_report.add_test_suite(test_suite);
    }
    junit_report.set_time(total_duration);
    junit_report
}

#[cfg(test)]
mod tests {
    use super::*;
    use foundry_config::Chain;

    #[test]
    fn watch_parse() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "-vw"]);
        assert!(args.watch.watch.is_some());
    }

    #[test]
    fn fuzz_seed() {
        let args: TestArgs = TestArgs::parse_from(["foundry-cli", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    // <https://github.com/foundry-rs/foundry/issues/5913>
    #[test]
    fn fuzz_seed_exists() {
        let args: TestArgs =
            TestArgs::parse_from(["foundry-cli", "-vvv", "--gas-report", "--fuzz-seed", "0x10"]);
        assert!(args.fuzz_seed.is_some());
    }

    #[test]
    fn extract_chain() {
        let test = |arg: &str, expected: Chain| {
            let args = TestArgs::parse_from(["foundry-cli", arg]);
            assert_eq!(args.evm.env.chain, Some(expected));
            let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
            assert_eq!(config.chain, Some(expected));
            assert_eq!(evm_opts.env.chain_id, Some(expected.id()));
        };
        test("--chain-id=1", Chain::mainnet());
        test("--chain-id=42", Chain::from_id(42));
    }
}